Dataset columns:

  column           type      range
  ---------------  --------  -----------------------------
  query            string    9 to 3.4k characters
  document         string    9 to 87.4k characters
  metadata         dict      -
  negatives        sequence  4 to 101 items
  negative_scores  sequence  4 to 101 items
  document_score   string    3 to 10 characters
  document_rank    string    102 distinct values
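To make the column layout concrete, below is a minimal sketch (not part of the dataset itself) of one row rebuilt as a plain Python dict. Long fields are abridged, the values are copied from the first sample record that follows, and the assumption that each negative_scores entry aligns positionally with the corresponding negatives entry is an inference rather than something stated in the schema.

# Sketch of a single row; values abridged from the first sample record below.
row = {
    "query": "Make sure all required tags are set",
    "document": "def clean_tags(self): ...",          # positive code snippet, 9 to ~87.4k chars
    "metadata": {
        "objective": {
            "self": [],
            "paired": [],
            "triplet": [["query", "document", "negatives"]],
        }
    },
    "negatives": [                                     # 4 to 101 negative code snippets
        "def _verify_tags(self): ...",
        "def test_tags(question): ...",
    ],
    # Scores appear as stringified floats in the dump; assumed to align
    # positionally with the entries in "negatives".
    "negative_scores": ["0.6713339", "0.62802184"],
    "document_score": "0.6199197",                     # score of the positive document
    "document_rank": "3",                              # rank of the positive (column has 102 distinct values)
}

print(row["query"], "->", row["document_rank"])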
Make sure all required tags are set
def clean_tags(self):
    if hasattr(self.instance, 'get_required_keys') and hasattr(self.instance, 'tags'):
        for key in self.instance.get_required_keys():
            if key not in self.cleaned_data.get('tags'):
                raise forms.ValidationError("Tag %s missing." % key)
    return self.cleaned_data.get('tags')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _verify_tags(self):\n for tag in self.tags:\n if tag.lower() in VASP_TAG_LIST:\n continue\n else:\n print((\"Warning: unknown INCAR tag '\" + tag + \"' with value '\" + str(self.tags[tag]) + \"'\"))", "def test_tags(question):\n assert \"tags\" in question[\"instance\"]\n tags = set(question[\"instance\"][\"tags\"])\n # there should be at least one tag\n assert len(tags) >= 1\n # each tags should be in VALID_TAGS\n assert len(tags - VALID_TAGS) == 0\n # there should be exactly one category-defining tag\n assert len(tags.intersection(CATEGORY_TAGS)) == 1", "def standard_tag_checks(self, tag, attrs):\n if self.doctype == \"missing\":\n self.errmsg(\"A <!doctype ...> is required at the top of the file\")\n self.doctype = \"reported\"\n\n if 0 == len(self.stack) and tag != 'html':\n self.errmsg(\"The top level tag should be <html>\")\n elif 1 == len(self.stack) and tag != 'head' and tag != 'body':\n self.errmsg(\"stray '%s' tag found\" % tag)\n\n if tag == 'style' or 'style' in [n for (n,v) in attrs]:\n self.errmsg('warning: external styling is prefered', 0)\n \n self.handle_named_tag(tag, attrs)\n self.catch_unquoted_attrs(self.get_starttag_text(), attrs)\n self.catch_deprecated_tags(tag)\n self.catch_uppercase_tags(tag)", "def check_tags(self):\n if(self.tags is None or not self.tags.get('subscriber', False)):\n self.filters |= Filters.NonSubs\n\n if(self.tags is None or not self.tags.get('user-type', 0) > 0):\n self.filters |= Filters.NonMods", "def _add_default_tags(self):\n self.tags.add_tag('ban', required=True)", "def test_02_Tags(self):\n # print(PrettyFormatAny.form(self.m_xml, 'Xml'))\n self.assertEqual(self.m_xml.root.tag, TESTING_PYHOUSE)\n self.assertEqual(self.m_xml.computer_div.tag, 'ComputerDivision')\n self.assertEqual(self.m_xml.house_div.tag, 'HouseDivision')\n self.assertEqual(self.m_xml.lighting_sect.tag, 'LightingSection')\n self.assertEqual(self.m_xml.button_sect.tag, 'ButtonSection')\n self.assertEqual(self.m_xml.button.tag, 'Button')\n self.assertEqual(self.m_xml.controller_sect.tag, 'ControllerSection')\n self.assertEqual(self.m_xml.controller.tag, 'Controller')\n self.assertEqual(self.m_xml.light_sect.tag, 'LightSection')\n self.assertEqual(self.m_xml.light.tag, 'Light')", "def __load_tags(self) -> None:\n self.tags = TagHelper.TagHelper.generate_tag_object(self)\n self.tag_helper = TagHelper.TagHelper(self)\n self.tag_helper.fetch()", "def sanity_check(self):\n pass", "def test_01_Tags(self):\n # print(PrettyFormatAny.form(self.m_xml, 'A1-01-A - Tags'))\n self.assertEqual(self.m_xml.root.tag, TESTING_PYHOUSE)", "def test_format_bad_tags(self):\n tags = self.c._format_tags(None)\n self.assertEqual(0, len(tags))", "def test_init(self):\n for tag in self.tags:\n for value in self.values:\n this_tag = tag(value)\n self.assertEqual(value, this_tag.value)\n self.assertEqual([], this_tag.body)", "def required_fields():\n module_logger.debug(\"In required_fields.\")\n\n # A tuple of one must have a comma after the single value...\n return (\"tags\",)", "def prefill_tags(self, prefill_tags):\n\n self._prefill_tags = prefill_tags", "def __init__(self, initial_tags, other_tags, tagger):\n self.itags = set([i for i in initial_tags])\n self.otags = set([i for i in other_tags])\n self.tagger = tagger", "def __init__(self, tags=''):\n self.tags = tags", "def unknown_starttag(self, tag, attrs):\n if tag in self.valid_tags:\n self.result.append('<' + tag)\n for k, v in attrs:\n if string.lower(k[0:2]) != 'on' and", "def tags():", "def test_no_tags(self):\n 
self.request.log(\"Hello World\")\n self.request.end()\n entry = self.get_entry()\n assert len(entry['tags']) == 0", "def setup(self):\r\n self.text_input_values = {}\r\n if self.tag == 'radiotextgroup':\r\n self.html_input_type = \"radio\"\r\n elif self.tag == 'checkboxtextgroup':\r\n self.html_input_type = \"checkbox\"\r\n else:\r\n raise Exception(\"ChoiceGroup: unexpected tag {0}\".format(self.tag))\r\n\r\n if self.value == '':\r\n # Make `value` an empty dictionary, if it currently has an empty\r\n # value. This is necessary because the template expects a\r\n # dictionary.\r\n self.value = {}\r\n self.choices = self.extract_choices(self.xml)", "def __init__(self, tags):\n self.tags = tags", "def _check_required_fields(self):\n assert self.volume_title\n super(MultiMonoComposer, self)._check_required_fields()", "def tags(self, tags):\n self._tags = tags", "def tags(self, tags):\n self._tags = tags", "def tags(self, tags):\n self._tags = tags", "def _check_required_fields(self):\n assert self.title\n assert self.format", "def ignores(self):\n pass # make ignore_tags unaccessible", "def __init__(__self__, *,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)", "def defined_tags(self, defined_tags):\n self._defined_tags = defined_tags", "def test_has_required_attrs(self):\n\n for attr in ['entity_id', 'open_badge_id', 'created_at',\n 'created_by', 'issuer', 'issuer_open_badge_id',\n 'image', 'expires', 'extensions']:\n self.assertIn(attr, Badge.REQUIRED_ATTRS)", "def test_post_blank_tag(self):\n response = self.client.post(self.get_url(self.trait.pk), {'tag': '', })\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 1)\n self.assertTrue('Oops!' in str(messages[0]))\n form = response.context['form']\n self.assertEqual(form['tag'].errors, [u'This field is required.'])\n self.assertNotIn(self.tag, self.trait.all_tags.all())", "def test_post_blank_tag(self):\n response = self.client.post(self.get_url(self.trait.pk), {'tag': '', })\n messages = list(response.wsgi_request._messages)\n self.assertEqual(len(messages), 1)\n self.assertTrue('Oops!' 
in str(messages[0]))\n form = response.context['form']\n self.assertEqual(form['tag'].errors, [u'This field is required.'])\n self.assertNotIn(self.tag, self.trait.all_tags.all())", "def validate(self):\n for search_tag_name in self.get_search_tag_names():\n search_tag_obj = Tag(search_tag_name)\n for search_tag_value in self.get_search_tag_values(search_tag_name):\n for new_tag_name in self.get_new_tag_names(search_tag_name, search_tag_value):\n new_tag_obj = Tag(new_tag_name)\n new_tag_value = self.get_new_tag_value(search_tag_name, search_tag_value, new_tag_name)\n if new_tag_obj.repeatable:\n if not isinstance(new_tag_value, list):\n raise KeyError('%s needs a list'%(new_tag_name))\n else:\n if isinstance(new_tag_value, list):\n raise KeyError('%s needs a scalar value'%(new_tag_name))", "def test_good_practice_attrs(self):\n # FormOverrideMixIn.good_practice_attrs\n pass", "def test_non_additive_requires_tags(self):\n\n # local imports of code-under-test ensure moto has mocks\n # registered before any possible calls out to AWS\n from awstools.awstools import launch_instances, run_block_device_dict, farm_security_group_setup\n\n # launch_instances requires vpc setup as done by firesim/scripts/setup_firesim.py\n from awstools.aws_setup import aws_setup\n aws_setup()\n farm_security_group_setup()\n\n type = 'f1.2xlarge'\n\n with pytest.raises(ValueError):\n launch_instances(type, 1,\n instancemarket=\"ondemand\", spotinterruptionbehavior=None, spotmaxprice=None,\n blockdevices=run_block_device_dict(),\n always_expand=False)", "def unknown_starttag(self, tag, attrs):\n starttrs = \"\".join(['%s=\"%s\"' % (key, value) for key, value in attrs])\n self.pieces.append(\"<%(tag)s %(starttrs)s>\" % locals())", "def sanity_check(self):\n return True", "def validate(self, attrs):\n tag_name = attrs['tag_name']\n club = attrs['club']\n request = self.context['request']\n profile = UserProfile.objects.get(user=request.user)\n if (club not in profile.get_club_privileges() and\n club not in profile.get_workshop_privileges().values_list('club', flat=True)):\n raise serializers.ValidationError(\"You are not allowed to create tag for this club\")\n if Tag.objects.filter(tag_name=tag_name, club=club):\n raise serializers.ValidationError(\"The tag already exists for this club\")\n return attrs", "def test_page_tags(self):\n page, page_2 = self.get_pages()\n page_tags = models.PageTags.objects.create(extended_object=page)\n page_tags.tags.add(*self.tag_strings)\n\n self.assertTrue(page_has_tag(page, slugify(self.tag_strings[0])))\n self.assertTrue(page_has_tag(page, Tag.objects.get(slug=slugify(self.tag_strings[0]))))\n self.assertEqual(set(self.tag_strings), {tag.name for tag in get_page_tags(page)})\n\n self.assertFalse(page_has_tag(page_2, slugify(self.tag_strings[0])))\n self.assertEqual(set(), {tag.name for tag in get_page_tags(page_2)})", "def validate(self, obj):\n if 'tags' in obj and not isinstance(obj['tags'], list):\n raise aomi.exceptions.Validation('tags must be a list')\n\n if self.present:\n check_obj(self.required_fields, self.name(), obj)", "def test_get_all_tags(self):\n print(self.session.tags)\n self.assertEqual(\n len(self.session.tags),\n (3 * len(self.session.wp_post_objects)) #3 tags added by default\n )", "def freeform_tags(self, freeform_tags):\n self._freeform_tags = freeform_tags", "def test_title_tags(self):\n page, page_2 = self.get_pages()\n\n # Assign and test english tags\n title_en = page.get_title_obj(language=\"en\")\n title_en_tags = 
models.TitleTags.objects.create(extended_object=title_en)\n title_en_tags.tags.add(*self.tag_strings)\n\n self.assertTrue(title_has_tag(page, \"en\", slugify(self.tag_strings[0])))\n self.assertTrue(title_has_tag(page, \"en\", Tag.objects.get(slug=slugify(self.tag_strings[0]))))\n self.assertEqual(set(self.tag_strings), {tag.name for tag in get_title_tags(page, \"en\")})\n\n # Assign and test french tags\n title_fr = page.get_title_obj(language=\"fr\", fallback=False)\n title_fr_tags = models.TitleTags.objects.create(extended_object=title_fr)\n title_fr_tags.tags.add(*self.tag_strings_fr)\n self.assertTrue(title_has_tag(page, \"fr\", slugify(self.tag_strings_fr[0])))\n self.assertEqual(set(self.tag_strings_fr), {tag.name for tag in get_title_tags(page, \"fr\")})\n\n self.assertFalse(title_has_tag(page, \"it\", slugify(self.tag_strings_fr[0])))\n self.assertEqual(set(), {tag.name for tag in get_title_tags(page, \"it\")})", "def check_required(self):\n for argument in self.arguments:\n if argument.required:\n raise ArgumentRequiredError(argument, self.tagname)\n else:\n self.kwargs[argument.name] = argument.get_default()", "def populate_initial_valid_metadata(self):\n pass", "def validate_tag(tag=None):\n if not tag:\n raise AttributeError('Tag cannot be empty')\n\n if tag not in TAGS:\n raise ValueError('{0} tag is not supported')", "def _load_attrs_requirements(self) -> None:\n if self.element not in self.attr_requirements:\n return\n self.attr_req_ids.clear()\n for attr_req_label, attr_req_value in \\\n self.attr_requirements[self.element].items():\n self.add_attr_requirement(None, attr_req_label, attr_req_value)\n self._update_attr_list()", "def test_tags_request_title(self):\n page, page_2 = self.get_pages()\n\n # Assign tags to title\n title_tags = models.TitleTags.objects.create(extended_object=page.get_title_obj(\"en\"))\n title_tags.tags.add(*self.tag_strings)\n for lang in self.languages:\n page.publish(lang)\n\n site_id = page.node.site_id\n\n # Reload page from request and extract tags from it\n request = self.get_request(page, \"en\")\n tags_list = get_title_tags_from_request(request, page.get_public_object().pk, \"en\", site_id)\n self.assertEqual(set(self.tag_strings), {tag.name for tag in tags_list})", "def __init__(__self__, *,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)", "def __do_essential_memebers_exist__(self):\n assert self.element_type is not None\n assert self.elements is not None\n assert self.points is not None", "def verify_attrs(self):\n self.verify_namespace_attrs(self.newlibrary.wrap_namespace)", "def _setup(self) -> None:\n\t\treturn", "def _check_sanity(self, tags: List[str], n_words: int):\n n_out = 0\n\n for tag in tags:\n if (\"<\" not in tag) and (\">\" not in tag):\n n_out += 1\n\n return n_out == n_words", "def _check_sanity(self, tags: List[str], n_words: int):\n n_out = 0\n\n for tag in tags:\n if (\"<\" not in tag) and (\">\" not in tag):\n n_out += 1\n\n return n_out == n_words", "def validate_tag(self, tag_field):\n if not tag_field.data or tag_field.data == '':\n raise ValidationError('All users must be tagged')\n return True", "def set_tags(self, tags):\n self.tags = []\n for tag in [t.strip() for t in tags.split(', ')]:\n self.tags.append(Tag(title=tag))", "def handle_starttag(self, tag, attrs):\n forbidden_tags = ['data-srcset', 'srcset']\n if tag != 'a':\n attr = dict(attrs)\n self.links_text.append(attr)\n else:\n if tag not in forbidden_tags:\n attr = 
dict(attrs)\n self.links.append(attr)", "def test_search_tags(self):\n page = self.page1\n page.search_tags = \"Chutes, Ladders\"\n page.save_revision().publish()\n taglist = page.clean_search_tags\n for name in [\"Chutes\", \"Ladders\"]:\n self.assertIn(name, taglist)", "def __init__(self):\n self.tag = None", "def set_tags(self, tags):\n uniques = set()\n distinct = []\n for tag in tags:\n if tag not in uniques:\n distinct.append(tag)\n uniques.add(tag)\n self.__post_changes(distinct)", "def testRequiredAttributes(self):\n\n\t\trequiredAttributes = (\"name\",\n\t\t\t\t\t\t\t\"uiFile\",\n\t\t\t\t\t\t\t\"activated\",\n\t\t\t\t\t\t\t\"initializedUi\",\n\t\t\t\t\t\t\t\"deactivatable\")\n\n\t\tfor attribute in requiredAttributes:\n\t\t\tself.assertIn(attribute, dir(QWidgetComponentFactory()))", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def tags(self, tags):\n\n self._tags = tags", "def unknown_starttag(self, tag, attrs):\n if tag in self.valid_tags:\n self.result = self.result + '<' + tag\n for k, v in attrs:\n if (string.lower(k[0:2]) != 'on' and\n string.lower(v[0:10]) != 'javascript'):\n self.result = '%s %s=\"%s\"' % (self.result, k, v)\n endTag = '</%s>' % tag\n self.endTagList.insert(0, endTag)\n self.result = self.result + '>'", "def test_should_set_default_properties(self): # pylint: disable=invalid-name\n initialize_semver_git_tag(self.project)\n self.assertEquals(\n self.project.get_property('semver_git_tag_increment_part'), 'patch')\n self.assertEquals(\n self.project.get_property('semver_git_tag_version_prefix'), '')", "def _check_mandatory(self):\n for subtoken in self.subtokens:\n if subtoken.mandatory != 0:\n self.mandatory = np.random.uniform()\n return\n self.mandatory = 0", "def test_fails_if_required_attrs_not_included(self):\n\n with vcr.use_cassette('test/vcr_cassettes/badge_retrieval.yaml'):\n with self.assertRaises(exceptions.RequiredAttributesMissingError):\n # We need more attrs than just created_at\n Badge({'created_at': '2019-09-04T19:03:24Z'})", "def _allowed_components():\n pass", "def __validate(self):\n pass", "def _setup(self):", "def _setup(self):", "def test_avoids_bombing_on_none(self):\r\n test_value = None\r\n self.assertEqual(set(), suggest_tags(test_value))", "def prepare_node_attrs(self):", "def _init_node_attributes(self):\n assert False", "def test_empty_tags(self):\n\n # Base Schema-derived types\n schemas = [\"type: integer\",\n \"type: number\",\n \"type: boolean\",\n \"type: string\",\n \"type: 'null'\",\n \"type: timestamp\",\n \"type: timestamp-hp\",\n\n (\"type: array\\n\"\n \"items: { type: number }\"),\n\n (\"type: object\\n\"\n \"properties:\\n\"\n \" foo: { type: string }\")]\n\n for schema in schemas:\n parsed = self.parse(schema)\n self.assertEqual(parsed.tags, {})\n\n # Links and relations\n schema = self.parse(\"type: integer\\n\"\n \"links:\\n\"\n \" self: { path: $/foo }\\n\"\n \"relations:\\n\"\n \" foo:\\n\"\n \" resource: /foo\")\n\n self.assertEqual(schema.links['self'].tags, {})\n self.assertEqual(schema.relations['foo'].tags, {})\n\n # Typeless schema fragment\n schema = self.parse(\"oneOf:\\n\"\n \"- type: integer\\n\"\n \"- type: 'null'\")\n 
self.assertEqual(schema.tags, {})", "def _transform_known_tags(self):\n self.missing_known_tags = []\n\n for k, tf in self._known_tags.items():\n v = self.tags.get(k, [])\n if not v:\n self.missing_known_tags.append(k)\n continue\n\n if len(v) > 1:\n raise Exception(f\"multiple instances of tag {k}\")\n\n setattr(self, k, v[0])", "def test_clean_tags_with_valid_tags(self):\n Tag.objects.create(name='these')\n Tag.objects.create(name='are')\n Tag.objects.create(name='valid')\n Tag.objects.create(name='tags')\n form = forms.GroupForm(\n {\n 'tags': 'these,are, valid, tags',\n 'category': self.category.pk\n })\n self.assertTrue(form.is_valid())", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def _setup(self):\n pass", "def validate(self):\n super(PipelineContext, self).validate()\n for instrument, imap in self.selections.normal_items():\n self._check_nested(\"instrument\", instrument, imap)\n\n # Will raise pkg_resources.RequirementParseError if the requirement\n # is malformed:\n self.get_asdf_standard_requirement()", "def test_add_tag_invalid(self):\n payload = {'name': ''}\n res = self.client.post(TAGS_URL, payload)\n\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)", "def test_set_derived_metric_tags(self):\n pass", "def __attrs_post_init__(self):", "def setUp(self):\n self.platform = wirelesstagpy.WirelessTags(username=USERNAME, password=PASSWORD)\n self.tag_outdoor = wirelesstagpy.SensorTag(MOCK.OUTDOOR_PROBE, self.platform)\n self.platform._tags[\"fake-1\"] = self.tag_outdoor # pylint: disable=protected-access", "def _GetKnownTags(self) -> Set[str]:\n raise NotImplementedError()", "def finish(self):\n if verbose(): print(\"TIParser.finish()\")\n for tag in ['head', 'body']:\n if getattr(self, tag) == 'missing':\n self.errmsg('%s tag not found' % (tag))\n elif getattr(self, tag) != 'closed':\n self.errmsg('%s tag not complete' % (tag))\n\n if self.filetype == 'missing':\n self.errmsg(\"Filetype missing. Please add \"\n + \"'<meta name=\\\"keywords\\\" content=\\\"[ft]\\\" /> \"\n + \"where 'ft' is one of 'about', 'proj', 'member', \"\n + \"'contact', 'jobs', 'nav', 'pub', or 'software' \"\n + \"in the <head> section.\")\n elif self.title == 'missing':\n self.errmsg(\"A <title> tag is needed for this file.\", 0)\n \n if self.charset == 'missing':\n self.errmsg(\"Charset not specified. Please add \"\n + \"<meta charset='utf-8' /> \"\n + \"in the <head> section.\")\n\n if self.css == 'missing':\n self.errmsg(\"No CSS link found in <head>. Please add at least \"\n + \"<link rel='stylesheet' type='text/css' \"\n + \"href='techint_f.css' />\")\n\n if self.description == 'missing':\n self.errmsg(\"File description not found. Please add at least \"\n + '<meta name=\"description\" content=\"page description\"> '\n + 'in the <head> section.')", "def prepareDocument(self):\n self.checkSyntaxDocument()", "def _prepare(self):", "def _prepare(self):", "def tagger():" ]
[ "0.6713339", "0.62802184", "0.62078756", "0.61916", "0.6153197", "0.6120965", "0.61105335", "0.6104445", "0.60799277", "0.6070347", "0.6061206", "0.5990083", "0.5961519", "0.5878047", "0.5865956", "0.5823758", "0.5815917", "0.577706", "0.57572424", "0.57379663", "0.568212", "0.5634121", "0.5634121", "0.5634121", "0.5623735", "0.56210136", "0.5618945", "0.5616414", "0.56067383", "0.5593884", "0.5593884", "0.5591721", "0.55812085", "0.55720913", "0.5569073", "0.5568857", "0.5519136", "0.55182", "0.55170715", "0.5516317", "0.5473688", "0.547262", "0.5457431", "0.54559314", "0.545033", "0.5442986", "0.5442452", "0.54337114", "0.54324126", "0.5422746", "0.5420777", "0.5410092", "0.5410092", "0.54057485", "0.5403116", "0.5402751", "0.5402187", "0.5400707", "0.53864944", "0.5381493", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.5380098", "0.53664184", "0.5361985", "0.53385234", "0.5335514", "0.5326989", "0.5318404", "0.53146386", "0.53146386", "0.52888155", "0.5284947", "0.52813774", "0.52811927", "0.527773", "0.52614564", "0.52612406", "0.52612406", "0.52612406", "0.52612406", "0.52612406", "0.5258685", "0.52548665", "0.52545327", "0.5252029", "0.52472335", "0.5245932", "0.52348906", "0.5227923", "0.522334", "0.522334", "0.5220307" ]
0.6199197
3
function used for writing late checkin record in payslip input tree.
def get_inputs(self, contracts, date_from, date_to):
    res = super(PayslipLateCheckIn, self).get_inputs(contracts, date_to, date_from)
    late_check_in_type = self.env.ref('employee_late_check_in.late_check_in')
    contract = self.contract_id
    late_check_in_id = self.env['late.check_in'].search([
        ('employee_id', '=', self.employee_id.id),
        ('date', '<=', self.date_to),
        ('date', '>=', self.date_from),
        ('state', '=', 'approved'),
    ])
    amount = late_check_in_id.mapped('amount')
    cash_amount = sum(amount)
    if late_check_in_id:
        self.late_check_in_ids = late_check_in_id
        input_data = {
            'name': late_check_in_type.name,
            'code': late_check_in_type.code,
            'amount': cash_amount,
            'contract_id': contract.id,
        }
        res.append(input_data)
    return res
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def writeto(self, fileout):\n \n dump_pkl(self.data, fileout)", "def dump(self,out):\n if self.changed: raise StateError(_('Data changed: ')+ self.name)\n if not self.data: raise StateError(_('Data undefined: ')+self.name)\n out.write(struct.pack('4s3i',self.name,self.size,self.delFlag,self.recFlag))\n out.write(self.data)", "def write_node(self, record) -> None:\n pass", "def save_to_checkpoint(self, chkpt):\n chkpt[self.name] = self.state_dict()", "def _amber_write_input_file(self):\n logger.debug(\"Writing {}\".format(self.input))\n with open(os.path.join(self.path, self.input), \"w\") as f:\n f.write(\"{}\\n\".format(self.title))\n f.write(\" &cntrl\\n\")\n self._write_dict_to_mdin(f, self.cntrl)\n\n if self.ewald is not None:\n f.write(\" &ewald\\n\")\n self._write_dict_to_mdin(f, self.ewald)\n\n if self.cntrl[\"nmropt\"] == 1:\n if self.wt is not None:\n for line in self.wt:\n f.write(\" \"+line+\"\\n\")\n f.write(\" &wt type = 'END', /\\n\")\n if self.restraint_file is not None:\n f.write(\"DISANG = {}\\n\".format(self.restraint_file))\n f.write(\"LISTOUT = POUT\\n\\n\")\n if self.group is not None:\n f.write(\"{:s}\".format(self.group))", "def post_build(self, p, pay):\n p += pay\n if self.auxdlen != 0:\n print \"NOTICE: A properly formatted and complaint V3 Group Record should have an Auxiliary Data length of zero (0).\"\n print \" Subsequent Group Records are lost!\"\n return p", "def _write(self, out_file):\n out_file.write(' '.encode()) # pad byte\n out_file.write('{:4d}'.format(self.key).encode())\n out_file.write(self.code.encode())\n out_file.write((' '*18).encode()) # pad bytes\n out_file.write('{:12d}'.format(self.numnod).encode())\n out_file.write((' '*37).encode()) # pad bytes\n out_file.write('{:1d}'.format(self.format).encode())\n out_file.write('\\n'.encode())\n\n for node in self.nodes:\n if self.format < 2:\n out_file.write(' '.encode())\n out_file.write('-1'.encode())\n if self.format == 0:\n out_file.write('{:5d}'.format(node.number).encode())\n else:\n out_file.write('{:10d}'.format(node.number).encode())\n for i in range(3):\n out_file.write('{:12.5E}'.format(node.pos[i]).encode())\n out_file.write('\\n'.encode())\n else:\n out_file.write(struct.pack('i', node.number))\n if self.format == 2:\n out_file.write(struct.pack('fff', *node.pos))\n else:\n out_file.write(struct.pack('ddd', *node.pos))\n\n if self.format < 2:\n out_file.write(' -3\\n'.encode()) # last record for ascii only", "def write_checkpoint(self):\n self.file_checkpoint_data = open(self.path_checkpoint, \"a+\")\n array_to_write = [str(self.radious), self.type_feature, self.type_filtering, self.h_filterig]\n self.file_checkpoint_data.write(','.join(array_to_write) + \"\\n\")\n self.file_checkpoint_data.flush()", "def _write(self, out_file):\n #\n # I know this function is long, but the FRD block is long as well...\n # Splitting this into multiple functions would not help in my opinion.\n # Therefore -> shut up pylint\n # pylint: disable=too-many-branches\n # pylint: disable=too-many-statements\n #\n out_file.write(' '.encode()) # pad byte\n out_file.write('{:4d}'.format(self.key).encode())\n out_file.write(self.code.encode())\n out_file.write(self.setname.ljust(6).encode())\n out_file.write('{:12.5E}'.format(self.value).encode())\n out_file.write('{:12d}'.format(self.numnod).encode())\n out_file.write(self.text.ljust(20).encode())\n out_file.write('{:2d}'.format(self.ictype).encode())\n out_file.write('{:5d}'.format(self.numstep).encode())\n out_file.write(self.analys.ljust(10).encode())\n 
out_file.write('{:2d}'.format(self.format).encode())\n out_file.write('\\n'.encode())\n\n out_file.write(' '.encode()) # pad byte\n out_file.write('-4'.encode()) # key = -4\n out_file.write((' '*2).encode()) # pad bytes\n out_file.write(self.name.ljust(8).encode())\n if self.entities[0].ictype == 2 and self.ncomps == 3:\n out_file.write('{:5d}'.format(self.ncomps + 1).encode())\n else:\n out_file.write('{:5d}'.format(self.ncomps).encode())\n out_file.write('{:5d}'.format(self.irtype).encode())\n out_file.write('\\n'.encode()) # eol\n\n for entity in self.entities:\n out_file.write(' '.encode()) # pad byte\n out_file.write('-5'.encode())\n out_file.write((' '*2).encode()) # pad bytes\n out_file.write(entity.name.ljust(8).encode())\n out_file.write('{:5d}'.format(entity.menu).encode())\n out_file.write('{:5d}'.format(entity.ictype).encode())\n out_file.write('{:5d}'.format(entity.icind1).encode())\n if entity.ictype == 4:\n out_file.write('{:5d}'.format(entity.icind2).encode())\n elif entity.ictype == 2 and entity is self.entities[-1]:\n out_file.write('{:5d}'.format(entity.icind2).encode())\n out_file.write('{:5d}'.format(entity.iexist).encode())\n out_file.write(entity.icname.encode())\n else:\n out_file.write('{:5d}'.format(entity.iexist).encode())\n out_file.write('\\n'.encode()) # eol\n\n for result in self.results:\n if self.format < 2:\n num_lines = int(self.ncomps/(6 + 1)) + 1\n for j in range(num_lines):\n if j == 0:\n out_file.write(' -1'.encode()) # pad byte and key = -1\n if self.format == 0:\n out_file.write(\n '{:5d}'.format(result.node).encode())\n else:\n out_file.write(\n '{:10d}'.format(result.node).encode())\n else:\n out_file.write(' -2'.encode()) # pad byte and key = -2\n out_file.write(' '*(5*(self.format+1)).encode())\n k_start = j*6\n k_end = min(self.ncomps - k_start, (j+1)*6)\n for k in range(k_start, k_end):\n out_file.write(\n '{:12.5E}'.format(result.data[k]).encode())\n out_file.write('\\n'.encode()) # eol\n else:\n out_file.write(struct.pack('i', result.node))\n out_file.write(struct.pack('f'*self.ncomps, *result.data))\n\n if self.format < 2:\n out_file.write(' -3\\n'.encode()) # last record for ascii only", "def write_data():", "def write(data):", "def journal_parser(out_data):\n # TODO:\n # - change the jorunal code to upper case everytime (this is helps for\n # thouse personal accounts (no numbers).\n # - try to use more account numbers in the journal code.\n # - manage the uniqueness of the journal code before write the xml.\n my_model = 'account.journal'\n bank_data = get_bank_data()\n pattern = re.compile(r'(cta|cuenta|cc|cte|ca|no)(\\.|-)*(\\s)*', re.DOTALL)\n pattern2 = re.compile(r'(\\s|\\.)', re.DOTALL)\n value = {\n 'company_id': 'base.main_company',\n 'type': 'bank',\n }\n field_type = {\n 'name': 'str',\n 'code': 'str',\n 'type': 'str',\n 'default_credit_account_id': 'ref',\n 'default_debit_account_id': 'ref',\n 'company_id': 'ref',\n }\n\n for line in bank_data:\n value['name'] = unicode(line[-1], 'utf-8')\n value['name'] = unidecode.unidecode(value['name'])\n value['default_credit_account_id'] = line[0]\n value['default_debit_account_id'] = line[0]\n xml_id = pattern.sub('', value['name'].lower())\n xml_id = pattern2.sub('_', xml_id)\n out_record = libxml2.newNode('record')\n out_record.setProp('id', 'aj_%s_edima' % (xml_id,))\n out_record.setProp('model', my_model)\n\n value['code'] = 'BJ' + xml_id.split('_')[-1][-3:]\n\n for aj_field in value.keys():\n out_field = libxml2.newNode('field')\n out_field.setProp('name', aj_field)\n if 
field_type[aj_field] == 'str':\n out_field.setContent(value[aj_field])\n elif field_type[aj_field] == 'ref':\n out_field.setProp('ref', value[aj_field])\n else:\n assert False, ('Error. This field type is not defined yet.'\n 'define Field %s' % (aj_field,))\n out_record.addChild(out_field)\n\n out_data.addChild(out_record)\n return True", "def write_newtree(node,text):\r\n if node.left!=None: # if the left node is not equal to none\r\n text.write(\"Question:\\n\") # write the question\r\n text.write(node.data) # write the data\r\n write_newtree(node.left,text) # recursivly call to insert question to the left node\r\n write_newtree(node.right,text) # recursivly call to insert question to the right node\r\n return node # updating node\r\n\r\n else: # if the left node is equal to none\r\n text.write(\"Guess:\\n\") # write the guess\r\n text.write(node.data) # write the data\r\n return node # updating node\r", "def record(self, pop, off, dad, mom):\n if mom is not None:\n off.setInfo(0, str(self.field))\n else:\n off.setInfo(dad.info(self.field) + 1, self.field)\n return True", "def save(self, output, data):", "def dumpData(self,out):\n out.packSub0('INAM',self.id)\n out.packSub0('PNAM',self.prevId)\n out.packSub0('NNAM',self.nextId)\n if not self.isDeleted:\n out.packSub('DATA','2i4B',\n self.type, self.spDisp, self.spRank, self.spSex, self.pcRank, self.unk02)\n if self.spId: out.packSub0('ONAM',self.spId)\n if self.spRace: out.packSub0('RNAM',self.spRace)\n if self.spClass: out.packSub0('CNAM',self.spClass)\n if self.spFaction: out.packSub0('FNAM',self.spFaction)\n if self.cell: out.packSub0('ANAM',self.cell)\n if self.pcFaction: out.packSub0('DNAM',self.pcFaction)\n if self.speak: out.packSub0('SNAM',self.speak)\n if self.text: out.packSub('NAME',self.text)\n if self.qflag == 0:\n pass\n if self.qflag == 1: out.packSub('QSTN','\\x01')\n if self.qflag == 2: out.packSub('QSTF','\\x01')\n if self.qflag == 3: out.packSub('QSTR','\\x01')\n for index,test in enumerate(self.tests):\n if test: test.dumpData(out,index)\n if self.script: out.packSub('BNAM',self.script)\n if self.isDeleted: out.pack('DELE','i',0)", "def write_po(self, outputfile):\n raise NotImplementedError(\n \"Writing to this file format is not yet implemented\")", "def dump(self, mark):", "def checkpoint():", "def write_to_file(self):\n\t\tfile = open(\"states.txt\", \"w\")\n\t\t\n\t\tpointer = self.head\n\t\twhile pointer != None:\n\t\t\tfile.write(pointer.state + \"\\t\" + pointer.info)\t\n\t\t\tpointer = pointer.next\n\n\t\tfile.close()", "def _encode_trail(self):\n\t\tptrail = self.config.get('ptrail')\n\t\tif ptrail is not None:\n\t\t\treturn self._encode_bit('1', ptrail)", "def save(self,outPath=None):\n if (not self.canSave): raise StateError(_(\"Insufficient data to write file.\"))\n if not outPath:\n fileInfo = self.fileInfo\n outPath = os.path.join(fileInfo.dir,fileInfo.name)\n out = file(outPath,'wb')\n #--Tes3 Record\n self.tes3.setChanged()\n self.tes3.hedr.setChanged()\n self.tes3.hedr.numRecords = len(self.records) #--numRecords AFTER TES3 record\n self.tes3.getSize()\n self.tes3.dump(out)\n #--Other Records\n for record in self.records:\n record.getSize()\n record.dump(out)\n out.close()", "def write( data ):", "def write_preverbs(recs,fileout):\n fout = codecs.open(fileout,'w')\n n = 0\n nadj=0\n for rec in recs:\n L = rec.L # headword record number\n hw = rec.hw # the headword\n pfx = rec.pfx # the preverb prefixes\n pfxhw = rec.pfxhw\n linenum = rec.linenum\n out = \"%s:%s:%s:%s:%s\" %(L,hw,pfx,pfxhw,linenum)\n 
fout.write(out + '\\n')\n n = n + 1\n dumb_pfxhw = pfx + hw\n if dumb_pfxhw != pfxhw:\n nadj = nadj+1\n outadj = \"ADJUST %03d: %s:%s:%s:%s (dumb=%s)\" %(nadj,L,hw,pfx,pfxhw,dumb_pfxhw)\n try:\n #print outadj.encode('utf-8')\n pass\n except :\n print \"ERROR PRINTING for line=\",n,rec.line\n fout.close()\n print n,\"records written to\",fileout\n print nadj,\"prefixed verbs required sandhi adjustments\"", "def _writeRecord(self, path, name, data):\n file_path = os.path.join(path, name)\n with open(file_path, 'w') as f:\n for item in data:\n f.write(str(item)+'\\t')\n f.write('\\n')", "def dumpData(self,out):\n out.packSub0('NAME',self.id)\n if getattr(self,'isDeleted',False):\n out.packSub('DELE','i',0)\n return\n out.packSub('FNAM',self.type)\n out.packSub('FLTV','f',self.value)", "def save_exit(name, data):\n jrn_path = build_path(name)\n print(f'... saving new journal entries to {jrn_path} ...')\n with open(jrn_path, 'w') as file:\n for line in data:\n file.write(line + '\\n')\n print('... save complete ...')", "def write(self):", "def write(self):", "def dumpData(self,out):\n #--Header\n out.packSub0('NAME',self.id)\n if getattr(self,'isDeleted',False):\n out.packSub('DELE','i',0)\n return\n if self.name == 'LEVC':\n flags = 1*self.calcFromAllLevels\n etype = 'CNAM'\n else:\n flags = 1*self.calcForEachItem + 2*self.calcFromAllLevels\n etype = 'INAM'\n out.packSub('DATA','i',flags)\n out.packSub('NNAM','B',self.chanceNone)\n out.packSub('INDX','i',len(self.entries))\n #--Entries\n for pcLevel, objectId in self.entries:\n out.packSub0(etype,objectId)\n out.packSub('INTV','h',pcLevel)", "def write(self, data_pref)\n\n def _writeToAddama(self, addama_dir):", "def save_node(self):\n # save node in path2node\n if self.full_path in self.file.path2node:\n print \"** Error, created node with path twice:\\n%s\" % self.full_path\n traceback.print_stack()\n sys.exit(1)\n self.file.path2node[self.full_path] = self \n # save node in id_lookups\n id = self.sdef['id']\n ns = self.sdef['ns']\n type = self.sdef['type']\n custom = 'custom' in self.sdef and self.sdef['custom']\n if self.parent is None and self.sdef['df'] and not custom:\n # structure (not custom) created at top level, save in id_lookups\n if id not in self.file.id_lookups[ns]:\n print \"** Error: Unable to find id '%s' in id_lookups when saving node\" % id\n traceback.print_stack()\n sys.exit(1)\n if self.path not in self.file.id_lookups[ns][id]:\n print (\"** Error: Unable to find path '%s' in id_lookups when\"\n \" saving node %s\") % (self.path, id)\n print \"self.sdef['df'] is:\"\n pp.pprint (self.sdef['df'])\n traceback.print_stack()\n sys.exit(1)\n self.file.id_lookups[ns][id][self.path]['created'].append(self)\n # save node in all_nodes, either at top level (if no parent) or inside\n # mstats structure of parent node\n if self.parent is None:\n if self.path in self.file.all_nodes:\n self.file.all_nodes[self.path].append(self)\n else:\n self.file.all_nodes[self.path] = [self, ]\n else:\n if id not in self.parent.mstats:\n if custom:\n # custom node created, add id to mstats of parent\n self.parent.mstats[id] = { 'df': {}, 'type':type, 'ns': ns,\n 'created': [ self, ], 'qty':'?' 
}\n else:\n print \"** Error: Unable to find key '%s' in parent mstats\" % id\n print \"self.parent.mstats is\"\n pp.pprint (self.parent.mstats)\n traceback.print_stack()\n sys.exit(1)\n else: \n # append node to parent created mstats \n self.parent.mstats[id]['created'].append(self)", "def checkpoint(self):\n save()", "def append_to_FocusModel(inputfile, outputfile, path):\n #boolean to control whether new data have been found\n #if True, then the new data will be appended to the\n #outputfile\n newData = False\n #read in data\n inputdata = open(inputfile).readlines()\n outdata = open(path + outputfile).readlines()\n #last line in output data\n lastout = outdata[-1]\n #last MJD in output file\n oMJD = float(lastout.split()[3])\n for line in inputdata:\n if '#' not in line:\n if float(line.split()[3]) > oMJD:\n newData = True\n break\n #New data available, an update is required.\n if newData:\n #file handler\n fh = open(path + outputfile, 'a')\n #find the line that is the same\n try:\n ind = inputdata.index(lastout)\n except:\n #this except is required after year change\n ind = 0\n for x in range(ind + 1, len(inputdata)):\n print 'Adding line %s to %s' % (inputdata[x].strip(), outputfile)\n fh.write(inputdata[x])\n fh.close()\n return newData", "def update_holding_data(holding,new_call):\n holding_data = ET.fromstring(holding)\n location_field =holding_data.find(\".//datafield[@tag='852']\")\n location_field.set('ind1', ' ')\n call_subfield = holding_data.find(\".//datafield[@tag='852']/subfield[@code='h']\")\n call_subfield.text = new_call\n return ET.tostring(holding_data)", "def fix_steering(self, src_dir, pkl_name, dest_dir):\n if not os.path.exists(dest_dir):\n os.makedirs(dest_dir)\n \n dump_array = []\n data_in = open(os.path.join(src_dir, pkl_name), 'rb')\n data_array = pickle.load(data_in)\n for i, data_dict in enumerate(data_array):\n try:\n new_data_dict = self.oracle.fix(data_dict)\n except Exception as e:\n print(e)\n new_data_dict[\"flag\"] = False\n if new_data_dict.get(\"flag\", True):\n dump_array.append(new_data_dict)\n new_pkl_name = 'proc_' + pkl_name\n dump_path = os.path.join(dest_dir, new_pkl_name)\n self.pickledump(dump_array, dump_path)\n return new_pkl_name", "def save_filter_tree(self,root,good_nodes):\n date = str(datetime.now())[5:10]\n filename = \"./log/filter_tree_\" + date\n if os.path.isfile(filename):\n os.remove(filename)\n if good_nodes == []:\n with open(filename,\"a+\") as inp:\n inp.write(\"root\\n\")\n else:\n for node in good_nodes:\n node.save_node(filename)\n with open(filename,\"a+\") as inp:\n inp.write(\"root\\n\")\n \n parent = root\n curr_node = root.children[0]\n while True:\n curr_node.save_node(filename)\n if curr_node.children == []:\n while parent.next_child(curr_node) is None:\n if parent == root and parent.next_child(curr_node) is None:\n return\n curr_node = parent\n parent = curr_node.parent\n curr_node = parent.next_child(curr_node)\n else:\n parent = curr_node\n curr_node = parent.children[0]", "def write(self, out):", "def save_depfile(depdata,outname,is31=True): \n\n if outname==None:\n print('save_depfile requires a filename to save.')\n return\n try:\n fp=open(outname,'w')\n except IOError:\n print('save_depfile: invalid filename.')\n return data\n if is31:\n fp.write('Node Number = %d\\n' % len(depdata['node_num']) )\n for i in range(0,len(depdata['node_num'])):\n fp.write('%f %f %f\\n'% (depdata['x'][i],depdata['y'][i],depdata['h'][i]))\n fp.close()\n \n return", "def _write(self, out_file):\n out_file.write(' '.encode()) # 
pad byte\n out_file.write('{:4d}'.format(self.key).encode())\n out_file.write(self.code.encode())\n out_file.write((' '*18).encode()) # pad bytes\n out_file.write('{:12d}'.format(self.numelem).encode())\n out_file.write((' '*37).encode()) # pad bytes\n out_file.write('{:1d}'.format(self.format).encode())\n out_file.write('\\n'.encode())\n\n for elem in self.elems:\n if self.format < 2:\n out_file.write(' -1'.encode())\n if self.format == 0:\n out_file.write('{:5d}'.format(elem.number).encode())\n else:\n out_file.write('{:10d}'.format(elem.number).encode())\n out_file.write('{:5d}'.format(elem.type).encode())\n out_file.write('{:5d}'.format(elem.group).encode())\n out_file.write('{:5d}'.format(elem.material).encode())\n out_file.write('\\n'.encode())\n num_nodes = FRDElem.nodesPerType[elem.type]\n num_lines = int(num_nodes/(5*(3-self.format)+1))+1\n for j in range(num_lines):\n out_file.write(' -2'.encode()) # pad byte and key = -2\n k_start = j*5*(3-self.format)\n k_end = min(num_nodes, (j+1)*5*(3-self.format))\n if self.format == 0:\n for k in range(k_start, k_end):\n out_file.write(\n '{:5d}'.format(elem.nodes[k]).encode())\n else:\n for k in range(k_start, k_end):\n out_file.write(\n '{:10d}'.format(elem.nodes[k]).encode())\n out_file.write('\\n'.encode()) # eol\n else:\n out_file.write(struct.pack('i', elem.number))\n out_file.write(struct.pack('i', elem.type))\n out_file.write(struct.pack('i', elem.group))\n out_file.write(struct.pack('i', elem.material))\n out_file.write(struct.pack('i'*num_nodes, *elem.nodes))\n\n if self.format < 2:\n out_file.write(' -3\\n') # last record for ascii only", "def write(self,fout,line):\n \n def trans(a,m):\n if a == '0': return m[0]+'\\t'+m[0]\n if a == '1': return m[0]+'\\t'+m[1]\n if a == '2': return m[1]+'\\t'+m[1]\n return '0\\t0'\n\n if not line: return\n animal = line[0]\n if animal in self.ped:\n father,mother = self.ped[animal]['father'],self.ped[animal]['mother']\n sex = self.ped[animal]['sex']\n phe = self.ped[animal]['phe']\n family = self.ped[animal]['family'][0]\n else:\n father,mother,sex,phe,family = '0','0','3','-9','0'\n if len(self.mark['marklist']) > 0:\n lmark = self.mark['marklist']\n fout.write('%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n' % (family,animal,father,mother,sex,phe,\n '\\t'.join([trans(line[i+1],self.mark[name]['a1']+self.mark[name]['a2']) for i,name in enumerate(lmark)])))\n else:\n fout.write('%s\\t%s\\t%s\\t%s\\t%s\\t%s\\t%s\\n' % (family,animal,father,mother,sex,phe,'\\t'.join(line[1:])))", "def peek_write(self):\n ...", "def writeto(self,output_file,**kwargs):\n dump_pkl(self._properties,output_file,**kwargs)\n return", "def save_data(self, tuple_to_save, add_info):\n cwd = os.getcwd()\n folder_path = cwd+'/PLP_data/'\n if add_info != '':\n add_info = '_' + add_info\n \n newpath = folder_path + 'DSL' + add_info + '.pkl'\n try:\n with open(newpath, 'wb') as handle:\n pickle.dump(tuple_to_save, handle)\n except:\n return 0\n return 1", "def write(self):\r\n for prop in self.prpnames:\r\n elem = SubElement(self._root, prop)\r\n data = self.__getattribute__(prop)\r\n if self.prpnames[prop]['type'] == \"text\":\r\n elem.text = data\r\n elif self.prpnames[prop]['type'] == 'list':\r\n for x in data:\r\n SubElement(elem, 'regel').text = x\r\n elif self.prpnames[prop]['type'] == 'attr':\r\n elem.set(self.prpnames[prop]['naam'], data)\r\n tree = ElementTree(self._root)\r\n tree.write(self._fn)\r\n if not self.exists:\r\n self.exists = True", "def action_payslip_done(self):\n for recd in self.late_check_in_ids:\n recd.state = 
'deducted'\n return super(PayslipLateCheckIn, self).action_payslip_done()", "def jp_save(self, *args, **kwargs):\n self.while_switch_stack.append((\"switch\", []))\n self.add_pc(2)\n self.pb[self.pc - 2] = \"JP\", _m(self.pc)\n self.push(self.pc - 1)", "def main():\r\n node = None\r\n file = open(\"animals.txt\",\"r\") # opening the file\r\n text = open(\"output.txt\",\"w\") # writing to a new file\r\n node = make_a_question(node,file) # node equals the insert function\r\n preorder(node)\r\n playagain = input(\"Do you want to play? Enter yes or exit\") # makes the user play the game\r\n while playagain==\"yes\": # if the user wants to play\r\n animalguess(node) # play the game\r\n write_newtree(node,text) # write the new tree\r\n playagain = input(\"Do you want to play? Enter yes or exit\") # ask if play again or exit\r\n print(\"The tree was saved to a new file\") # the print statement if he exits\r\n sys.exit(0) # the exit\r", "def save_node(self, node: Node):", "def post_order_helper(self, node, alist=[], verbose=False):\n if node:\n pre_order_helper(node.left, alist, verbose)\n pre_order_helper(node.right, alist, verbose)\n if verbose:\n print(node.data)\n alist.append(node)", "def record(self, step):", "def writeState(self, saveState: ghidra.framework.options.SaveState) -> None:\n ...", "def correct_tree_leaf_names(filename_in, filename_out):\n tree = Phylo.read(filename_in, \"newick\")\n ena_regex = re.compile(r\"ENA\\|[A-Z0-9]*\\|\")\n for terminal in tree.get_terminals():\n terminal.name = terminal.name.replace(\"_\", \"@\", 1)\n terminal.name = ena_regex.sub(\"\", terminal.name)\n Phylo.write(tree, filename_out, \"newick\")", "def createckfk(self, observer, dbname, t0, field1, nfields, mk): \n\n observerint=self.mpc2internal(observer)\n instrumentint=observerint*1000\n\n with open(\"cksetupfile\", \"w\") as f:\n f.write(\"KPL/IK \\nComments describing the keywords and values \\nto follow, as well as any other pertinent \\ninformation.\\n\\\\begindata\\n\")\n f.write(\"LSK_FILE_NAME = '%s'\\n\" %(mk))\n f.write(\"\\n\")\n f.write(\"INTERNAL_FILE_NAME = 'Survey Sim Camera Orientation'\\n\")\n f.write(\"\\n\")\n f.write(\"MAKE_FAKE_SCLK = 'tmpsclk'\\n\")\n f.write(\"CK_TYPE = 3\\n\")\n f.write(\"CK_SEGMENT_ID = 'Instrument Orientation'\\n\")\n f.write(\"INSTRUMENT_ID = %i \\n\" %(instrumentint))\n f.write(\"REFERENCE_FRAME_NAME = 'J2000'\\n\")\n f.write(\"ANGULAR_RATE_PRESENT = 'NO'\\n\")\n f.write(\"\\n\")\n f.write(\"INPUT_DATA_TYPE = 'SPICE QUATERNIONS'\\n\")\n f.write(\"INPUT_TIME_TYPE = 'UTC'\\n\")\n f.write(\"MAXIMUM_VALID_INTERVAL = 60\\n\") \n f.write(\"\\n\")\n f.write(\"PRODUCER_ID = 'Survey Sim, JPL'\\n\")\n f.write(\"\\\\begintext\")\n f.close()\n\n\n self.readfields(dbname,field1,nfields, t0)\n with open(\"ckip\",\"w\") as f:\n\n for i in range(len(self.fieldRA)):\n quat=self.computerotmat(self.fieldRA[i], self.fieldDec[i], self.rotSkyPos[i])\n\n #This helps with duplicate entries. For example enigma_1189 can have same fieldID's under different propID's\n #Issue warning for duplicate time. 
Have a verbose mode for displaying that (true as default)\n if (self.fieldMJD[i] !=self.fieldMJD[i-1]):\n JD=self.fieldMJD[i]+shared.mjd2jd\n timestring= 'JD'+repr(JD)\n f.write(\"%s %f %f %f %f\\n\" %(timestring,quat[0],quat[1],quat[2],quat[3]))\n f.close()\n try:\n os.system('rm tmp.ck tmpsclk test.ck fakesclk >/dev/null')\n except:\n pass\n os.system('msopck cksetupfile ckip tmp.ck > /dev/null')\n\n os.system('rsync tmpsclk fakesclk > /dev/null')\n os.system('rsync tmp.ck test.ck > /dev/null')\n\n with open(\"tmp.fk\",\"w\") as f:\n f.write(\"\\\\begindata\\n\\n\")\n f.write(\"FRAME_CAMERA_FRAME = %i\\n\" %(instrumentint))\n f.write(\"FRAME_%i_NAME = 'CAMERA_FRAME'\\n\" %(instrumentint))\n f.write(\"FRAME_%i_CLASS = 3\\n\" %(instrumentint))\n f.write(\"FRAME_%i_CLASS_ID = %i\\n\" %(instrumentint, instrumentint))\n f.write(\"FRAME_%i_CENTER = %i\\n\" %(instrumentint, observerint))\n f.write(\"CK_%i_SCLK = %i\\n\" %(instrumentint, observerint))\n f.write(\"CK_%i_SPK = %i\\n\\n\" %(instrumentint, observerint))\n f.write(\"\\\\begintext\\n\")\n f.close()\n \n os.system('rsync tmp.fk test.fk')", "def writetif(self,outputname,):\n pass", "def write_pickle_object_to_file(self, inpobj):\n with gzip.open('%s.tmp' % self.pickle_file, 'wb') as pkl_file:\n pickle.dump(inpobj, pkl_file, pickle.HIGHEST_PROTOCOL)\n run_command('mv %s.tmp %s' % (self.pickle_file, self.pickle_file))\n return True", "def _rec(jet, parent, node_id, outers_list):\n if jet[\"tree\"][node_id, 0] == -1:\n outers_list.append(jet[\"content\"][node_id])\n else:\n _rec(jet, node_id, jet[\"tree\"][node_id, 0], outers_list)\n _rec(jet, node_id, jet[\"tree\"][node_id, 1], outers_list)\n\n return outers_list", "def handleExistingData(iIndent):\r\n\tglobal sEType, sEVar, sEData\r\n\r\n\t# If none, quit.\r\n\tif not sEType:\r\n\t\treturn\r\n\r\n\t# Skip if we have no data.\r\n\tif not sEData:\r\n\t\treturn\r\n\r\n\t# Insert tab level and comments into a header.\r\n\tsHead = (\" \" * iIndent) + \"/// \"\r\n\r\n\t# Sanitise data.\r\n\tsEData.rstrip()\r\n\r\n\t# Swap breaks for heads.\r\n\tsEData = sEData.replace(BREAK, \"\\n\" + sHead)\r\n\r\n\t# Write out the respective blocks.\r\n\tif sEType == BRIEF:\r\n\t\t#sEData = sEData.replace(\"<summary>\", \"\")\r\n\t\t#sEData = sEData.replace(\"</summary>\", \"\")\r\n\t\tpOutFile.write(sHead + \"<summary>\\n\")\r\n\t\tpOutFile.write(sHead + sEData + \"\\n\")\r\n\t\tpOutFile.write(sHead + \"</summary>\\n\")\r\n\r\n\telif sEType == PARAM:\r\n\t\tpOutFile.write(sHead + \"<param name=\\\"\" + str(sEVar) + \"\\\">\" + str(sEData) + \"</param>\\n\")\r\n\r\n\telif sEType == RETURN:\r\n\t\tpOutFile.write(sHead + \"<returns>\" + str(sEData) + \"</returns>\\n\")\r\n\r\n\telif sEType == AUTHOR:\r\n\t\tpOutFile.write(sHead + \"<author>\" + str(sEData) + \"</author>\\n\")\r\n\t\t\r\n\telif sEType == DATE:\r\n\t\tpOutFile.write(sHead + \"<date>\" + str(sEData) + \"</date>\\n\")\r\n\t\t\r\n\telif sEType == RETURN:\r\n\t\tpOutFile.write(sHead + \"<returns>\" + str(sEData) + \"</returns>\\n\")\r\n\r\n\telif sEType == REMARK:\r\n\t\tpOutFile.write(sHead + str(sEData) + \"\\n\")\r\n\r\n\t# Zap any leftover data.\r\n\tsEType = None\r\n\tsEVar = None\r\n\tsEData = \"\"", "def dumpData(self,out):\n out.packSub0('NAME',self.id)\n if getattr(self,'isDeleted',False):\n out.packSub('DELE','i',0)\n return\n out.packSub0('MODL',self.model)\n if self.title: out.packSub0('FNAM',self.title)\n out.packSub('BKDT','f4i',\n self.weight, self.value, self.isScroll, self.teaches, self.enchantPoints)\n if self.script: 
out.packSub0('SCRI',self.script)\n if self.icon: out.packSub0('ITEX',self.icon)\n if self.text: out.packSub0('TEXT',self.text)\n if self.enchant: out.packSub0('TEXT',self.enchant)", "def save(self, output, data):\n pass", "def pop_write(self):\n ...", "def __savePreProcessedData(self):\n np.savetxt(self.X_filename, self.X, delimiter=',')\n np.savetxt(self.y_filename, self.le.fit_transform(self.y), delimiter=',')\n #Need to save the label Enconder to inverse transform later\n joblib.dump(self.le, self.le_filename)\n\n print(\"Saved X and y\")", "def test_serialize(self) :\n \tnode = Node(\"A\")\n \t# Create some application data for the node\n \tnode.addAppData(\"record1\",\"Record1 data\", Node.ALL, Node.ALL )\n \tnode.addAppData(\"record2\",\"Record2 data\", Node.ALL, Node.GENERIC )\n \tnode.serialize((Node.ALL, Node.ALL))\n\n\t\tself.assertEqual(len(node.appData), 2)\n\t\t# Check if their dirty bits have been cleared \n\t\tself.assertEqual(node.appData[0].dirtyBit, 0)\n\t\tself.assertEqual(node.appData[1].dirtyBit, 0)\n\n\t\tself.assertEqual(node.store[\"record1\"].lastSavedByInstance, \"A\")\n\t\tself.assertEqual(node.store[\"record1\"].lastSavedByCounter, 1)\n\t\tself.assertEqual(node.store[\"record1\"].lastSavedByHistory, {\"A\":1})\n\t\tself.assertEqual(node.store[\"record1\"].partitionFacility, Node.ALL)\n\t\tself.assertEqual(node.store[\"record1\"].partitionUser, Node.ALL)\n\n\t\tself.assertEqual(node.store[\"record2\"].lastSavedByInstance, \"A\")\n\t\tself.assertEqual(node.store[\"record2\"].lastSavedByCounter, 2)\n\t\tself.assertEqual(node.store[\"record2\"].lastSavedByHistory, {\"A\":2})\n\t\tself.assertEqual(node.store[\"record2\"].partitionFacility, Node.ALL)\n\t\tself.assertEqual(node.store[\"record2\"].partitionUser, Node.GENERIC)\n\n\t\t# Create data for different facilities and users\n \tnode.addAppData(\"record3\",\"Record3 data\", \"Facility1\", Node.GENERIC )\n \tnode.addAppData(\"record4\",\"Record4 data\", \"Facility1\", \"UserX\" )\n \tnode.addAppData(\"record5\",\"Record5 data\", \"Facility1\", \"UserY\" )\n \tnode.addAppData(\"record6\",\"Record6 data\", \"Facility2\", \"UserX\" )\n \tnode.addAppData(\"record7\",\"Record7 data\", Node.ALL, Node.ALL)\n\n\t\tself.assertRaises(ValueError, lambda:node.serialize((Node.ALL, \"UserX\")) )\n\n\t\tnode.serialize((\"Facility3\", \"UserZ\"))\n\t\t# Length of appData nd store should not change after serialization\n\t\tself.assertEqual(len(node.store), 2)\n\t\tself.assertEqual(len(node.appData), 7)\n\n\t\tnode.serialize((\"Facility1\", \"UserX\"))\n\t\tself.assertEqual(len(node.store), 3)\n\t\tself.assertEqual(len(node.appData), 7)\n\t\tself.assertEqual(node.appData[3].dirtyBit, 0)\n\t\tself.assertEqual(node.store[\"record4\"].lastSavedByInstance, \"A\")\n\t\tself.assertEqual(node.store[\"record4\"].lastSavedByCounter, 3)\n\t\tself.assertEqual(node.store[\"record4\"].lastSavedByHistory, {\"A\":3})\n\t\tself.assertEqual(node.store[\"record4\"].partitionFacility, \"Facility1\")\n\t\tself.assertEqual(node.store[\"record4\"].partitionUser, \"UserX\")\n\n\t\tnode.serialize((\"Facility1\", Node.ALL))\n\t\tself.assertEqual(len(node.store), 5)\n\t\tself.assertEqual(len(node.appData), 7)\n\t\tself.assertEqual(node.appData[2].dirtyBit, 0)\n\t\tself.assertEqual(node.appData[4].dirtyBit, 0)\n\t\tself.assertEqual(node.store[\"record3\"].lastSavedByHistory, {\"A\":4})\n\t\tself.assertEqual(node.store[\"record5\"].lastSavedByHistory, {\"A\":5})\n\n\t\tnode.serialize((Node.ALL, Node.ALL))\n\t\tself.assertEqual(len(node.store), 
7)\n\t\tself.assertEqual(len(node.appData), 7)\n\t\tself.assertEqual(node.appData[5].dirtyBit, 0)\n\t\tself.assertEqual(node.appData[6].dirtyBit, 0)\n\t\tself.assertEqual(node.store[\"record6\"].lastSavedByHistory, {\"A\":6})\n\t\tself.assertEqual(node.store[\"record7\"].lastSavedByHistory, {\"A\":7})", "def write(self,data): \n if not os.path.exists(self.output_dir):\n os.makedirs(self.output_dir)\n\n # We will store these in a separate file and link them to the level2s\n fname = data.filename.split('/')[-1]\n \n if os.path.exists(self.outfile):\n output = h5py.File(self.outfile,'a')\n else:\n output = h5py.File(self.outfile,'w')\n\n # Set permissions and group\n if self.set_permissions:\n try:\n os.chmod(self.outfile,0o664)\n shutil.chown(self.outfile, group=self.permissions_group)\n except PermissionError:\n self.logger(f'{fname}:{self.name}: Warning, couldnt set the file permissions.')\n\n # Store datasets in root\n data_out = {'tod':self.all_tod,\n 'weights':self.all_weights,\n 'mask':self.all_mask,\n 'cal_factors':self.all_cal_factors,\n 'frequency':self.all_frequency,\n 'auto_rms':self.all_auto}\n\n for dname, dset in data_out.items():\n if dname in output:\n del output[dname]\n output.create_dataset(dname, data=dset)\n\n output.attrs['version'] = __level3_version__\n output['cal_factors'].attrs['source'] = self.cal_source\n output['cal_factors'].attrs['calibrator_obsid'] = self.nearest_calibrator\n\n output.close()\n \n if self.level3 in data.keys():\n del data[self.level3]\n data[self.level3] = h5py.ExternalLink(self.outfile,'/')", "def write(self, path):\n\n with open(path, 'w') as file:\n file.write(f\"\\n{self.subject}\")\n file.write(8 * '\\n')\n file.write(\"0\")\n file.write(8 * '\\n')\n file.write(self.data.date[0].strftime(\"%d.%m.%Y\"))\n file.write(7 * '\\n')\n file.write(\"Unknown Line\")\n file.write(26 * '\\n')\n file.write(self.valid_measurements + \"\\n\")\n printing_df = self.data.drop(columns=['date', 'time'])\n printing_df['hours'] = self.data.time.map(lambda x: x.strftime(\"%H\"))\n printing_df['minutes'] = self.data.time.map(lambda x: x.strftime(\"%M\"))\n order = ['hours', 'minutes', 'SYS(mmHg)', 'DIA(mmHg)', 'UNKNOW_1', 'UNKNOW_2', 'CODE', 'UNKNOW_3']\n printing_df = printing_df[order]\n printing_df.fillna(-9999, inplace=True)\n printing_df.replace('EB', -9998, inplace=True)\n printing_df.replace('AB', -9997, inplace=True)\n printing_df[['SYS(mmHg)', 'DIA(mmHg)', 'UNKNOW_1', 'UNKNOW_2', 'CODE', 'UNKNOW_3']] = printing_df[\n ['SYS(mmHg)', 'DIA(mmHg)', 'UNKNOW_1', 'UNKNOW_2', 'CODE', 'UNKNOW_3']].astype(int).astype(str)\n printing_df.replace('-9999', '\"\"', inplace=True)\n printing_df.replace('-9998', '\"EB\"', inplace=True)\n printing_df.replace('-9997', '\"AB\"', inplace=True)\n printing_df.to_csv(file, header=None, index=None, quoting=csv.QUOTE_NONE, line_terminator='\\n')\n\n xml_node = ET.Element('XML')\n xml_node.extend(self._dict_to_etree(self.metadata))\n xml_line = ET.tostring(xml_node, encoding=\"unicode\")\n file.write(xml_line)", "def __write_note(self, handle, nbr):\n try:\n note = self.database.get_note_from_handle(handle)\n self.__write_row(nbr, handle, note)\n except:\n note = \"NOT FOUND\"\n self.__write_row(nbr, handle, note)", "def saveState(fileid,data):\n global states\n info = data.get('info')\n fn = fileid + \".xml\"\n state = etree.Element(\"state\")\n # TODO: put this in a global variable, and make a function to populate it from the DTD.\n tags = [\"name\",\"start\",\"scue\",\"end\",\"ecue\",\"vital\",\"polit\",\"culture\",\"history\", 
\"geography\",\"econ\",\"demo\",\"events\",\"cities\",\"aspects\",\"update\"]\n for tag in tags:\n if tag == \"cities\":\n nodes = info.get(\"cities\")\n if nodes is not None:\n for node in nodes.keys():\n if nodes[node].get(\"name\"):\n connected = etree.Element(\"city\")\n value = info['cities'][node].get(\"name\")\n if value is None: value = ['',False]\n etree.SubElement(connected,\"name\").text = value[0]\n value = node\n if value is None: value = ''\n etree.SubElement(connected,\"file\").text = value\n value = info['cities'][node].get(\"note\")\n if value is not None and len(value[0]) > 0: etree.SubElement(connected,\"note\").text = value[0]\n state.append(connected)\n else:\n print \"A required tag is missing from city %s.\" % node\n else:\n print \"no cities found\"\n elif tag == \"events\":\n nodes = info.get(\"m\")\n nodes = nodes.get(\"events\")\n if nodes is not None:\n events = etree.Element(\"events\")\n for node in nodes.keys():\n if nodes[node].get(\"event\"):\n connected = etree.Element(\"mstone\")\n value = info['m']['events'][node].get(\"event\")\n if value is None: value = ['',False]\n etree.SubElement(connected,\"event\").text = value[0]\n value = info['m']['events'][node].get(\"date\")\n if value is None: value = ['',False]\n etree.SubElement(connected,\"date\").text = value[0]\n events.append(connected)\n else:\n print \"A required tag is missing from event %s.\" % node\n state.append(events)\n else:\n print \"no events found\"\n# 820 #\n elif tag == \"aspects\":\n nodes = info.get(\"aspects\")\n if nodes is not None:\n aspects = etree.Element(\"aspects\")\n for node in sorted(nodes.keys()):\n value = nodes[node]\n if value is None: value = ['',False]\n etree.SubElement(aspects,\"text\").text = value[0]\n state.append( aspects )\n else:\n print \"no aspects found\"\n\n elif tag == \"update\":\n etree.SubElement(state,tag).text = common.skrTimeStamp(config['datestyle'])\n else:\n value = info.get(tag)\n if value is None: value = ['',False]\n etree.SubElement(state,tag).text = value[0]\n r = saveXMLtree(state,\"state\",fileid)\n if r:\n try:\n states[fileid]['changed'] = False\n except KeyError:\n printPretty(states)\n return r", "def logbook_save(lb):\n return IMPL.logbook_save(lb)", "def write_nml(self, nml_write_path):\n\n # If the object does not have any trees, construct an empty tree before writing to enable webKnossos import\n if self.num_trees() == 0:\n self.add_tree()\n\n nml = self._skeleton_to_nml()\n with open(nml_write_path, \"wb\") as f:\n wknml.write_nml(f, nml)", "def __save_node(self):\n print(self._encoder.encode(self._current_node))\n self._count += 1", "def add_line_info(root_node):\n class AddLineNumbers(BottomUpVisitor):\n def __init__(self):\n BottomUpVisitor.__init__(self, strict_line_order=True, make_unique=True)\n def visit_one_node(self, node, lineno=None):\n# print(node, lineno, getattr(node, 'lineno', None))\n if not hasattr(node, 'lineno'):\n node.lineno = lineno\n else:\n if node.lineno != lineno:\n print(node, lineno, node.lineno)\n print(astor.dump(root_node))\n assert False\n BottomUpVisitor.visit_one_node(self, node, lineno)\n AddLineNumbers().visit(root_node)", "def pdbout(self, pdbout):\n self._pdbout = pdbout", "def save_record(record):\n record. 
save_details()", "def save_to_MTFIT_style_file(MTs, MTp, nlloc_hyp_filename, inversion_type, outdir, MTp_absolute=[], shift_idxs=[]):\n # Get uid and stations data:\n uid, stations = get_event_uid_and_station_data_MTFIT_FORMAT_from_nonlinloc_hyp_file(nlloc_hyp_filename)\n # Write all data to output dict:\n out_dict = {}\n out_dict[\"MTs\"] = MTs\n out_dict[\"MTp\"] = MTp\n out_dict[\"uid\"] = uid\n out_dict[\"stations\"] = stations\n if len(MTp_absolute)>0:\n out_dict[\"MTp_absolute\"] = MTp_absolute\n if len(shift_idxs)>0:\n out_dict[\"shift_idxs\"] = shift_idxs\n # And save to file:\n out_fname = outdir+\"/\"+uid+\"_FW_\"+inversion_type+\".pkl\"\n print(\"Saving FW inversion to file:\", out_fname)\n pickle.dump(out_dict, open(out_fname, \"wb\"))", "def save_checkpoint(self):\n checkpoin_path = self.get_checkpoint_path()\n _logger.info('Save checkpoint ignored by tuner, checkpoint path: %s', checkpoin_path)", "def writeLMIn(self, line1, line2, line3):\n\n LMIn = open(self.LMInputFName, 'w')\n\n if self.rawDataOutputFlag:\n line2 += '-R'\n\n LMIn.write(line1 + '\\n' + line2 + '\\n' + line3)\n LMIn.close()", "def _post_training(self):\n self._write_state_key()", "def save_check_point(self, uid, msg):\n key = \"%s_%s_%s\" % (self.opt_pop3_server,\n self.opt_global_account[\"username\"], uid)\n date = email.utils.mktime_tz(email.utils.parsedate_tz(msg.get('Date')))\n value = \"input=dmarc_pop, server=%s, username=%s, uid=%s, timestamp_utc=%d, subject='%s'\" % (\n self.opt_pop3_server, self.opt_global_account[\"username\"], uid, date, msg.get('Subject'))\n try:\n self.helper.save_check_point(key, value)\n except Exception as e:\n raise Exception(\n \"Error saving checkpoint data with with exception %s\" %\n str(e))", "def save(self, fh):\n\t\t#first write head of file\n\t\tfh.write('+INFO:IEEE 802.15.4 ZigBeeSniffer Log.\\r\\n')\n\t\tfh.write('+DATE:' + time.ctime() + '\\r\\n')\n\t\tfor packet in self.packets:\n\t\t\tload = [len(packet.load)] + packet.load\n\t\t\tload = map(str, map(hex, load))\t# int->hex->str\n\t\t\tload = map(lambda x: ('0'+x[2:])[-2:].upper(), load)\t# 0xa -> 0A\n\t\t\tload = ','.join(load)\t\t\t\t\t\t# glue it back\n\t\t\ttimee = ','.join(packet.time)\n\t\t\tfh.write('+FRAM:' + timee + ',' + load + '\\r\\n')\n\t\t\t\n\t\tfor node in self.nodes.nodes:\n\t\t\tpan = node.panId[2:] # cut '0x' from begining\n\t\t\taddr = node.address[2:] # cut '0x' from begining\n\t\t\tline = '+NODE:\"%s\"' % node.name\n\t\t\tline += ',%s,%s' % (pan[2:], pan[:2]) if pan else ',??,??' 
# PAN-ID\n\t\t\t#now split address into pairs (bytes):\n\t\t\tpairs = map(''.join, zip(iter(addr), iter(addr)))\n\t\t\tfor pair in reversed(pairs): \n\t\t\t\tline += ',%s' % pair\n\t\t\tfor i in range(8 - len(pairs)):\n\t\t\t\tline += ',??'\n\t\t\tline += '\\r\\n'\n\t\t\tfh.write(line)\n\t\t\n\t\tself.changed(False)", "def save(self, record):\n pass", "def _write_indom(parameters):\n if parameters[\"rocks_order\"]:\n order = parameters[\"rocks_order\"]\n for rock in parameters[\"rocks\"].keys():\n if rock not in order:\n order.append(rock)\n else:\n order = parameters[\"rocks\"].keys()\n\n # Formats\n fmt = block_to_format[\"INDOM\"]\n fmt1 = str2format(fmt[5])\n fmt2 = str2format(fmt[0])\n\n out = []\n for k in order:\n if \"initial_condition\" in parameters[\"rocks\"][k]:\n data = parameters[\"rocks\"][k][\"initial_condition\"]\n if any(x is not None for x in data):\n # Record 1\n values = [k]\n out += write_record(values, fmt1)\n\n # Record 2\n n = min(4, len(data))\n values = list(data[:n])\n out += write_record(values, fmt2)\n\n # Record 3\n if len(data) > 4:\n values = list(data[4:])\n out += write_record(values, fmt2)\n\n return out", "def save_bgn(self):\n self.savedata = ''", "def write_binary(file_name, global_var, instr_ls, hw_details, verb= False):\n \n #-------------------------------------------\n # Variable declaration\n #-------------------------------------------\n BIT_L = hw_details.n_bits # from hw_details \n TREE_DEPTH = hw_details.tree_depth \n BANK_DEPTH = hw_details.reg_bank_depth \n MEM_ADDR_L = hw_details.mem_addr_bits \n N_TREE = hw_details.n_tree\n\n N_IN_PER_TREE = (2**TREE_DEPTH) \n N_IN = N_TREE * N_IN_PER_TREE \n N_ALU_PER_TREE = ((2**TREE_DEPTH)-1) \n N_ALU = N_TREE * N_ALU_PER_TREE \n N_BANKS = N_IN \n PIPE_STAGES = TREE_DEPTH + 2\n\n \n OPCODE_L = 4 # from opcodes package\n NOP_STALL = 0\n NOP = 1\n BB = 2\n CP_8 = 3\n ST = 4\n LD = 5\n ST_4 = 6\n ST_8 = 7\n CP_2 = 8\n CP_4 = 9\n CP = 10\n\n SUM = 0 # typedefs\n PROD = 1\n PASS_0= 2 \n PASS_1= 3 \n alu_mode_enum_t = 4\n\n ARITH_L= clog2(alu_mode_enum_t)\n BANK_ADDR_L = clog2(BANK_DEPTH) # from instr_decd_pkg\n CROSSBAR_SEL_L = clog2(N_BANKS) \n BANK_WR_SEL_L= clog2(TREE_DEPTH + 1)\n\n NOP_OPCODE = NOP \n NOP_L = OPCODE_L \n\n NOP_STALL_OPCODE = NOP_STALL\n NOP_STALL_L = OPCODE_L\n\n LD_OPCODE = LD \n LD_EN_S = OPCODE_L \n LD_EN_L = N_BANKS \n LD_MEM_ADDR_S = LD_EN_S + LD_EN_L \n LD_MEM_ADDR_L = MEM_ADDR_L \n LD_L = OPCODE_L + LD_MEM_ADDR_L + LD_EN_L \n\n ST_OPCODE = ST \n ST_EN_S = OPCODE_L \n ST_EN_L = N_BANKS \n ST_BANK_RD_ADDR_S = ST_EN_S + ST_EN_L \n ST_BANK_RD_ADDR_L = BANK_ADDR_L * N_BANKS \n ST_MEM_ADDR_S = ST_BANK_RD_ADDR_S + ST_BANK_RD_ADDR_L \n ST_MEM_ADDR_L = MEM_ADDR_L \n ST_L = OPCODE_L + ST_BANK_RD_ADDR_L + ST_MEM_ADDR_L + ST_EN_L \n\n ST_4_OPCODE = ST_4 \n ST_4_EN_S = OPCODE_L \n ST_4_EN_L = N_BANKS \n ST_4_BANK_RD_ADDR_S = ST_4_EN_S + ST_4_EN_L \n ST_4_BANK_RD_ADDR_L = BANK_ADDR_L * 4 \n ST_4_MEM_ADDR_S = ST_4_BANK_RD_ADDR_S + ST_4_BANK_RD_ADDR_L \n ST_4_MEM_ADDR_L = MEM_ADDR_L \n ST_4_L = OPCODE_L + ST_4_BANK_RD_ADDR_L + ST_4_MEM_ADDR_L + ST_4_EN_L \n\n ST_8_OPCODE = ST_8 \n ST_8_EN_S = OPCODE_L \n ST_8_EN_L = N_BANKS \n ST_8_BANK_RD_ADDR_S = ST_8_EN_S + ST_8_EN_L \n ST_8_BANK_RD_ADDR_L = BANK_ADDR_L * 8 \n ST_8_MEM_ADDR_S = ST_8_BANK_RD_ADDR_S + ST_8_BANK_RD_ADDR_L \n ST_8_MEM_ADDR_L = MEM_ADDR_L \n ST_8_L = OPCODE_L + ST_8_BANK_RD_ADDR_L + ST_8_MEM_ADDR_L + ST_8_EN_L \n\n CP_OPCODE = CP;\n CP_WR_EN_S = OPCODE_L;\n CP_WR_EN_L = N_BANKS;\n CP_RD_EN_S = CP_WR_EN_S + CP_WR_EN_L;\n CP_RD_EN_L 
= N_BANKS;\n CP_BANK_RD_ADDR_S = CP_RD_EN_S + CP_RD_EN_L;\n CP_BANK_RD_ADDR_L = BANK_ADDR_L * N_BANKS;\n CP_CROSS_SEL_S = CP_BANK_RD_ADDR_S + CP_BANK_RD_ADDR_L;\n CP_CROSS_SEL_L = CROSSBAR_SEL_L * N_BANKS;\n CP_INVLD_S = CP_CROSS_SEL_S + CP_CROSS_SEL_L;\n CP_INVLD_L = N_BANKS;\n CP_L = OPCODE_L + CP_WR_EN_L + CP_RD_EN_L + CP_BANK_RD_ADDR_L + CP_CROSS_SEL_L + CP_INVLD_L;\n\n CP_8_OPCODE = CP_8 \n CP_8_EN_S = OPCODE_L \n CP_8_EN_L = N_BANKS \n CP_8_BANK_RD_ADDR_S = CP_8_EN_S + CP_8_EN_L \n CP_8_BANK_RD_ADDR_L = BANK_ADDR_L * 8 \n CP_8_CROSS_SEL_S = CP_8_BANK_RD_ADDR_S + CP_8_BANK_RD_ADDR_L \n CP_8_CROSS_SEL_L = CROSSBAR_SEL_L * 8 \n CP_8_INVLD_S = CP_8_CROSS_SEL_S + CP_8_CROSS_SEL_L \n CP_8_INVLD_L = 8 \n CP_8_L = OPCODE_L + CP_8_EN_L + CP_8_BANK_RD_ADDR_L + CP_8_CROSS_SEL_L + CP_8_INVLD_L \n\n CP_4_OPCODE = CP_4 \n CP_4_EN_S = OPCODE_L \n CP_4_EN_L = N_BANKS \n CP_4_BANK_RD_ADDR_S = CP_4_EN_S + CP_4_EN_L \n CP_4_BANK_RD_ADDR_L = BANK_ADDR_L * 4 \n CP_4_CROSS_SEL_S = CP_4_BANK_RD_ADDR_S + CP_4_BANK_RD_ADDR_L \n CP_4_CROSS_SEL_L = CROSSBAR_SEL_L * 4 \n CP_4_INVLD_S = CP_4_CROSS_SEL_S + CP_4_CROSS_SEL_L \n CP_4_INVLD_L = 4 \n CP_4_L = OPCODE_L + CP_4_EN_L + CP_4_BANK_RD_ADDR_L + CP_4_CROSS_SEL_L + CP_4_INVLD_L \n\n CP_2_OPCODE = CP_2 \n CP_2_EN_S = OPCODE_L \n CP_2_EN_L = N_BANKS \n CP_2_BANK_RD_ADDR_S = CP_2_EN_S + CP_2_EN_L \n CP_2_BANK_RD_ADDR_L = BANK_ADDR_L * 2 \n CP_2_CROSS_SEL_S = CP_2_BANK_RD_ADDR_S + CP_2_BANK_RD_ADDR_L \n CP_2_CROSS_SEL_L = CROSSBAR_SEL_L * 2 \n CP_2_INVLD_S = CP_2_CROSS_SEL_S + CP_2_CROSS_SEL_L \n CP_2_INVLD_L = 2 \n CP_2_L = OPCODE_L + CP_2_EN_L + CP_2_BANK_RD_ADDR_L + CP_2_CROSS_SEL_L + CP_2_INVLD_L \n\n BB_OPCODE = BB \n BB_INVLD_S = OPCODE_L \n BB_INVLD_L = N_BANKS \n BB_BANK_RD_ADDR_S = BB_INVLD_S + BB_INVLD_L \n BB_BANK_RD_ADDR_L = BANK_ADDR_L * N_BANKS \n BB_CROSS_SEL_S = BB_BANK_RD_ADDR_S + BB_BANK_RD_ADDR_L \n BB_CROSS_SEL_L = CROSSBAR_SEL_L * N_BANKS \n BB_ARITH_OP_S = BB_CROSS_SEL_S + BB_CROSS_SEL_L \n BB_ARITH_OP_L = ARITH_L * N_ALU \n BB_BANK_WR_SEL_S = BB_ARITH_OP_S + BB_ARITH_OP_L \n BB_BANK_WR_SEL_L = BANK_WR_SEL_L * N_BANKS \n BB_L = OPCODE_L + BB_INVLD_L + BB_BANK_RD_ADDR_L + BB_CROSS_SEL_L + BB_ARITH_OP_L + BB_BANK_WR_SEL_L\n \n INSTR_L = max(BB_L, NOP_L, NOP_STALL_L, LD_L, ST_L, ST_4_L, ST_8_L, CP_L, CP_8_L)\n MIN_COMPRESSION_FACTOR = 4; \n MAX_COMPRESSED_BANKS = N_BANKS/MIN_COMPRESSION_FACTOR\n\n #-------------------------------------------\n # Actual instr_ls processing\n #-------------------------------------------\n \n # instr binary for the whole instr_ls\n i_bin= \"\" \n\n # replace \"sh\" instructions to compatible shorter sh instructions types\n use_shorter_st_instr(hw_details, MAX_COMPRESSED_BANKS, instr_ls)\n use_shorter_sh_instr(hw_details, MAX_COMPRESSED_BANKS, instr_ls)\n\n for instr in instr_ls:\n temp_instr_idx = len(i_bin)/INSTR_L\n start_temp_idx= 24\n last_temp_idx= 33\n\n curr_bin= \"\"\n # instr is an object of type common_classes.instr\n if instr.is_type('nop'):\n curr_bin= bin_update(curr_bin, NOP_OPCODE, OPCODE_L)\n elif instr.is_type('nop_stall'):\n curr_bin= bin_update(curr_bin, NOP_STALL_OPCODE, OPCODE_L)\n elif instr.is_type('ld') or instr.is_type('ld_sc'):\n curr_bin= bin_update(curr_bin, LD_OPCODE, OPCODE_L)\n\n # ld_en\n ld_en= 0\n for io_node_obj in list(instr.node_details_dict.values()):\n ld_en |= (1<<(io_node_obj.bank)) \n curr_bin= bin_update(curr_bin, ld_en, LD_EN_L)\n \n # mem addr\n assert in_range(instr.mem_addr, LD_MEM_ADDR_L)\n curr_bin= bin_update(curr_bin, instr.mem_addr, LD_MEM_ADDR_L)\n\n # if 
temp_instr_idx > start_temp_idx and temp_instr_idx < last_temp_idx:\n # printcol(temp_instr_idx, 'blue')\n # printcol('ld', 'red')\n # print(bin(ld_en))\n # for io_node_obj in list(instr.node_details_dict.values()):\n # print(io_node_obj.bank, io_node_obj.pos)\n\n elif instr.is_type('st') or instr.is_type('st_8') or instr.is_type('st_4'):\n if instr.is_type('st_8'):\n OPCODE = ST_8_OPCODE\n EN_L = ST_8_EN_L\n BANK_RD_ADDR_L = ST_8_BANK_RD_ADDR_L\n CURR_MEM_ADDR_L= ST_8_MEM_ADDR_L\n TOTAL_L = ST_8_L\n elif instr.is_type('st_4'):\n OPCODE = ST_4_OPCODE\n EN_L = ST_4_EN_L\n BANK_RD_ADDR_L = ST_4_BANK_RD_ADDR_L\n CURR_MEM_ADDR_L= ST_4_MEM_ADDR_L\n TOTAL_L = ST_4_L\n elif instr.is_type('st'):\n OPCODE = ST_OPCODE\n EN_L = ST_EN_L\n BANK_RD_ADDR_L = ST_BANK_RD_ADDR_L\n CURR_MEM_ADDR_L= ST_MEM_ADDR_L\n TOTAL_L = ST_L\n else:\n assert 0\n\n curr_bin= bin_update(curr_bin, OPCODE, OPCODE_L)\n\n # st_en\n # st bank addr\n st_en= 0\n all_bank_addr= 0\n assert len(instr.node_set) == len(instr.node_details_dict)\n sorted_nodes= list(sorted(list(instr.node_set), key= lambda x : instr.node_details_dict[x].bank))\n # for io_node_obj in list(instr.node_details_dict.values()):\n for idx, node in enumerate(sorted_nodes):\n io_node_obj= instr.node_details_dict[node]\n bank= io_node_obj.bank\n pos= io_node_obj.pos\n st_en |= (1 << bank) \n if instr.is_type('st'):\n all_bank_addr |= pos << (bank * BANK_ADDR_L)\n else:\n all_bank_addr |= pos << (idx * BANK_ADDR_L)\n\n curr_bin= bin_update(curr_bin, st_en, EN_L)\n curr_bin= bin_update(curr_bin, all_bank_addr, BANK_RD_ADDR_L)\n \n # mem addr\n assert in_range(instr.mem_addr, CURR_MEM_ADDR_L)\n curr_bin= bin_update(curr_bin, instr.mem_addr, CURR_MEM_ADDR_L)\n\n assert len(curr_bin) <= TOTAL_L, f\"{len(curr_bin), TOTAL_L}\"\n \n # TODO: Following instruction types\n# elif instr.is_type('st_2'):\n# elif instr.is_type('st_4'):\n# elif instr.is_type('sh_2'):\n# elif instr.is_type('sh_4'):\n \n elif instr.is_type('sh'):\n curr_bin= bin_update(curr_bin, CP_OPCODE, OPCODE_L)\n\n # wr_en and rd_en\n # bank addr, crossbar sel, invalids\n wr_en= 0\n rd_en= 0\n all_bank_addr= 0\n all_crossbar_sel= 0\n all_invalids= 0\n for node, src_dst_tup in list(instr.sh_dict_bank.items()):\n src_bank= src_dst_tup[0]\n dst_bank= src_dst_tup[1]\n rd_en |= (1<< src_bank) \n wr_en |= (1<< dst_bank) \n\n src_pos = instr.sh_dict_pos[node][0]\n all_bank_addr |= src_pos << (src_bank * BANK_ADDR_L)\n\n if node in instr.invalidate_node_set:\n all_invalids |= (1 << src_bank)\n\n all_crossbar_sel |= src_bank << (dst_bank * CROSSBAR_SEL_L)\n\n curr_bin= bin_update(curr_bin, wr_en, CP_WR_EN_L)\n curr_bin= bin_update(curr_bin, rd_en, CP_RD_EN_L)\n curr_bin= bin_update(curr_bin, all_bank_addr, CP_BANK_RD_ADDR_L)\n curr_bin= bin_update(curr_bin, all_crossbar_sel, CP_CROSS_SEL_L)\n curr_bin= bin_update(curr_bin, all_invalids, CP_INVLD_L)\n\n # elif instr.is_type('sh') or instr.is_type('sh_8'):\n elif instr.is_type('sh_8') or instr.is_type('sh_4') or instr.is_type('sh_2'):\n # instr.print_details()\n\n # if temp_instr_idx > start_temp_idx and temp_instr_idx < last_temp_idx:\n # print(temp_instr_idx, 'cp')\n if instr.is_type('sh_8'):\n OPCODE = CP_8_OPCODE\n EN_L = CP_8_EN_L\n BANK_RD_ADDR_L = CP_8_BANK_RD_ADDR_L\n CROSS_SEL_L = CP_8_CROSS_SEL_L\n INVLD_L = CP_8_INVLD_L\n elif instr.is_type('sh_4'):\n OPCODE = CP_4_OPCODE\n EN_L = CP_4_EN_L\n BANK_RD_ADDR_L = CP_4_BANK_RD_ADDR_L\n CROSS_SEL_L = CP_4_CROSS_SEL_L\n INVLD_L = CP_4_INVLD_L\n elif instr.is_type('sh_2'):\n OPCODE = CP_2_OPCODE\n EN_L = CP_2_EN_L\n 
BANK_RD_ADDR_L = CP_2_BANK_RD_ADDR_L\n CROSS_SEL_L = CP_2_CROSS_SEL_L\n INVLD_L = CP_2_INVLD_L\n else:\n assert 0\n\n curr_bin= bin_update(curr_bin, OPCODE, OPCODE_L)\n\n assert len(instr.sh_dict_bank) <= 8\n\n # cp_en\n cp_en= 0\n for src_dst_tup in list(instr.sh_dict_bank.values()):\n dst_bank= src_dst_tup[1]\n cp_en |= (1<< dst_bank) \n curr_bin= bin_update(curr_bin, cp_en, EN_L)\n \n \n # -- sort according to dst banks\n dst_to_node_dict={}\n dst_to_invl_dict={}\n for node, src_dst_tup in list(instr.sh_dict_bank.items()):\n dst_bank= src_dst_tup[1]\n dst_to_node_dict[dst_bank]= node\n if node in instr.invalidate_node_set:\n dst_to_invl_dict[dst_bank] = True\n else: #NOTE: newly added, not sure if this correct. This is to remove the error occuring in \"if dst_to_invl_dict[dst_bank]:\" statement below\n dst_to_invl_dict[dst_bank] = False\n assert len(dst_to_node_dict) == len(instr.sh_dict_bank),\" Repeating dst_bank\"\n \n # bank addr, crossbar sel, invalids\n all_bank_addr= 0\n all_crossbar_sel= 0\n all_invalids= 0\n for idx, dst_bank in enumerate(sorted(dst_to_node_dict.keys())):\n node= dst_to_node_dict[dst_bank]\n\n src_bank= instr.sh_dict_bank[node][0]\n all_crossbar_sel |= src_bank << (idx * CROSSBAR_SEL_L)\n\n src_pos= instr.sh_dict_pos[node][0]\n all_bank_addr |= src_pos << (idx * BANK_ADDR_L)\n \n if dst_to_invl_dict[dst_bank]:\n all_invalids |= (1 << idx)\n\n curr_bin= bin_update(curr_bin, all_bank_addr, BANK_RD_ADDR_L)\n curr_bin= bin_update(curr_bin, all_crossbar_sel, CROSS_SEL_L)\n curr_bin= bin_update(curr_bin, all_invalids, INVLD_L)\n \n elif instr.is_type('bb'):\n curr_bin= bin_update(curr_bin, BB_OPCODE, OPCODE_L)\n \n # invalids\n all_invalids= 0\n for node in instr.invalidate_node_set:\n all_invalids |= (1 << instr.in_node_details_dict[node].bank)\n assert in_range(all_invalids, BB_INVLD_L)\n curr_bin= bin_update(curr_bin, all_invalids, BB_INVLD_L)\n\n # bank addr, crossbar_sel\n debug_reg_re= 0\n all_bank_addr= 0\n all_crossbar_sel= 0\n for pe_tup, pe_details in list(instr.pe_details.items()):\n if pe_details.is_leaf():\n tree = pe_tup[0]\n pe = pe_tup[2]\n\n # input 0\n if pe_details.input_0_reg != None:\n input_idx= tree * (2**TREE_DEPTH) + pe * 2\n bank= pe_details.input_0_reg[0]\n pos= pe_details.input_0_reg[1]\n\n all_crossbar_sel |= bank << (input_idx * CROSSBAR_SEL_L)\n all_bank_addr |= pos << (bank * BANK_ADDR_L)\n debug_reg_re |= (1 << bank)\n \n assert not pe_details.is_pass_1()\n else:\n assert not pe_details.is_sum()\n assert not pe_details.is_prod()\n \n # input 1\n if pe_details.input_1_reg != None:\n input_idx= tree * (2**TREE_DEPTH) + pe * 2 + 1\n bank= pe_details.input_1_reg[0]\n pos= pe_details.input_1_reg[1]\n\n all_crossbar_sel |= bank << (input_idx * CROSSBAR_SEL_L)\n all_bank_addr |= pos << (bank * BANK_ADDR_L)\n debug_reg_re |= (1 << bank)\n\n assert not pe_details.is_pass_0()\n else:\n assert not pe_details.is_sum()\n assert not pe_details.is_prod()\n \n curr_bin= bin_update(curr_bin, all_bank_addr, BB_BANK_RD_ADDR_L)\n curr_bin= bin_update(curr_bin, all_crossbar_sel, BB_CROSS_SEL_L)\n\n # arith_op, \n all_arith_op = 0\n for pe_tup, pe_details in list(instr.pe_details.items()):\n tree = pe_tup[0]\n lvl = pe_tup[1] \n pe = pe_tup[2]\n if pe_details.is_sum():\n arith_bin= SUM\n elif pe_details.is_prod():\n arith_bin= PROD \n elif pe_details.is_pass_0():\n arith_bin= PASS_0\n elif pe_details.is_pass_1():\n arith_bin= PASS_1\n else:\n assert 0\n\n arith_idx= tree * N_ALU_PER_TREE + (2**(TREE_DEPTH-lvl)) - 1 + pe\n arith_bin <<= (arith_idx * 
ARITH_L)\n all_arith_op |= arith_bin\n \n curr_bin= bin_update(curr_bin, all_arith_op, BB_ARITH_OP_L)\n\n # bank_wr_sel\n all_bank_wr_sel = 0\n for pe_tup, pe_details in list(instr.pe_details.items()):\n if pe_details.output_reg != None:\n assert (pe_details.is_sum() or pe_details.is_prod())\n lvl = pe_tup[1] \n bank= pe_details.output_reg[0]\n all_bank_wr_sel |= lvl << (bank * BANK_WR_SEL_L)\n curr_bin= bin_update(curr_bin, all_bank_wr_sel, BB_BANK_WR_SEL_L)\n\n # if temp_instr_idx > start_temp_idx and temp_instr_idx < last_temp_idx:\n # if temp_instr_idx == 29:\n # printcol(temp_instr_idx, 'blue')\n # printcol('bb', 'red')\n # print(bin(all_bank_addr))\n # print(hex(debug_reg_re))\n \n if verb: \n printlog('bb')\n printlog('invalid ' + bin(all_invalids), 'red')\n printlog('bank_addr ' + bin(all_bank_addr), 'red')\n printlog('crossbar_sel ' + bin(all_crossbar_sel), 'red')\n printlog('arith ' + bin(all_arith_op), 'red')\n printlog('bank_wr_sel ' + bin(all_bank_wr_sel), 'red')\n\n else:\n assert False\n \n i_bin= str_update(i_bin, curr_bin)\n \n if verb: printlog(instr.name + ' ' + curr_bin)\n\n # make i_bin multiples of INSTR_L\n ciel_multiple= (len(i_bin) + INSTR_L - 1) // INSTR_L\n i_bin= i_bin.zfill(ciel_multiple * INSTR_L)\n \n # split i_bin in multiples of INSTR_L\n i_bin_chunks= [i_bin[i : i+INSTR_L] for i in range(0, len(i_bin), INSTR_L)]\n \n # reverse to bring the first instruction first\n i_bin_chunks.reverse()\n\n with open(file_name, 'w+') as f:\n printcol(f\"Writing to file {file_name}\", 'red')\n for bin_str in i_bin_chunks:\n f.write(bin_str)\n f.write('\\n')", "def rewrite_lp(f_lp, statement):\r\n f_lp.write(statement.logic_program_form())", "def dummy():\n\t\t\tself.save()", "def _write_to_file(self):\n with open(self.filename + \".ir\", \"w+\") as file:\n file.writelines(\n [\"\\n\" + l if p != 0 else l for p, l in enumerate(self.lines)]\n )", "def _write (self, in_tree, dest):\n\t\t## Preparation:\n\t\tself._src_tree = in_tree\n\t\tself._dest_strm = dest\n\t\t## Main:\n\t\troot = in_tree.root\n\t\tif (not root):\n\t\t\troot = in_tree.get_centroid_nodes()[0]\n\t\tself._writeNode (root)", "def data_callback(data_in):\n global data\n global cnt\n global outfile\n # print(data_in[0][0])\n data = np.roll(data, -1, axis=0)\n data[-1,:] = data_in\n outfile.write(\"{},{},{},{},{},{},{},{}\\n\".format(data[0][0], data[0][1], data[0][2], data[0][3], data[0][4], data[0][5], data[0][6], data[0][7]))\n cnt += 1", "def save_pdb(self, fname):\n return", "def save_states(self, checkpoint):\n raise NotImplementedError()", "def save(self, output, data):\n return", "def writer_loop():\n dbconn = DatabaseConnection()\n\n NODES = {}\n rows = dbconn.select(\"SELECT hostname, id FROM nm_node\")\n for row in rows:\n NODES[row[0]] = row[1]\n\n while True:\n in_line = sys.stdin.readline()\n\n if not in_line: #EOF occured\n break\n\n host, facility, priority, level, tag, program, isodate, msg = in_line.split('[-]')\n\n host = host.strip()\n node_id = NODES.get(host, None)\n\n if node_id is None:\n rows = dbconn.select(\"SELECT id FROM nm_node WHERE hostname=%s\",(host,))\n if rows:\n NODES[host] = rows[0][0]\n node_id = rows[0][0]\n\n dbconn.modify(\"INSERT INTO logs (node_id, host, facility, priority, level, tag, program, log_timestamp, msg) \\\n VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s)\", (node_id, host, facility.strip(), priority.strip(),\n level.strip(), tag.strip(), program.strip(), isodate.strip(), msg.strip()))", "def write(self, a):\n #print('write', a)\n return self.assign([a[0], Tree('write', 
a)])", "def write_out(c2ptmk, ofn):\n print \"Writing out to [{}]\".format(ofn)\n with codecs.open(ofn, \"w\", \"utf8\") as ofd:\n for co, infos in sorted(c2ptmk.items()):\n ofd.write(u\"{}\\t{}\\t{}\\n\".format(\n co, infos[\"uri\"], \",\".join(\n [unicode(x) for x in infos[\"ptmks\"]])))", "def out_for_debug(df_input, fname, modus=''):\n \n print(f\"modus is: {modus}\")\n if modus == 'DEBUG':\n fname = fname+'.pkl'\n print(f\"DEBUG: Writing file {fname} to the debug folder ...\")\n write_file(df_input, fname, 'debug/', ftype='pkl')", "def write_states():\n loc_file = open(loc_file_path, 'w')\n prev_file = open(prev_file_path, 'w')\n\n loc_file.write(json.dumps(loc, indent=2))\n prev_file.write(json.dumps(tuple(new_prev), indent=2))\n\n loc_file.close()\n prev_file.close()", "def write(self, records, ptr):\n\n # Process records here\n \n for record in records:\n event=feed.parse_record(record)\n \n if not self.process_event(event):\n utils.log_error(\"Couldn't process event {code:02x} at ({file:4d}, {offset:08x})\".format(code=record['code'],file=record['file'],offset=record['offset']))\n return False\n \n self.pointer=ptr\n \n return utils.write_file_ptr(self.config,ptr)", "def save():", "def __exit__(self, *_):\n with self._info_yaml_file_path.open(\"w\") as info:\n self._yml.dump(self._info, info)", "def _write_leader_optime(self, last_operation):", "def write_database(self,data):\n \n if not os.path.exists(self.database):\n output = FileTools.safe_hdf5_open(self.database,'w')\n else:\n output = FileTools.safe_hdf5_open(self.database,'a')\n\n obsid = self.getObsID(data)\n if obsid in output:\n grp = output[obsid]\n else:\n grp = output.create_group(obsid)\n\n grp.attrs['level3_filename'] = self.outfile\n\n if self.name in grp:\n del grp[self.name]\n lvl3 = grp.create_group(self.name)\n\n lvl3.attrs['version'] = __level3_version__\n lvl3.attrs['calibrator_obsid'] = self.nearest_calibrator\n lvl3.attrs['calibrator_source'] = self.cal_source\n output.close()" ]
[ "0.5688454", "0.54053", "0.52774614", "0.52668023", "0.52515334", "0.5238117", "0.5215203", "0.5178974", "0.50876176", "0.506635", "0.5011479", "0.4995191", "0.49629214", "0.49451622", "0.49426115", "0.4934632", "0.48918885", "0.48890415", "0.48604503", "0.48549697", "0.4845882", "0.4840126", "0.48374406", "0.48093122", "0.47999227", "0.47686547", "0.47631454", "0.4760687", "0.4760687", "0.47562462", "0.47534007", "0.4744778", "0.47446096", "0.473863", "0.4727946", "0.47249222", "0.4716824", "0.47014755", "0.46964705", "0.46889225", "0.46878168", "0.4685426", "0.46563563", "0.4643766", "0.46314454", "0.4616031", "0.46100518", "0.4608329", "0.45956603", "0.45900905", "0.45821396", "0.4577489", "0.4576222", "0.45740193", "0.45713663", "0.45687598", "0.45563814", "0.45537284", "0.4552396", "0.4530535", "0.4525534", "0.4522367", "0.45203578", "0.4517471", "0.45172897", "0.45168748", "0.45159495", "0.45128065", "0.4512648", "0.45076326", "0.45039698", "0.45023015", "0.45021078", "0.44977808", "0.44965717", "0.4496334", "0.44930807", "0.44907656", "0.44865605", "0.44861683", "0.44797662", "0.44716266", "0.44710076", "0.44705838", "0.4465512", "0.44639412", "0.44602358", "0.44582674", "0.4455964", "0.44534445", "0.44453892", "0.4442967", "0.44408903", "0.4434159", "0.4426473", "0.44260994", "0.44224942", "0.44208786", "0.44195777", "0.44188115", "0.44182968" ]
0.0
-1
Function used for marking a deducted late check-in request.
def action_payslip_done(self): for recd in self.late_check_in_ids: recd.state = 'deducted' return super(PayslipLateCheckIn, self).action_payslip_done()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def loan(self):", "def checkin(self):\n folio = self.folio_id\n if folio.payment_deposits <= 0:\n raise UserError(_(\"\"\"No record of security deposit found on folio {}\n \"\"\".format(folio.name)))\n if folio.state != 'on_queue':\n raise UserError(_(\n 'Folio {} is not yet to be processed'.format(self.folio_id.name)))\n hours, minutes = decimal_to_time(self.env.user.company_id.checkin_hour)\n can_check_in = datetime.combine(\n date.today(), tm(hours, minutes)) < datetime.now()\n if not can_check_in:\n raise UserError(\n 'Guest(s) cannot be checked in earlier than {}'.format(\n self.env.user.company_id.checkin_hour))\n if self.folio_id.room_id.occupy():\n self.folio_id.write({'state': 'checkin'})", "def checkin(self, checkin):\n\n self._checkin = checkin", "def Daysleftverification():\n pass", "def leave_request_decline(self, token, **kwargs):\n cr, uid, context = self._get_cr_uid_context()\n res = self._check_leave_request(\n cr, uid, request, token, context=context\n )\n if isinstance(res, http.Response):\n return res\n if res:\n res.signal_workflow('refuse')\n if res.state == 'refuse':\n return request.website.render(\n \"tk_hr_approve_request.leave_request_refused\"\n )", "def loan(self):\n self.rent_date = datetime.datetime.now()\n self.back_date = datetime.datetime.now() + datetime.timedelta(14)\n self.book.loan()\n self.book.save()\n self.save()", "def check_leave_request_holiday(self, cr, uid, att, context=None):\n if att:\n # check have overtime yet?\n att_name = datetime.strptime(att.name, DEFAULT_SERVER_DATETIME_FORMAT)\n param_obj = self.pool.get('ir.config_parameter') \n max_early = param_obj.get_param(cr, uid, 'maximum_early_minutes', default=60)\n max_late = param_obj.get_param(cr, uid, 'maximum_late_minutes', default=60)\n try:\n max_early = int (max_early)\n max_late = int (max_late)\n except:\n raise except_osv(_(\"Warning !\"),_(\"maximum_early_minutes or maximum_late_minutes in config parameter is incorrect\"))\n \n time_early = att_name + timedelta(minutes = max_early)\n time_late = att_name - timedelta(minutes = max_late)\n \n overtime_obj = self.pool.get('hr.overtime')\n overtime_confirmed_ids = overtime_obj.search(cr, uid, [('employee_id', '=', att.employee_id.id),\n ('mode', '=', 'by_employee'),\n ('name', '=', att.day_tz),\n ('datetime_start', '<=', time_early.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('datetime_stop', '>=', time_late.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('state', 'in', ['confirmed'])\n ])\n if overtime_confirmed_ids:\n return False\n \n public_holiday_obj = self.pool.get('trobz.hr.public.holidays')\n public_holiday_ids = public_holiday_obj.search(cr, uid, [('date', '=', att.day_tz), ('state', '=', 'approved')], context=context)\n if public_holiday_ids:\n return True\n sql = '''\n SELECT line.first_date_type, line.first_date, line.last_date_type, line.last_date\n FROM hr_holidays_line line JOIN hr_holidays h ON line.holiday_id = h.id\n WHERE h.employee_id = %d\n AND line.first_date <= '%s' AND line.last_date >= '%s'\n AND h.state = 'validate'\n '''% (att.employee_id.id, att.day_tz, att.day_tz)\n cr.execute(sql)\n for leave in cr.fetchall():\n if att.action == 'sign_out':\n afternoon = datetime.strptime(att.name_tz, DEFAULT_SERVER_DATETIME_FORMAT).hour >= 13\n else:\n afternoon = datetime.strptime(att.name_tz, DEFAULT_SERVER_DATETIME_FORMAT).hour >= 12\n if att.day_tz == leave[1]:\n if leave[0] == 'afternoon' and afternoon:\n return True\n if leave[0] == 'morning' and not afternoon:\n return True\n if leave[0] == 'full':\n return 
True\n if att.day_tz == leave[3]:\n if leave[2] == 'afternoon' and afternoon:\n return True\n if leave[2] == 'morning' and not afternoon:\n return True\n if leave[2] == 'full':\n return True\n if datetime.strptime(att.day_tz, '%Y-%m-%d') > datetime.strptime(leave[1], '%Y-%m-%d')\\\n and datetime.strptime(att.day_tz, '%Y-%m-%d') < datetime.strptime(leave[3], '%Y-%m-%d'):\n return True\n return False", "def _check_leave_request(self, cr, uid, request, token, context=None):\n holidays_obj = request.registry['hr.holidays']\n holidays_ids = holidays_obj.search(cr, uid, [\n ('token', '=', token)\n ])\n\n if len(holidays_ids) == 0:\n return request.website.render(\n \"tk_hr_approve_request.leave_request_not_found\"\n )\n\n _id = holidays_ids[0] if len(holidays_ids) else None\n if _id:\n leave_request = holidays_obj.browse(\n cr, uid, _id, context=context\n )\n return leave_request", "def _tick(self):\n\t\tself.pay_tax()\n\t\tself.inhabitant_check()\n\t\tself.level_check()", "def _check_approval_update(self, state):\n\t\tcurrent_employee = self.env['hr.employee'].search([('user_id', '=', self.env.uid)], limit=1)\n\t\t# is_officer = self.env.user.has_group('hr_holidays.group_hr_holidays_user')\n\t\tis_manager = self.env.user.has_group('hr_holidays.group_hr_holidays_manager')\n\t\tfor holiday in self:\n\t\t\tval_type = holiday.holiday_status_id.validation_type\n\t\t\tif state == 'confirm':\n\t\t\t\tcontinue\n\n\t\t\tif state == 'draft':\n\t\t\t\tif holiday.employee_id != current_employee and not is_manager:\n\t\t\t\t\traise UserError(_('Only a Leave Manager can reset other people leaves.'))\n\t\t\t\tcontinue\n\n\t\t\t# if not is_officer:\n\t\t\t# \traise UserError(_('Only a Leave Officer or Manager can approve or refuse leave requests.'))\n\n\t\t\t# if is_officer:\n\t\t\t# \t# use ir.rule based first access check: department, members, ... 
(see security.xml)\n\t\t\tholiday.check_access_rule('write')\n\n\t\t\tif holiday.employee_id == current_employee and not is_manager:\n\t\t\t\traise UserError(_('Only a Leave Manager can approve its own requests.'))\n\n\t\t\tif (state == 'validate1' and val_type == 'both') or (state == 'validate' and val_type == 'manager'):\n\t\t\t\tmanager = holiday.employee_id.parent_id or holiday.employee_id.department_id.manager_id\n\t\t\t\tif (manager and manager != current_employee) and not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):\n\t\t\t\t\traise UserError(_('You must be either %s\\'s manager or Leave manager to approve this leave') % (holiday.employee_id.name))\n\n\t\t\tif state == 'validate' and val_type == 'both':\n\t\t\t\tif not self.env.user.has_group('hr_holidays.group_hr_holidays_manager'):\n\t\t\t\t\traise UserError(_('Only an Leave Manager can apply the second approval on leave requests.'))", "def check_absent_pre_date(self, cr, uid, att, context=None):\n if att:\n # check employee absent pre date\n pre_att_ids = self.search(cr, uid, [('employee_id', '=', att.employee_id.id), \n ('name', '<', att.name), \n ('action', 'in', ('sign_in', 'sign_out'))], \n limit=1)\n param_obj = self.pool.get('ir.config_parameter')\n working_hour_obj = self.pool.get('hr.payroll.working.hour')\n max_early = param_obj.get_param(cr, uid, 'maximum_early_minutes', default=60)\n max_late = param_obj.get_param(cr, uid, 'maximum_late_minutes', default=60)\n trobz_base_obj = self.pool.get('trobz.base')\n att_name = datetime.strptime(att.name_tz, DEFAULT_SERVER_DATETIME_FORMAT)\n try:\n max_early = int (max_early)\n max_late = int (max_late)\n except:\n raise except_osv(_(\"Warning !\"),_(\"maximum_early_minutes or maximum_late_minutes in config parameter is incorrect\"))\n \n time_late = att_name - timedelta(minutes = max_late)\n \n working_hour_ids=[] #Payroll Working Hours (Only read working PWH, Not Leave or Overtime PWH) \n if not pre_att_ids:\n working_hour_ids = working_hour_obj.search(cr, uid, [('employee_id', '=', att.employee_id.id),\n ('expected_end', '<', time_late.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('plan_line_id', '!=', False)\n ], \n context=context)\n else:\n pre_time_early = self.read(cr, uid, pre_att_ids[0], ['name_tz'], context=context)['name_tz']\n time_start_early = datetime.strptime(pre_time_early, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(minutes = max_early)\n working_hour_ids = working_hour_obj.search(cr, uid, [('employee_id', '=', att.employee_id.id),\n ('expected_start', '>', time_start_early.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('expected_end', '<', time_late.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('plan_line_id', '!=', False)\n ], context=context, order='date DESC')\n if not working_hour_ids:\n return False\n else:\n for working in working_hour_obj.browse(cr, uid, working_hour_ids, context=context):\n # check public holiday\n holiday_ids = self.pool.get('trobz.hr.public.holidays').search(cr, uid, [('date','=', working.date)], context=context) \n if holiday_ids:\n return False\n # full\n sql = '''\n SELECT line.id\n FROM hr_holidays_line line JOIN hr_holidays h ON line.holiday_id = h.id\n WHERE h.employee_id = %d\n AND line.first_date < '%s' AND line.last_date > '%s'\n AND h.state = 'validate'\n '''% (working.employee_id.id, working.date, working.date)\n cr.execute(sql)\n if cr.fetchall():\n continue\n else:\n sql = False\n expected_start = trobz_base_obj.convert_from_utc_to_current_timezone(cr, uid, working.expected_start, False, 
DEFAULT_SERVER_DATETIME_FORMAT, False, context=context)\n time_start = expected_start.hour\n expected_end = trobz_base_obj.convert_from_utc_to_current_timezone(cr, uid, working.expected_end, False, DEFAULT_SERVER_DATETIME_FORMAT, False, context=context)\n time_end = expected_end.hour\n # wh afternoon\n if time_start >= 12 and time_end >=12:\n sql = '''\n SELECT line.id\n FROM hr_holidays_line line JOIN hr_holidays h ON line.holiday_id = h.id\n WHERE h.employee_id = %d\n AND (line.first_date = '%s' OR line.last_date = '%s')\n AND h.state = 'validate'\n AND (line.last_date_type = 'afternoon' OR line.first_date_type = 'afternoon')\n '''% (working.employee_id.id, working.date, working.date)\n # wh morning\n elif time_start < 12 and time_end <= 12:\n sql = '''\n SELECT line.id\n FROM hr_holidays_line line JOIN hr_holidays h ON line.holiday_id = h.id\n WHERE h.employee_id = %d\n AND (line.first_date = '%s' OR line.last_date = '%s')\n AND h.state = 'validate'\n AND (line.last_date_type = 'morning' OR line.first_date_type = 'morning')\n '''% (working.employee_id.id, working.date, working.date)\n \n if sql:\n cr.execute(sql)\n if cr.fetchall():\n continue\n # wh full\n sql = '''\n SELECT line.id\n FROM hr_holidays_line line JOIN hr_holidays h ON line.holiday_id = h.id\n WHERE h.employee_id = %d\n AND (line.first_date = '%s' OR line.last_date = '%s')\n AND h.state = 'validate'\n AND (line.last_date_type = 'full' OR line.first_date_type = 'full')\n '''% (working.employee_id.id, working.date, working.date)\n cr.execute(sql)\n res = cr.fetchall()\n if res or (time_late >= expected_start and time_late <= expected_end):\n continue\n return True\n return False", "def awaiting_payment(self):", "def already_spent_redemption() -> UnsuccessfulRedemption:\n return UnsuccessfulRedemption(\"double-spend\")", "def set_in_check(self, state):\n\n self._in_check = state", "def mdm_checkin(request):\n data = json.loads(request.body)\n laptop = get_object_or_404(Laptop, api_key_hash=sha256(data['APIKey'].encode('utf-8')).hexdigest(),\n mdm_enrolled=True)\n system_profiles = []\n user_profiles = []\n system_profiles_remove = []\n user_profiles_remove = []\n password = None\n\n for record in InstallationRecord.objects.filter(device=laptop, profile__isnull=False, version=\"RM\", active=True):\n profile = record.profile\n if profile.scope == 'System':\n system_profiles_remove.append(profile.pk)\n else:\n user_profiles_remove.append(profile.pk)\n password = settings.MDM_PASS\n\n for profile in laptop.pending.all():\n if profile.pk not in system_profiles_remove and profile.pk not in user_profiles_remove:\n if profile.scope == 'System':\n system_profiles.append(profile.pk)\n else:\n user_profiles.append(profile.pk)\n\n if len(system_profiles) > 0 or len(user_profiles) > 0 or len(system_profiles_remove) > 0 or \\\n len(user_profiles_remove) > 0:\n response_data = {\"status\": 100, \"system_profiles\": system_profiles, \"user_profiles\": user_profiles,\n \"system_profiles_remove\": system_profiles_remove, \"user_profiles_remove\": user_profiles_remove,\n \"removal_password\": password, \"password\": laptop.admin_password}\n else:\n response_data = {\"status\": 200}\n laptop.last_checkin = timezone.now()\n laptop.last_ip = data['networkIP']\n laptop.save()\n return JsonResponse(response_data)", "def tickets(number, day, premium_seating):\n #fill in your code here. 
\n return 0.0", "def use(self):\n if self.flag:\n if self.credit < self.price_of_trip:\n return \"Your credit is not enough, please increase your credit\"\n else:\n self.credit -= self.price_of_trip\n return \"Done\"\n else:\n return \"Sorry, your card has expired.\"", "def landlord_button_deposite_received(self):\n payment_id = False\n acc_pay_form = self.env.ref(\n 'account.view_account_payment_form')\n account_jrnl_obj = self.env['account.journal'].search(\n [('type', '=', 'sale')], limit=1)\n payment_obj = self.env['account.payment']\n payment_method_id = self.env.ref(\n 'account.account_payment_method_manual_in')\n for tenancy_rec in self:\n if tenancy_rec.acc_pay_dep_rec_id and \\\n tenancy_rec.acc_pay_dep_rec_id.id:\n return {\n 'view_type': 'form',\n 'view_id': acc_pay_form.id,\n 'view_mode': 'form',\n 'res_model': 'account.payment',\n 'res_id': tenancy_rec.acc_pay_dep_rec_id.id,\n 'type': 'ir.actions.act_window',\n 'target': 'current',\n 'context': self._context,\n }\n if tenancy_rec.deposit == 0.00:\n raise Warning(_('Please Enter Deposit amount.'))\n if tenancy_rec.deposit < 0.00:\n raise Warning(\n _('The deposit amount must be strictly positive.'))\n vals = {\n 'partner_id': tenancy_rec.property_owner_id.parent_id.id,\n 'partner_type': 'customer',\n 'journal_id': account_jrnl_obj.id,\n 'payment_type': 'inbound',\n 'communication': 'Deposit Received',\n 'tenancy_id': tenancy_rec.id,\n 'amount': tenancy_rec.deposit,\n 'property_id': tenancy_rec.property_id.id,\n 'payment_method_id': payment_method_id.id\n }\n payment_id = payment_obj.create(vals)\n return {\n 'view_mode': 'form',\n 'view_id': acc_pay_form.id,\n 'view_type': 'form',\n 'res_id': payment_id and payment_id.id,\n 'res_model': 'account.payment',\n 'type': 'ir.actions.act_window',\n 'nodestroy': True,\n 'target': 'current',\n 'domain': '[]',\n 'context': {\n 'close_after_process': True,\n }\n }", "async def legsubmit(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_submit\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} \" \\\n f\"and someone submits a Bill or Motion. 
\" \\\n f\"Note that you will never get a DM when a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} is the one submitting.\"\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone submits a Bill or Motion.\"\n\n await ctx.send(message)", "def checkbalance(self, dt):\n return", "def pre_approve(self, cr, uid, ids, context={}):\n \tfor voucher in self.browse(cr, uid, ids, context=context):\n \t if not voucher.department_id.analytic_account_id:\n \t raise osv.except_osv(_('Configration Check!'), _(\"Please add cost center for your department!\"))\n \t periods = self.pool.get('account.period').search(cr, uid, [('date_start','<=',voucher.date),('date_stop','>=',voucher.date),('company_id','=',voucher.company_id.id)], context=context)\n\n\n res=0.0\n if voucher.purpose:\n if not voucher.purpose.account_id: raise osv.except_osv(_('Warning!'), _('Please configure account for this purpose!')) \n voucher_line = {\n \t\t'voucher_id': voucher.id,\n \t\t'partner_id': voucher.partner_id.id,\n \t\t'untax_amount': voucher.amount,\n \t\t'amount': voucher.amount,\n 'name': voucher.narration,\n \t\t'type': 'dr',\n \t\t'account_analytic_id': voucher.department_id.analytic_account_id and voucher.department_id.analytic_account_id.id,\n 'account_id': voucher.purpose.account_id.id,\n \t }\n new_amount = res and res or voucher.amount \n voucher_line.update({'amount':new_amount,'untax_amount':new_amount})\n \t if voucher.line_ids :\n for line in voucher.line_ids:\n \t\t self.pool.get('account.voucher.line').write(cr, uid, line.id, {\n \t\t'voucher_id': voucher.id,\n \t\t'partner_id': voucher.partner_id.id,\n \t\t'untax_amount': res or line.amount,\n \t\t'amount': line.amount,\n 'name': voucher.narration,\n \t\t'type': 'dr',\n \t\t'account_analytic_id': line.account_analytic_id and line.account_analytic_id.id or voucher.department_id.analytic_account_id.id,\n 'account_id': voucher.purpose.account_id.id or line.account_id.id,\n \t }, context=context)\n \t else:\n\n \t\t new_voucher_line = self.pool.get('account.voucher.line').create(cr, uid, voucher_line, context=context)\n context.update({'purchase':True})\n self.create_budget_confirmation(cr, uid, [voucher.id], context)\n \tself.write(cr, uid, ids,{'state': 'preapprove','type':'purchase','ratification':True}, context=context)\n #cxt = context.copy()\n #cxt.update({'type':'ratification'})\n if not super(account_voucher, self).create_budget_confirmation(cr, uid, ids, context=context):\n self.write(cr, uid, ids, {'state': 'approved'}, context=context)\n\n \t'''self.write(cr, uid, ids, {'state': 'preapprove'})\n if not super(account_voucher, self).create_budget_confirmation(cr, uid, ids, context=context):\n self.write(cr, uid, ids, {'state': 'approve','type':'purchase','ratification':True}, context=context)'''\n return True", "def pay_off_fully(balance, annualInterestRate):\n\n #variable assignment\n currentBalance = balance\n monthlyInterestRate = annualInterestRate/12", "def ToggleApprovalTracker(self, event):\n pass", "def _lend(self, \n\t\t\t borrower, \n\t\t\t asked_value):\n\t\tif self.strategy == 1:\n\t\t\tloan_value = min(self.stock, asked_value)\n\t\t\tself.stock -= loan_value\n\t\t\tdebt_link = DebtLink(self, borrower, loan_value * (1.0 + self.interest_rate))\n\t\t\tself.loans.append(debt_link)\n\t\t\tborrower.debt_link = debt_link\n\t\t\treturn loan_value\n\t\telse: return 0.0", "def out_chky(uid):\n\tx = db.checkouts_today(uid)\n\tif 
x == None: return False\n\telse: return True", "async def passing(self, check, *, note=None):\n return await self.mark(check, \"passing\", note=note)", "def setInDownTime(self, downtime):\n self.adParams['GLIDEIN_In_Downtime'] = str(downtime)", "def check_to_Done(self,cr,uid,ids,context=None):\n\n for rec in self.browse(cr, uid, ids, context):\n if not rec.maintenance_id: \n raise osv.except_osv(_('ValidateError'), _(\"There Is NO maintenace request refrence to this accident.\"))\n return False\n return True", "def evaluate_cancellation_pending_due_to_non_pay(self, date_cursor=None):\n pass", "def on_update(self):\n if self.get('update_request') and not self.is_pending_approval():\n if self.is_revert:\n self.set_as_reverted()\n else:\n self.set_as_success()", "def uncheck(self, roommate_instance):\n if self.status == Item.PROCESSING_CODE and self.check_who == roommate_instance:\n self.status = Item.UNCHECKED_CODE\n self.check_who = None\n else:\n raise PermissionDenied", "def future_deceiveddown(self, a):\n nfav, succfav = self.control[a.name] \n #f_n = n+1;\n # f_worldround = self.world.round+1\n f_successRate = float(a.success) / float(self.world.round+1)\n f_successRatefav = float(succfav) / float(nfav+1)\n if hardrule:\n return self.nsucc(a) > (f_successRatefav + epsilonD) and \\\n (nfav+1 > 5) and ((self.world.round - nfav) > 5) \n else:\n return nfav+1 > 5 and (f_successRate > f_successRatefav + epsilonD \\\n or f_successRate < epsilonD)", "def revise_agreements_expirations_planned(self):\n for agreement in self.search([('prolong', '=', 'unlimited')]):\n if agreement.next_expiration_date <= fields.Date.today():\n # force recalculate next_expiration_date\n agreement.write({'prolong': 'unlimited'})\n return True", "def test_unpaid_penalty_prevents_borrow(self):\n ten_days_ago = timezone.now() - timezone.timedelta(days=10)\n Borrow.objects.create(\n book_id=1,\n student=self.students[0],\n requested_at=ten_days_ago,\n borrowed_at=ten_days_ago,\n duration=6,\n )\n client1 = APIClient()\n client1.login(username=self.manager.username, password=\"salam*123\")\n client1.post(\"/borrows/1/terminate/\")\n client2 = APIClient()\n client2.login(username=self.students[0].username, password=\"salam*123\")\n response = client2.post(\"/borrows/\", data={\"book\": 5})\n self.assertEqual(response.status_code, 400)", "def place_call_offhold(self) -> None:", "def holidays_validate(self, cr, uid, ids, context=None):\n \n super(hr_holidays ,self).holidays_validate(cr, uid, ids, context=context)\n \n grant_order_obj = self.pool.get(\"granted.rights.order\")\n grant_order_lines_obj = self.pool.get(\"granted.rights.order.lines\") \n department_obj = self.pool.get('hr.department')\n \n \n manager = False\n for rec in self.browse(cr,uid,ids):\n\t if rec.holiday_status_id.alternative_emp:\n\t\t dep_ids = department_obj.search(cr,uid,[('manager_id','=',rec.employee_id.id)])\n\t\t #if rec.employee_id.id == department_obj.browse(cr,uid,rec.department_id.id).manager_id.id :\n\t\t if dep_ids:\n\t\t\t\t manager = True\n\n\t\t \n\t\t grant_date = datetime.strptime(rec.date_to, '%Y-%m-%d %H:%M:%S')\n\t\t \n\t\t grant_date = grant_date + timedelta(days=1)\n\n\n\t\t if rec.date_to >= time.strftime('%Y-%m-%d'):\n\t\t\t order_id = grant_order_obj.create( cr, uid,{\n\t\t\t\t \n\t\t\t\t 'delegation_type' : 'holiday',\n\t\t\t\t 'holiday_order_id' :rec.id,\n\t\t\t\t 'employee_donor' : rec.employee_id.id,\n\t\t\t\t 'employee_candidate' : rec.alternative_employee.id,\n\t\t\t\t 'start_grant_date' : rec.date_from, \n\t\t\t\t 
'end_grant_date' : grant_date,\n\t\t\t\t 'department_id' : rec.department_id.id,\n\t\t\t\t 'is_a_amanger' : manager,\n\n\t\t\t\t })\n\t\t\t \n\t\t\t res = grant_order_obj.on_change_donor_employee(cr, uid, order_id , rec.employee_id.id , context=context)\n\t\t\t for rec in res['value']['donor_groups_ids']:\n\t\t\t\t rec.update({ 'order_id' : order_id})\n\t\t\t\t grant_order_lines_obj.create( cr, uid,rec )\n \n return True", "def life_insurance_to_recive_total(self):\n pass", "def approve(self):\n self.approved = True\n self.quest_node['approved'] = True\n graph.push(self.quest_node)\n self.payout()", "def withdraw(self, responder):\n self._apply_decision(self.Status.WITHDRAWN, responder)", "def test_debt_target_not_expired_when_new_issue_added(self):\n measurement = self.measurement(\n self.metric(accept_debt=True, debt_target=\"100\", issue_ids=[\"FOO-41\", \"FOO-42\"]),\n count={\"debt_target\": \"100\"},\n issue_status=[{\"status_category\": \"done\", \"issue_id\": \"FOO-41\"}],\n )\n self.assertFalse(measurement.debt_target_expired())", "def dr_approve(self):\n print \"DR approved this form. Current state:\", self.state", "def check4(self, x, y, mark, d = 0):", "def future_deceivedup(self, a): \n nfav, succfav = self.control[a.name]\n #f_n = n+1;\n # f_worldround = self.world.round+1\n f_successRate = float(a.success +1) / float(self.world.round+1)\n if hardrule:\n return (nfav+1 > 5) and ((self.world.round - nfav) > 5) and \\\n float(a.success+1-succfav)/(self.world.round+1 - nfav) > \\\n (float(succfav)/nfav) + epsilonD\n else:\n return nfav > 5 and (f_successRate > (float(succfav)/nfav) + epsilonD \\\n or f_successRate < epsilonD)", "def on_update_after_submit(self):\n if self.get('update_request') and not self.is_pending_approval():\n if self.is_revert:\n self.set_as_reverted()\n else:\n self.set_as_success()", "def use(self):\n if self.credit < self.price_of_trip:\n print(\"Your credit is not enough, please increase your credit\")\n else:\n self.credit -= self.price_of_trip\n print(\"Done\")", "def _borrow(self):\n\t\tif self.debt_link == None:\n\t\t\tchoice_list = [a for s in self.site.neighbors for a in s.agents_in_site if a.stock >= (self.consumption_demanded - self.consumed)]\n\t\t\tif len(choice_list) > 0: \n\t\t\t\tchoosed = numpy.random.choice(choice_list)\n\t\t\t\tloan_value = choosed._lend(self, self.consumption_demanded - self.consumed)\n\t\t\t\tself.consumed += loan_value\n\t\t\t\tself.consumption_deficit -= loan_value", "def assign_dose(self, centre_id):\n\n cent = Centre.objects.filter(id=centre_id).get()\n if cent.doses_available < 1:\n raise ValueError(\"Centre does not have any doses available\")\n\n self.assigned_centre_id = cent.id\n cent.doses_available -= 1\n cent.save()\n self.save()\n\n msg = f\"Hi, this is VaxiTrack. We have found you a vaccine at {cent.name}, {cent.postcode}. Please attend at {cent.available_at} to receive your dose. 
Thank you, VaxiTrack\"\n send_mail('VaxiTrack', msg, settings.EMAIL_HOST_USER,[self.email], fail_silently=False)", "def confirm_meal(request, e_id):\n enrolment = Enrolment.objects.get(pk=e_id)\n total_meal = enrolment.day_meal_count + enrolment.night_meal_count\n price = enrolment.plan.price\n extended_user = ExtendedUser.objects.get(user=request.user)\n extended_user.balance -= price * total_meal\n if extended_user.balance >= 0:\n extended_user.save()\n owner = enrolment.plan.store.owner\n owner = ExtendedUser.objects.get(user=owner)\n owner.balance += price * total_meal\n owner.save()\n return view_enrolments(request)", "def get_in_check(self):\n\n return self._in_check", "def passengers(not_checked_in, checked_in):\n while not_checked_in:\n current_passenger = not_checked_in.pop() # remove last item on the list\n\n # Simulate checking a passenger inself.\n print(\"Checking in passenger: \" + current_passenger)\n checked_in.append(current_passenger) # Add to the check in list", "def chky(uid):\n\tx = db.checkins_today(uid)\n\tif x == None: return False\n\telse: return True", "def make_eligible(self):\n pass", "def purchase_indent_dr_approve(request, request_id):\n # Check if logged in user is DR\n if not request.user.groups.filter(name='DR_AccountsDepartment').exists():\n raise PermissionDenied\n\n current_employee = request.user.employee_set.all()[0]\n purchase_indent_request = get_object_or_404(PurchaseIndentRequest, pk=request_id)\n\n if request.POST.get('Approve'):\n if not can_proceed(purchase_indent_request.dr_approve):\n raise PermissionDenied\n\n purchase_indent_request.dr_approve()\n purchase_indent_request.save()\n\n remark = request.POST.get('remark')\n transition_record = TransitionHistory(\n approver=current_employee,\n form=purchase_indent_request,\n from_state=STATE.APPROVED_BY_JAO,\n to_state=STATE.APPROVED_BY_DR,\n remark=remark\n )\n transition_record.save()\n messages.success(request, 'The Purchase Indent form was Approved')\n\n elif request.POST.get('Reject'):\n if not can_proceed(purchase_indent_request.reject):\n raise PermissionDenied\n\n purchase_indent_request.reject()\n purchase_indent_request.save()\n\n remark = request.POST.get('remark')\n transition_record = TransitionHistory(\n approver=current_employee,\n form=purchase_indent_request,\n from_state=STATE.APPROVED_BY_JAO,\n to_state=STATE.REJECT,\n remark=remark\n )\n transition_record.save()\n messages.warning(request, 'The Purchase Indent form was Rejected')\n\n return redirect('purchase:purchase-requests-pending')", "def DissolutionAfterReinstatement(obj):\n if (obj.dissolution_date is None) or (obj.reinstatement_date is None): \n return\n if obj.dissolution_date > obj.reinstatement_date:\n raise interface.Invalid(\n _(\"A committee must be disolved before it can be reinstated\"), \n \"dissolution_date\", \n \"reinstatement_date\")", "def do(self):\n self.message += 'Searching for unpayed loans \\n'\n today = dt.now()\n tomorrow = today + timedelta(days=1)\n\n expired = Loan.objects.filter(due_date=today)\n almost = Loan.objects.filter(due_date=tomorrow)\n\n self.charge(expired)\n self.notify(almost)\n\n self.log_success()", "def test_debt_target_expired(self):\n measurement = self.measurement(\n self.metric(accept_debt=True, debt_target=\"100\", issue_ids=[\"FOO-40\"]),\n count={\"debt_target\": \"100\"},\n issue_status=[{\"status_category\": \"done\", \"issue_id\": \"FOO-40\"}],\n )\n self.assertTrue(measurement.debt_target_expired())", "def confirmed(self):", "def use(self):\n if self.price_of_trip == 
0:\n print(\"Sorry your card has been used\")\n else:\n self.price_of_trip -= self.price_of_trip\n print(\"Done\")", "def deposit(amt) :\r\n\tglobal bal\r\n\tbal_in = bal\r\n\t#PREMISES FOR NEXT LINE: \r\n\t# (amt >= 0)\r\n\t# (bal >= 0)\r\n\t# (bal == bal_in)\r\n\tbal = bal + amt\r\n\t#PREMISES FOR ATTACHED PROOF, IF ANY: \r\n\t# (bal == (bal_old + amt))\r\n\t# (amt >= 0)\r\n\t# (bal_old >= 0)\r\n\t# (bal_old == bal_in)\r\n\t#PREMISES FOR NEXT LINE: \r\n\t# (amt >= 0)\r", "def UpdateQuarantine(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')", "def end_restrict(self, state, date, time):\n current_state = Logic(self.driver).check_state(folder_options.END_RESTRICT_CHECK)\n if current_state != state:\n Logic(self.driver).set_state(xpath=folder_options.END_RESTRICT_CHECK, dstate=state)\n if state:\n self.driver.find_element_by_xpath(folder_options.END_RESTRICT_DATE).clear()\n self.driver.find_element_by_xpath(folder_options.END_RESTRICT_TIME).clear()\n self.driver.find_element_by_xpath(folder_options.END_RESTRICT_TIME).send_keys(time)\n self.driver.find_element_by_xpath(folder_options.END_RESTRICT_DATE).send_keys(date)", "def test_not_redeemed_while_redeeming(\n self, get_config: GetConfig, now: datetime, voucher: bytes\n ) -> None:\n store = self.useFixture(TemporaryVoucherStore(lambda: now, get_config)).store\n controller = PaymentController(\n Clock(),\n store,\n NonRedeemer(),\n default_token_count=100,\n allowed_public_keys=set(),\n )\n self.assertThat(\n Deferred.fromCoroutine(controller.redeem(voucher)),\n has_no_result(),\n )\n\n persisted_voucher = store.get(voucher)\n self.assertThat(\n persisted_voucher.state,\n Equals(model_Pending(counter=0)),\n )", "def invoice_onsettled(invoice):\n\n db = current.db\n s3db = current.s3db\n\n # Look up claim, invoice number, program and billing\n btable = s3db.fin_voucher_billing\n ctable = s3db.fin_voucher_claim\n itable = s3db.fin_voucher_invoice\n ptable = s3db.fin_voucher_program\n join = [ptable.on(ptable.id == ctable.program_id),\n btable.on(btable.id == ctable.billing_id),\n itable.on(itable.id == ctable.invoice_id),\n ]\n query = (ctable.invoice_id == invoice.id) & \\\n (ctable.deleted == False)\n row = db(query).select(ctable.id,\n ctable.program_id,\n ctable.billing_id,\n ctable.pe_id,\n btable.date,\n itable.invoice_no,\n ptable.name,\n ptable.organisation_id,\n join = join,\n limitby = (0, 1),\n ).first()\n if not row:\n return\n program = row.fin_voucher_program\n billing = row.fin_voucher_billing\n claim = row.fin_voucher_claim\n invoice_no = row.fin_voucher_invoice.invoice_no\n\n error = None\n\n # Look up the provider organisation\n pe_id = claim.pe_id\n otable = s3db.org_organisation\n provider = db(otable.pe_id == pe_id).select(otable.id,\n otable.name,\n limitby = (0, 1),\n ).first()\n\n from .helpers import get_role_emails\n provider_accountants = get_role_emails(\"PROVIDER_ACCOUNTANT\", pe_id)\n if not provider_accountants:\n error = \"No provider accountant found\"\n\n if not error:\n # Lookup the template variables\n base_url = current.deployment_settings.get_base_public_url()\n appname = current.request.application\n data = {\"program\": program.name,\n \"date\": btable.date.represent(billing.date),\n \"invoice\": invoice_no,\n \"organisation\": provider.name,\n \"url\": \"%s/%s/fin/voucher_claim/%s\" % (base_url, appname, claim.id),\n }\n\n # Send the email notification\n from .notifications 
import CMSNotifications\n error = CMSNotifications.send(provider_accountants,\n \"InvoiceSettled\",\n data,\n module = \"fin\",\n resource = \"voucher_invoice\",\n )\n if error:\n msg = \"%s could not be notified about invoice settlement: %s\"\n current.log.error(msg % (provider.name, error))\n else:\n msg = \"%s notified about invoice settlement\"\n current.log.debug(msg % provider.name)", "def disability_specify(self, instance):\r\n return instance.user.profile.disability_specify", "def set_not_complete(request):\n current_user = UserInformation.objects.get(user=User.objects.get(email=request.user.email))\n print(\"Set not complete: \", request)\n if current_user.current_lesson_set is None:\n return False\n\n current_set = current_user.current_lesson_set.lessons.all()\n if current_set.exists():\n\n if request.method == 'POST':\n print(\"Not complete call from POST\")\n # need a variation of what to do if the last lesson was completed\n if current_user.current_lesson_index < len(current_set) - 1:\n # increase index of lesson set depending on if user is on a previously completed lesson\n if current_user.current_lesson_index < current_user.completed_lesson_index:\n current_user.current_lesson_index = current_user.current_lesson_index + 1\n else:\n current_user.completed_lesson_index = current_user.completed_lesson_index + 1\n current_user.current_lesson_index = current_user.current_lesson_index + 1\n current_user.save()\n return True\n else:\n # remove set from current set\n return False\n elif request.method == 'GET':\n print(\"Not complete call from GET\")\n if current_user.current_lesson_index < len(current_set):\n print(\"TEST PRINT\")\n return True\n return False", "async def legwithdraw(self, ctx):\n\n new_value = await self.toggle_dm_setting(ctx.author.id, \"leg_session_withdraw\")\n\n if new_value:\n message = f\":white_check_mark: You will now receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone withdraws their Bill or Motion. 
\" \\\n f\"Note that you will never get a DM when a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} is the one withdrawing.\"\n\n else:\n message = f\":white_check_mark: You will no longer receive DMs when you are a member of the \" \\\n f\"{self.bot.mk.LEGISLATURE_CABINET_NAME} and someone withdraws their Bill or Motion.\"\n\n await ctx.send(message)", "def MarkRecommendationClaimed(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details(\"Method not implemented!\")\n raise NotImplementedError(\"Method not implemented!\")", "def checkin(self, guest_name):\n pass", "def test_penalty_is_made_on_delay(self):\n twenty_days_ago = timezone.now() - timezone.timedelta(days=20)\n borrow = Borrow.objects.create(\n book_id=1,\n student=self.students[0],\n requested_at=twenty_days_ago,\n borrowed_at=twenty_days_ago,\n duration=10,\n )\n client = APIClient()\n client.login(username=self.manager.username, password=\"salam*123\")\n client.post(\"/borrows/1/terminate/\", data={\"duration\": 5})\n borrow.refresh_from_db()\n self.assertIsNotNone(borrow.returned_at)\n self.assertIsNotNone(borrow.delaypenalty)\n self.assertEqual(borrow.delaypenalty.amount, 11 * 1000)", "def decline(self):\n self.is_active = False\n self.save()", "def decline(self):\n self.is_active = False\n self.save()", "def getStatus(entry, vType):\n data = db.Database(filename=\"IFB299.db\")\n fine = data.retrieveFine(vType, entry['Citation_Number'])\n if fine['Payment_status'] == \"Pending\":\n date = dt.datetime.strptime(entry['Date'],'%d/%m/%Y')\n overdue = dt.datetime.now().date() - date.date()\n if overdue.days >= 7:\n return \"Overdue\"\n else:\n return \"Pending\"\n else:\n return \"Paid\"", "def mark_item(report):\n if report['pass'] == True:\n report['mark'] = 1 \n else:\n report['mark'] = 0 \n \n report['mark-max'] = 1 \n \n return report['mark'], report['mark-max']", "async def _toggle_holidays(\n self, update: Update, context: ContextTypes.DEFAULT_TYPE\n ) -> None:\n self.system_status_lock.acquire()\n self.system_status_proxy[\"holidays\"] = int(\n not self.system_status_proxy[\"holidays\"]\n )\n await update.message.reply_text(\n f\"{__name__.split('.')[-1]}: Request Holidays Mode to {self.system_status_proxy['holidays']}\"\n )\n self.system_status_lock.release()", "def decline(self):\n self._check_if_open()\n # decline endpoint needs data, but it's not possible to set a decline reason by api (frontend only)\n data = {\"id\": self.id}\n return self.post(\"decline\", data)", "def home_checkin():\n\tcheckpremenu()", "def checkin(self, message):\n if not validate_notification_message(message, CHECKIN_MESSAGE_FIELDS):\n raise ValueError('invalid message')\n\n self._submit('articlepkg_checkins', message)", "def mark_missed(self):\n if self.state == TrackState.Tentative:\n self.state = TrackState.Deleted\n elif self.time_since_update > self._max_age:\n self.state = TrackState.Deleted", "def mark_missed(self):\n if self.state == TrackState.Tentative:\n self.state = TrackState.Deleted\n elif self.time_since_update > self._max_age:\n self.state = TrackState.Deleted", "def process(self, roommate):\n if self.status == Item.UNCHECKED_CODE:\n self.status = Item.PROCESSING_CODE\n self.check_who = roommate\n else:\n raise PermissionDenied", "def daffodils(flNeeded,amtPaid, dzCost):\n\n\n import math\n\n dz = flNeeded / 12\n dozens = math.ceil (dz) #Rounds up to the nearest dozen\n\n totCost = dzCost * dozens\n toPay = totCost - amtPaid\n\n print (\"You will need to contribute\", 
toPay)", "def confirmed(self, cr, uid, ids, context=None):\n\tallow_archive_line_obj = self.pool.get('services.contracts.allowances.lines')\n for record in self.browse(cr, uid, ids, context=context):\n\t\tif not record.allowances_lines_before :\n \traise osv.except_osv(_('Partner Lines !'), _('Sorry no partner Lines!'))\n\n\t \tlines_ids = [line.id for line in record.allowances_lines_after]\n \tallow_archive_line_obj.unlink(cr,uid,lines_ids,context=context)\n\n\t\tfor lines in record.allowances_lines_before:\n\t\t\tif lines.percentage_rating < 0 or lines.percentage_rating > 100 :\n \t\traise osv.except_osv(_('Rate Error !'), _('Sorry you insert wrong rate ... rate is between (0,100)!'))\n \t\tamount_after_rate_id = allow_archive_line_obj.create(cr, uid, {\n \t\t\t\t'cost_of_rent':lines.cost_of_rent,\n \t\t\t\t'amount_untaxed':round (lines.amount_untaxed*lines.percentage_rating/100,2),\n \t\t\t\t'amount_tax':round(lines.amount_tax*lines.percentage_rating/100,2),\n \t\t\t\t'amount_total':round(lines.amount_total*lines.percentage_rating/100,2),\n \t\t\t\t'deduct_days':lines.deduct_days,\n \t\t\t\t'deduct_amount':lines.deduct_amount,\n \t\t\t\t'contract_id':lines.contract_id.id,\n\t\t\t\t\t'env_allow_id_after_rate':record.id,\n\t\t\t\t\t'type': 'after',\n 'category_id':lines.category_id.id,\n\t\t\t\t\t'percentage_rating':lines.percentage_rating,\n\n })\n\t\t\n \n self.write(cr, uid, ids, {'state':'confirmed'})\n return True", "def not_complete(request):\n print(\"not_complete method in tutor_helper.py\")\n if user_auth(request):\n user = User.objects.get(email=request.user.email)\n print(\"\\t\", user)\n current_user = UserInformation.objects.get(user=user)\n if current_user.current_main_set is None:\n return False\n if current_user.completed_sets is not None:\n if current_user.current_main_set not in current_user.completed_sets.all():\n print(\"not complete\")\n print(current_user.current_main_set)\n return True\n else:\n if current_user.completed_sets is None:\n return True\n return False", "def new_oneoff_donation(sender, instance, **kwargs):\n # Only process the donation if it is of type \"one off\".\n if instance.donation_type != Donation.DonationTypes.one_off:\n return\n\n # If the instance has no PK the previous status is unknown.\n if instance.pk:\n # NOTE: We cannot check the previous and future state of the ready attribute since it is set in the\n # Donation.save function.\n\n existing_donation = Donation.objects.get(pk=instance.pk)\n # If the existing donation is already pending, don't mail.\n if existing_donation.status == DonationStatuses.pending:\n return\n\n # If the donation status will be pending, send a mail.\n if instance.status == DonationStatuses.pending:\n mail_new_oneoff_donation(instance)", "def setPaysLivraison(self, in_data):\n pays = in_data\n panier = Commande.objects.getUserPanier(self.request)\n panier.setPaysLivraison(pays)\n out_data = {\n 'panier': PanierApiSerializer(panier, context={'request': self.request}).data,\n 'success': True,\n }\n return out_data", "def checkinall(self): # 3\n res = self.__obj.checkinall()\n if res != 0:\n raise Error(rescode(res),\"\")", "def ilk(self, in_call):\n # print('ilk', self.dname, self.dname + '.' + self.dchan, self.values[self.dname + '.' + self.dchan])\n flag = False\n for chan in self.cnd['chans']:\n flag = flag or self.values[self.dname + '.' 
+ chan]\n if not flag:\n if self.fail_count['ilk']:\n self.error_code = self.dchan + '|' + self.cnd['err_code'] + '|' + 'user_turned_on'\n self.fail_count['ilk'] = 0\n self.log_manager('ilk')\n elif self.values[self.dname + '.' + self.dchan]:\n self.error_code = self.dchan + '|' + self.cnd['err_code']\n self.fail_count['ilk'] = 1\n self.log_manager('ilk')\n elif not self.values[self.dname + '.' + self.dchan]:\n self.error_code = self.dchan + '|' + self.cnd['err_code'] + '|' + 'user_turned_on'\n self.log_manager('ilk')\n else:\n pass\n # print('whats up, I shouldnt be here!', flag, self.values[self.dname + '.' + self.dchan])", "def markUndrafted(name, pool):\n #TODO: Create logic that removes a player from the masterList when he's ineligible\n splitStr = name.split()\n if len(splitStr) == 1:\n print (\"Please specify either a last name or a last initial\")\n return\n\n for positionList in pool:\n for player in positionList:\n if name == player.name:\n player.status = 1\n print (\"Player\", player.name, \" is now eligible at \", player.pos)", "def unequal_paid(self, unequal_paid):\n allowed_values = [\"Allow\", \"Disallow\", \"AdminReview\"]\n if unequal_paid.lower() not in map(str.lower, allowed_values):\n # print(\"Invalid value for unequal_paid -> \" + unequal_paid)\n self._unequal_paid = \"outdated_sdk_version\"\n else:\n self._unequal_paid = unequal_paid", "def holdingpenreview():\n objectid = request.values.get('objectid', 0, type=int)\n approved = request.values.get('approved', False, type=bool)\n ticket = request.values.get('ticket', False, type=bool)\n if not objectid:\n abort(400)\n workflow_object = workflow_object_class.get(objectid)\n workflow_object.extra_data[\"approved\"] = approved\n workflow_object.extra_data[\"ticket\"] = ticket\n workflow_object.save()\n db.session.commit()\n\n resume.delay(workflow_object.id)\n\n return render_template('authors/forms/new_review_accepted.html',\n approved=approved)", "def reservation_mark_entrance(user: User, reservation: Reservation):\n owns_restaurant = reservation.restaurant.operator == user\n if owns_restaurant and reservation.status is ReservationState.ACCEPTED and reservation.reservation_time <= datetime.datetime.now():\n #Might want to add user notification\n reservation.entrance_time = datetime.datetime.now()\n reservation.status = ReservationState.SEATED\n db.session.commit()\n return True\n\n return False", "def payments(self, loan):\n self.currency_interest = \"XBT\"\n \n \"\"\"The lender agrees to provide the borrower half of the loan amount\n on the initial loan on the initial date\"\"\"\n loan.fund(on=self.initial_loan_date,\n amount=self.total_loan_amount * \\\n Decimal(0.5))\n \"\"\"The lender agrees to pledge the remaining loan amount toward\n the kickstarter campaign of the borrower.\"\"\"\n loan.fund(on=self.kickstarter_payment_date,\n amount=self.total_loan_amount * \\\n Decimal(0.5))\n \"\"\" Standard payment schedule - The borrower intends to\n payback period will be separated into 8 installments and\n completed in 8 months. The payback will begin in the 5th\n month. 
However, unless the special conditions are triggered,\n the borrower is required to only pay the interest on the loan\n until the final payment date.\"\"\"\n\n \"\"\" Special payment schedule - If First campaign funded over\n USD 65,000, the borrower must pay back entire loan including\n one year interest within the two months after Crowd Funding\n Platform pay the fund.\"\"\"\n\n \"\"\" If First campaign funded over USD 58,000, will pay back 4\n Installment in advance, after Crowd Funding Platform pay the\n fund. The rest of the loan will keep paying followed the\n standard schedule until all loan including interest is paid\n back.\"\"\"\n\n if (self.kickstarter_revenue > Money(65000, \"USD\")):\n payment_date = self.kickstarter_payment_date + \\\n relativedelta(months=2)\n loan.add_to_balance(on=payment_date,\n amount = loan.interest(payment_date,\n self.final_payment_date,\n loan.remaining_balance()))\n loan.payment(on=payment_date,\n amount = loan.remaining_balance())\n else:\n if (self.kickstarter_revenue > Money(58000, \"USD\")):\n payment_date = self.kickstarter_payment_date + \\\n relativedelta(months=2)\n loan.payment(on=payment_date,\n amount = lambda : loan.remaining_principal()() * Decimal(0.5))\n start_payment_date = self.initial_loan_date + \\\n relativedelta(months=4)\n loan.amortize(on=start_payment_date,\n amount = loan.remaining_balance(),\n payments=8,\n interval=relativedelta(months=1))\n \"\"\"The borrower agrees to pay back the any remaining principal\n and accrued interest one year after the loan is issued.\"\"\"\n loan.payment(on=self.final_payment_date,\n amount= loan.remaining_balance())", "def test_charge_correct_for_novel_after_close_4_days(self):\n rental = create_test_rental(\n book=self.book1,\n customer=self.user1,\n date_borrowed=\"2019-05-21 00:00:00.400952+00:00\",\n )\n close_rental_url = reverse(\"close_rental\", kwargs={\"pk\": rental.pk})\n\n data = {\"date_returned\": \"2019-05-25 13:46:57.249145+03:00\"}\n response = self.client.put(close_rental_url, data=data, format=\"json\")\n\n self.assertEqual(response.data[\"amount_charged\"], \"6.00\")\n self.assertEqual(response.data[\"rental_status\"], \"Closed\")\n self.assertEqual(response.data[\"currency\"], CURRENCY)", "def unreturnbank(self):\n pass", "def onDealCanceled(argsList):\r\n\tCyInterface().setDirty(InterfaceDirtyBits.Score_DIRTY_BIT, True)", "async def debit(ctx, *args):\n users_mentioned = ctx.message.mentions\n user_mention = ctx.author.mention\n debit = 0\n for arg in args:\n try:\n debit = float(arg)\n await ctx.message.channel.send(user_mention+\", we have successfully debited as you commanded.\")\n break\n except:\n pass\n bals = self.data[\"balances.json\"]\n for user in users_mentioned:\n if user.id in bals:\n bals[user.id] -= debit\n else:\n bals[user.id] = -debit", "def unpaid_redemption() -> UnsuccessfulRedemption:\n return UnsuccessfulRedemption(\"unpaid\")", "def withdrawn(self, withdrawn):\n\n self._withdrawn = withdrawn", "def check3(self, x, y, mark, d = 0):", "def new_check(self, cr, uid, ids, context=None):\n voucher_pool = self.pool.get('account.voucher')\n move = self.pool.get('account.move').browse(cr, uid, context.get('active_id',[]), context=context)\n cr.execute(\"SELECT COALESCE(sum(credit),0) amount,ml.partner_id,COALESCE(date_maturity,%s) date_maturity,ml.id id \" \\\n \"FROM account_move_line ml INNER JOIN account_move m ON m.id = ml.move_id \" \\\n \"INNER JOIN account_account acc ON acc.id = ml.account_id INNER JOIN account_account_type acc_type ON acc_type.id = 
acc.user_type \" \\\n \"WHERE m.id = %s AND ml.credit > 0 AND acc.type = 'liquidity' GROUP BY ml.partner_id,date_maturity,ml.id\",(move.date,str(move.id),))\n suppliers = cr.dictfetchall()\n voucher_id = False\n for supplier in suppliers:\n voucher = {\n 'account_id':move.journal_id.default_credit_account_id.id,\n 'company_id':move.company_id.id,\n 'period_id':move.period_id.id,\n 'date':move.date,\n 'amount':supplier['amount'],\n 'journal_id':move.journal_id.id,\n 'pay_journal_id':move.journal_id.id,\n 'move_id':int(move.id),\n 'ref': move.name,\n 'partner_id':supplier['partner_id'],\n 'amount_in_word':amount_to_text_ar(supplier['amount'], 'ar'),\n 'type':'payment',\n 'allow_check':1,\n 'chk_status':True,\n 'date_due':supplier['date_maturity']\n }\n voucher_id = voucher_pool.create(cr, uid, voucher, context=context)\n voucher_pool.write(cr, uid, voucher_id, {'state': 'posted'}, context=context)\n self.write(cr, uid, ids, {'payment_id':voucher_id}, context=context)\n return voucher_id" ]
[ "0.56058526", "0.557811", "0.5529027", "0.5488796", "0.52010155", "0.5192642", "0.516891", "0.51390547", "0.5094012", "0.50648564", "0.5062245", "0.5061174", "0.49786085", "0.49332282", "0.4931753", "0.49173915", "0.4892089", "0.4874537", "0.48676074", "0.4837274", "0.48028046", "0.47757152", "0.47741687", "0.47650734", "0.4737783", "0.47371477", "0.47341275", "0.4727474", "0.47264427", "0.4719434", "0.47098443", "0.4691823", "0.46804756", "0.4680019", "0.46792778", "0.46758235", "0.46745253", "0.4673757", "0.46679378", "0.4656983", "0.4647627", "0.4637923", "0.4634971", "0.4630203", "0.46278742", "0.4627149", "0.462403", "0.46204427", "0.46096092", "0.45974985", "0.45907527", "0.4590676", "0.45822185", "0.45814985", "0.4580589", "0.4570197", "0.45607907", "0.4549113", "0.45455116", "0.45431787", "0.4540404", "0.45397776", "0.45363033", "0.45294848", "0.45283648", "0.45268297", "0.45248324", "0.4524602", "0.45239574", "0.4522412", "0.4522412", "0.4522169", "0.45189714", "0.4518312", "0.45144916", "0.45143434", "0.45130634", "0.45079", "0.45079", "0.45066828", "0.4504724", "0.45002532", "0.44967663", "0.44967633", "0.44958532", "0.44928348", "0.44886667", "0.44883978", "0.44873476", "0.44865128", "0.448441", "0.44809216", "0.44794196", "0.44775358", "0.4474916", "0.4473312", "0.44696474", "0.4469576", "0.4468693", "0.44668996" ]
0.5966537
0
Name of current protocol.
def name(self): return 'Null'
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def protocol(self) -> str:\n return __name__", "def protocol(self) -> str:\n return pulumi.get(self, \"protocol\")", "def protocol_name(self):\n self._protocol_name = 'kerberos'\n return self._protocol_name", "def getProtocol(self) -> str:\n ...", "def layer_protocol_name(self) -> str:\n return self._layer_protocol_name", "def in_protocol(self) -> str:\n return pulumi.get(self, \"in_protocol\")", "def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> str:\n return self.__parameters.protocol", "def name(self) -> str:\n return self.proto.name", "def name(self):\n return self.proto.name", "def protocol(self):\n return helpers.get_protocol()", "def get_name(self):\n \n return 'Socket/IP'", "def protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self):\n return self._host[CONF_PROTOCOL]", "def get_name(self):\n \n return 'TCP/IP Server'", "def protocol(self):\n return self._protocol", "def getProtocol(self, _):\r\n return self._protocol", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def sockname(self):\n return self.socket_.getsockname()", "def protocol(self):\n self._recv_protocol()\n return self._protocol", "def protocol(self) -> Optional[pulumi.Input[Union[str, 'Protocol']]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self):\n return self._info.next # pylint: disable=E1101", "def v_protocol(self):\n return self._protocol", "def v_protocol(self):\n return self._protocol", "def get_name(self):\n \n return 'TCP/IP Client'", "def name(self) -> str:\n return f\"{self._inst} port {self._data[self._sid_data['sid_name']]}\"", "def getsockname(self):\n return self.sock.getsockname()", "def getsockname(self):\r\n return self.sock.getsockname()", "def getName(self):\n return _libsbml.Port_getName(self)", "def __ip_protocol(self, proto_num):\n if proto_num in self.protocols:\n return self.protocols[proto_num]\n return str(proto_num)", "def module_name(self) -> str:\n return to_snake_case(self.name.split('/')[-1][:-len('.proto')])", "def getsockname(self):\r\n return self._fd.getsockname()", "def name(self) -> str:\n return 'oltp'", "def name(self) -> 'Literal[\"Dynamic Reverse Address Resolution Protocol\", \"Inverse Address Resolution Protocol\", \"Reverse Address Resolution Protocol\", \"Address Resolution Protocol\"]': # pylint: disable=line-too-long\n return self._name", "def proto(self):\n return self.sock.proto", "def get_name(self):\n \n return 'UDP/IP Server'", "def protocol(self) -> Optional[pulumi.Input['TargetServerProtocol']]:\n return pulumi.get(self, \"protocol\")", "def connection_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"connection_name\")", "def name(self):\n return '{} {}'.format(self._device,\n self._endpoint)", "def peername(self):\n return self.socket_.getpeername()", "def protocol(self):\n return self._config[\"security.protocol\"]", "def connection_name(self) -> str:\n return pulumi.get(self, \"connection_name\")", "def name(self):\n return 
self.robot.name + ' ' + SWITCH_TYPES[self.type][0]", "def protocol_details(self) -> pulumi.Output['outputs.ServerProtocolDetails']:\n return pulumi.get(self, \"protocol_details\")", "def ip_protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ip_protocol\")", "def get_protocol(self):\n if self.ssl:\n return \"https\"\n else:\n return \"http\"", "def getpeername(self):\r\n return self.__proxypeername", "def getpeername(self):\r\n return self.__proxypeername", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def get_name():\n\n return 'nettools'", "def name(self):\n return self.devname", "def _get_protocol_type(self):\n return self.__protocol_type", "def interface_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"interface_name\")", "def get_name(self):\n \n return 'UDP/IP Client'", "def name(self) -> str:\n return f\"{self._inst} NAT {self._data['name']}\"", "def getpeername(self):\r\n return self.__proxypeername", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def proxy_protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"proxy_protocol\")", "def name(self):\n if self.resource.is_client:\n return f\"{self.network.name} {self.resource.name_connection_type} {SWITCH_TYPES[self.variable][0]}\"\n elif self.resource.is_eero or self.resource.is_profile:\n return f\"{self.network.name} {self.resource.name} {SWITCH_TYPES[self.variable][0]}\"\n return f\"{self.resource.name} {SWITCH_TYPES[self.variable][0]}\"", "def protocol(self):\n\n if '://' in self.host:\n scheme, host = self.host.split('://', 1)\n return scheme\n elif self.port == 21:\n return 'ftp'\n elif self.port == 22:\n return 'sftp'\n elif self.port == 990:\n return 'ftps'\n else:\n # Uncertain, assume FTP.\n return 'ftp'", "def details(self):\n return self.sock.getsockname()", "def name(self):\n return self._config.backend_name", "def getConnectionName(self):\n return self.system", "def get_name(self):\n \n return 'Loop-Back'", "def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval", "def getVhdlName(self):\n return self.name.replace(TOP_NODE_NAME + '.', '').replace('.', '_')", "def _get_interface_name(self):\n return self.__interface_name", "def name(self):\n return self.__name__", "def get_pipename(self):\n return getattr(self, 'pipename', d6tflow.cache.pipe_default_name)", "def _get_ifname(self):\n return self.__ifname", "def interface_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"interface_name\")", "def protocol(self) -> NetworkProtocol:\n if hasattr(self, \"_protocol\"):\n return self._protocol\n _args: list[Arg] = []\n _ctx = self._select(\"protocol\", _args)\n return _ctx.execute_sync(NetworkProtocol)", "def get_name(self, context: bpy.types.Context) -> str:\n if not self.name:\n connector = self.__create_connector(\n self.name_connector, context=context)\n self.name = connector.get_name()\n return self.name", "def name(self):\n # self._name = \"wyzeapi_\"+self._device_mac+\"_\"+ self._name\n return self._device.nickname", "def getpeername(self):\r\n return self.sock.getpeername()", "def name(self):\n return self.device.name()", "def getpeername(self):\n return self.sock.getpeername()", "def name(self) -> str:\n return self._device.name or self._device.mac", "def getname(self):\n return self.__name", "def ip_protocol(self) -> str:\n protocol = f\"ipv{self.ip_address.version}\"\n\n 
log.debug(\"Host %s: IP protocol for paramiko is %s.\", self.host)\n return protocol", "def get_name():\n return __name__", "def topology_name(self):\n return self._topology_name", "def getname(self):\n return self.__class__.__name__", "def get_protocol():\n if https():\n protocol = 'https'\n else:\n protocol = 'http'\n return protocol", "def name(self):\n return \"RPCConnection\"", "def PortName(self):\n if self.force_auto_sync:\n self.get('PortName')\n return self._PortName", "def getName(self):\r\n return self.__name__", "def name(self) -> str:\n\t\traise NotImplementedError", "def name(self):\n return self.__name", "def name(self):\n return self.__name", "def name(self):\n return self.__name", "def name(self):\n return self.__name", "def name(self):\n return self.__name", "def name(self):\n return self.__name", "def name(self):\n return self.__name" ]
[ "0.8527097", "0.8032896", "0.8025242", "0.7800471", "0.7785212", "0.7730119", "0.76614213", "0.76614213", "0.75701255", "0.75649047", "0.75599545", "0.7478808", "0.7466269", "0.7439446", "0.7334409", "0.7231231", "0.71754944", "0.715286", "0.70239735", "0.70239735", "0.70239735", "0.70239735", "0.70239735", "0.6994375", "0.6963947", "0.6961776", "0.6953647", "0.6926818", "0.6926818", "0.6914702", "0.68833", "0.6817902", "0.68153226", "0.67914903", "0.6768064", "0.6754752", "0.675395", "0.67526174", "0.6736922", "0.66858846", "0.6685457", "0.6663634", "0.66518146", "0.66459113", "0.66310096", "0.66294616", "0.66212535", "0.6610738", "0.66089475", "0.6597783", "0.6596406", "0.65644515", "0.65644515", "0.6560652", "0.6560652", "0.65537965", "0.65386397", "0.65200406", "0.65180767", "0.6509586", "0.6507529", "0.6504971", "0.65030414", "0.64970666", "0.64935803", "0.64824975", "0.6481387", "0.6456176", "0.6454697", "0.6442443", "0.6440088", "0.6419786", "0.6413756", "0.6399642", "0.6392814", "0.63815516", "0.63794124", "0.63787085", "0.63761777", "0.637306", "0.6359958", "0.6345461", "0.63411254", "0.6320086", "0.6319427", "0.6316102", "0.63131577", "0.6311798", "0.6310994", "0.63054824", "0.6299567", "0.6297171", "0.6294873", "0.6293273", "0.62882906", "0.62882906", "0.62882906", "0.62882906", "0.62882906", "0.62882906", "0.62882906" ]
0.0
-1
Header length of current protocol.
def length(self): raise UnsupportedCall(f"'{self.__class__.__name__}' object has no attribute 'length'")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def length(self):\n return struct.unpack('<H', self.pkt.payload[2:4])[0]", "def length(self):\n return struct.unpack('<H', self.pkt.payload[6:8])[0]", "def calculated_length(self) -> int:\n return TunnellingRequest.HEADER_LENGTH + len(self.raw_cemi)", "def header_len(self):\n if self.num_lines_header is None:\n Nheader = 0\n with self._compression_safe_file_opener(self.input_fname, \"r\") as f:\n for i, l in enumerate(f):\n if (l[0 : len(self.header_char)] == self.header_char) or (\n l == \"\\n\"\n ):\n Nheader += 1\n else:\n break\n\n return Nheader\n else:\n return self.num_lines_header", "def length(self):\n return struct.unpack('<B', self.pkt.payload[2:3])[0]", "def length(self):\n return struct.unpack('<B', self.pkt.payload[2:3])[0]", "def __len__(self):\n if self._buffer is not None:\n if self._header.value_type in b'ZBH':\n return len(self._buffer)\n else:\n return 1\n else:\n return 0", "def __len__(self):\n # Header + group id + session timeout\n size = self.HEADER_LEN + 2 + len(self.group_id) + 4\n # + member id + protocol type + len(group protocols)\n size += 2 + len(self.member_id) + 2 + len(self.protocol_type) + 4\n # metadata tuples\n for name, metadata in self.group_protocols:\n size += 2 + len(name) + 4 + len(metadata)\n return size", "def length(self):\n return struct.unpack('<B', self.pkt.payload[1:2])[0]", "def __len__(self):\n return len(self._headers)", "def __len__(self):\n # Header + len(self.consumer_group)\n return self.HEADER_LEN + 2 + len(self.consumer_group)", "def network_byte_length(self) -> int:", "def __len__(self):\n # Header + len(group id) + group id\n size = self.HEADER_LEN + 2 + len(self.group_id)\n # + len(member id) + member id\n size += 2 + len(self.member_id)\n return size", "def payload_length(self):\n return self.total_length - self.headers_length - _PRELUDE_LENGTH - 4", "def __len__(self):\n # Header + len(group id) + group id + generation id\n size = self.HEADER_LEN + 2 + len(self.group_id) + 4\n # + len(member id) + member id\n size += 2 + len(self.member_id)\n return size", "def content_len(self):\n return self.header('CONTENT-LENGTH')", "def header_level(self) -> int:\n return len(self.headers)", "def get_content_length(self, hdr):\n try:\n return int(hdr.data[\"content-length\"])\n except (ValueError, TypeError):\n return -1", "def sum_header_lengths(self):\n\n return len(getattr(self, 'vlans', [])) * self.HEADER_LENGTHS['Dot1Q'] + self.HEADER_LENGTHS['Ether']", "def __len__(self):\n return self.HEADER_LEN + 4 + sum(len(t) + 2 for t in self.topics)", "def __header_size(self):\n return self.SIZE_LINEUPS + self.SIZE_PLAYERS_PER_LINEUP", "def header_size(self):\n return 5", "def get_length(self):\n return self._select_interface(self._rc_get_length,\n self._http_get_length)", "def getHeaderWidth( self ):\n return 0", "def length(self):\n return self._info.length # pylint: disable=E1101", "def getLen(self):\n return len(self.data)", "def size(self):\n # IP header has a minimum size of 20 bytes:\n # - 1 byte for version + IHL\n # - 1 byte for DSCP + ECN\n # - 2 bytes for total length\n # - 2 bytes for identification\n # - 2 bytes for flags + fragment offset\n # - 1 byte for TTL\n # - 1 byte for transport protocol type\n # - 2 bytes for header Checksum\n # - 8 bytes, 2 for each IP address\n return 20 + self.segment.size()", "def __len__(self):\n return len(self.bytes)", "def headers_end(self):\n return _PRELUDE_LENGTH + self.headers_length", "def size(self):\n return struct.calcsize(b\"<ii\") + len(self.body.encode(\"ascii\")) + 2", "def 
length(self) -> 'int':\n return self._info.len", "def sent_len(self) -> int:\n raise NotImplementedError(\"must be implemented by subclasses\")", "def _get_content_length(headers):\n ctl = 'content-length'\n for k, v in headers.items():\n if k.lower() == ctl:\n return int(v)\n return None", "def __len__(self):\n # TODO: Is this method used?\n return self._info['length']", "def getLen(self):\n return self.len", "def _get_length(self):\n return self._length", "def length(self) -> int:\n return len(self.data)", "def add_length_header(data: bytes) -> bytes:\n length = str(len(data)).encode()\n header = (b'<length ' + length + b'>').ljust(32)\n return header + data", "def size(self):\n if self._buffer is not None:\n length = SIZEOF_TAGHEADER\n if self._header.value_type == b'B':\n # TODO make sure this is right, need data that uses B to verify\n length += SIZEOF_UINT32 + (len(self._buffer))\n elif self._header.value_type in b'HZ':\n length += len(self._buffer)\n else:\n length += SIZEOF_TAG_TYPES[self._header.value_type]\n return length\n else:\n return 0", "def get_length(self):\n return self._length", "def get_length(self):\n return self._length", "def __len__(self) -> int:\n return len(self.buffer)", "def raw_data_length(self):\n return self.unpack_dword(0x4)", "def length(self):\n return len(self.data)", "def length(self):\n return len(self.data)", "def length(self):\n return len(self.data)", "def hash_byte_length(self):\n if self.is_crc():\n return self._crc_byte_len()\n if self.is_md():\n return 16\n if self.is_sha():\n return self._sha_byte_len()\n return 0", "def get_size(self):\n return len(self.get_payload()) + 4", "def get_length(self):\r\n return len(self.hand)", "def frame_length(self):\r\n return self.config.frame_length", "def __len__(self) -> int:\n if self.serialize_data:\n return len(self.data_address)\n else:\n return len(self.data_infos)", "def get_string_length(self):\n return int(self.read('H')[0])", "def data_length(self):\n size = self.unpack_dword(0x4)\n if size >= 0x80000000:\n size -= 0x80000000\n return size", "def sizeof(self):\n\n return self.__format_length__", "def length(self) -> int:\r\n\r\n return self.__length", "def length(self):\n return self.__length", "def length(self):\n return self.__length", "def payload_length(self):\n return self._payload_length", "def get_message_length(self):\n return len(self._payload)", "def get_length(self):\n curr = self.head\n length = 0\n\n while curr != None:\n length += 1\n curr = curr.link\n\n return length", "def len (self):\n\t\treturn len (self.data)", "def __len__(self):\n # Header + replicaId + len(topics)\n size = self.HEADER_LEN + 4 + 4\n for topic, parts in iteritems(self._reqs):\n # topic name + len(parts)\n size += 2 + len(topic) + 4\n # partition + fetch offset + max bytes => for each partition\n size += (4 + 8 + 4) * len(parts)\n return size", "def get_length(self):\n\n return self.length", "def length(self):\n return self._length", "def length(self):\n return self._length", "def getLength(self):\n return self.length", "def format_length( self, key ) :\r\n\r\n return struct.calcsize( self[key] )", "def get_length(self):\n\n return self._length", "def __len__(self) -> int:\n return self._length", "def Length(data):\n return len(data)", "def getHeaderHeight( self ):\n return 0", "def __len__(self):\n return len(self.buffer)", "def __len__(self):\n return len(self.buffer)", "def __len__(self):\n return self._length", "def __len__(self):\n return self._length", "def __len__(self):\n return self._length", "def 
length(self):\n return self.length", "def setPacketLength(self):\n self.packetLength = len(self) - PRIMARY_HEADER_BYTE_SIZE - 1", "def length(self):\n return len(self._data) if self._data else 0", "def __len__(self):\n return self.__length", "def _get_content_length(self) -> Optional[int]:\n self.mem_map.seek(consts.MEM_MAP_INITIALIZED_FLAG_NUM_BYTES)\n header_bytes = self.mem_map.read(consts.CONTENT_LENGTH_NUM_BYTES)\n content_length = self._bytes_to_long(header_bytes)\n return content_length", "def len(self):\n\t\t\n\t\treturn len(self.line)", "def Length(self) -> int:", "def Length(self) -> int:", "def length(self):\n pass", "def __len__(self):\n # Header + len(group id) + group id + generation id\n size = self.HEADER_LEN + 2 + len(self.group_id) + 4\n # + len(member id) + member id + len(group assignment)\n size += 2 + len(self.member_id) + 4\n # group assignment tuples\n for member_assignment in self.group_assignment:\n # + len(member id) + member id + len(member assignment) + member assignment\n size += 2 + len(member_assignment.member_id) + 4 + len(member_assignment)\n return size", "def __len__(self):\n # Mac address case\n if self.subtype == 3:\n return 7\n # ip address case\n elif self.subtype == 4:\n if self.value.version == 4:\n # ipv4 case\n return 6\n else:\n # ipv6 case\n return 18\n\n # all other cases:\n else:\n return len(self.value) + 1", "def __len__(self) -> int:\n return self._len", "def message_length(self):\n return self._message_length", "def getPacketLengthBytes(self, packet):\n if sys.version[0] == \"2\":\n n = len(packet)\n a = array('c')\n a.append(chr((n >> 24) & 0xFF))\n a.append(chr((n >> 16) & 0xFF))\n a.append(chr((n >> 8) & 0xFF))\n a.append(chr(n & 0xFF))\n return a.tostring()\n elif sys.version[0] == \"3\":\n return (len(packet)).to_bytes(4, byteorder='big')\n else:\n return None", "def length(self):\n return len(self.record)", "def get_headers(self):\n return self.numHeadList", "def content_length(self):\n return self._content_length", "def __len__(self):\n return(self.data_len)", "def length(self):\n\n return self._length", "def preamble_length(self) -> int:\n msb = self._read_u8(_REG_PREAMBLE_MSB)\n lsb = self._read_u8(_REG_PREAMBLE_LSB)\n return ((msb << 8) | lsb) & 0xFFFF", "def data_len(self):\n Nrows_data = 0\n with self._compression_safe_file_opener(self.input_fname, \"r\") as f:\n for i, l in enumerate(f):\n if (l[0 : len(self.header_char)] != self.header_char) and (l != \"\\n\"):\n Nrows_data += 1\n return Nrows_data", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length", "def __len__(self):\n return self.length" ]
[ "0.773718", "0.7697014", "0.74317473", "0.74186736", "0.72751355", "0.72751355", "0.7265553", "0.72438484", "0.7240133", "0.7159681", "0.7159037", "0.7113465", "0.69755954", "0.69566464", "0.69399494", "0.69354415", "0.68519056", "0.6833837", "0.68100524", "0.6744303", "0.672152", "0.66704017", "0.6662102", "0.6655323", "0.66551423", "0.664916", "0.66352034", "0.6632275", "0.66265374", "0.6615237", "0.66106737", "0.65849215", "0.65757346", "0.656582", "0.6558023", "0.65507406", "0.65382206", "0.65201855", "0.6498885", "0.64948726", "0.64948726", "0.64730495", "0.64615285", "0.64607733", "0.64607733", "0.64607733", "0.64434946", "0.6430384", "0.6424811", "0.6415366", "0.63948184", "0.6385293", "0.638282", "0.6380463", "0.6376934", "0.6374498", "0.6374498", "0.63742846", "0.63726294", "0.6370257", "0.63694257", "0.6368451", "0.6367476", "0.63599235", "0.63599235", "0.635668", "0.6356669", "0.63544875", "0.6350486", "0.63494885", "0.63469183", "0.6340579", "0.6340579", "0.6339964", "0.6339964", "0.6339964", "0.63140106", "0.6310046", "0.62771654", "0.6275693", "0.62739944", "0.6270972", "0.6262786", "0.6262786", "0.6261133", "0.62546444", "0.6251238", "0.6247344", "0.6241079", "0.6239915", "0.62391305", "0.6230361", "0.62167245", "0.6210239", "0.620956", "0.6209396", "0.6195098", "0.61898196", "0.61898196", "0.61898196", "0.61898196" ]
0.0
-1
Name of next layer protocol.
def protocol(self): raise UnsupportedCall(f"'{self.__class__.__name__}' object has no attribute 'protocol'")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def layer_protocol_name(self) -> str:\n return self._layer_protocol_name", "def protocol(self):\n return self._info.next # pylint: disable=E1101", "def protocol(self) -> str:\n return __name__", "def get_name(self):\n \n return 'Loop-Back'", "def name(self):\n return self.proto.name", "def name(self) -> 'Literal[\"Dynamic Reverse Address Resolution Protocol\", \"Inverse Address Resolution Protocol\", \"Reverse Address Resolution Protocol\", \"Address Resolution Protocol\"]': # pylint: disable=line-too-long\n return self._name", "def name(self) -> str:\n return self.proto.name", "def getProtocol(self) -> str:\n ...", "def protocol_name(self):\n self._protocol_name = 'kerberos'\n return self._protocol_name", "def protocol(self) -> str:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> str:\n return self.__parameters.protocol", "def protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"protocol\")", "def in_protocol(self) -> str:\n return pulumi.get(self, \"in_protocol\")", "def get_next_rule_name(self):\n next_rule_name = \"R{}\".format(self.next_rule_name_ix)\n self.next_rule_name_ix += 1\n return next_rule_name", "def name(self) -> str:\n return f\"{self._inst} port {self._data[self._sid_data['sid_name']]}\"", "def getProtocol(self, _):\r\n return self._protocol", "def name(self) -> str:\n return f\"{self._inst} NAT {self._data['name']}\"", "def get_name(self):\n \n return 'Socket/IP'", "def __str__(self) -> str:\r\n\t\treturn \"{state}\".format(state=self.NextState.__func__.__qualname__)", "def get_name():\n\n return 'nettools'", "def getVhdlName(self):\n return self.name.replace(TOP_NODE_NAME + '.', '').replace('.', '_')", "def getName(self):\n return _libsbml.Port_getName(self)", "def get_name(self):\n \n return 'TCP/IP Server'", "def name(self):\n return utf82unicode(pn_link_name(self._impl))", "def __ip_protocol(self, proto_num):\n if proto_num in self.protocols:\n return self.protocols[proto_num]\n return str(proto_num)", "def module_name(self) -> str:\n return to_snake_case(self.name.split('/')[-1][:-len('.proto')])", "def layer_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"layer_name\")", "def protocol(self):\n self._recv_protocol()\n return self._protocol", "def node_name(self) -> str:\n op_name = f\"{self.name.name}_{self.name.overload_name}\".lower()\n return \"\".join(word.capitalize() or \"\" for word in op_name.split(\"_\"))", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def protocol(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"protocol\")", "def _get_lsp_frr_out_port_name(self):\n return self.__lsp_frr_out_port_name", "def protocol(self):\n ...", "def protocol(self):\n return helpers.get_protocol()", "def protocol(self) -> Optional[pulumi.Input[Union[str, 'Protocol']]]:\n return pulumi.get(self, \"protocol\")", "def protocol_names(self):\n l = self.protocols()\n retval = [str(k.name) for k in l]\n return retval", "def name(self) -> str:\n return 'oltp'", "def getpeername(self):\r\n return 
self.__proxypeername", "def protocol(self) -> Optional[pulumi.Input['TargetServerProtocol']]:\n return pulumi.get(self, \"protocol\")", "def protocol_details(self) -> pulumi.Output['outputs.ServerProtocolDetails']:\n return pulumi.get(self, \"protocol_details\")", "def getpeername(self):\r\n return self.__proxypeername", "def getpeername(self):\r\n return self.__proxypeername", "def _get_protocol_type(self):\n return self.__protocol_type", "def protocol(self):\n return self._host[CONF_PROTOCOL]", "def proxy_protocol(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"proxy_protocol\")", "def v_protocol(self):\n return self._protocol", "def v_protocol(self):\n return self._protocol", "def name(self):\n if self.resource.is_client:\n return f\"{self.network.name} {self.resource.name_connection_type} {SWITCH_TYPES[self.variable][0]}\"\n elif self.resource.is_eero or self.resource.is_profile:\n return f\"{self.network.name} {self.resource.name} {SWITCH_TYPES[self.variable][0]}\"\n return f\"{self.resource.name} {SWITCH_TYPES[self.variable][0]}\"", "def __str__(self):\n return \"NextHop(idx:{})\".format(self.nhr_id)", "def protocol(self):\n return self._protocol", "def name(self):\n return \"RPCConnection\"", "def name(self):\n return f\"{NAME} {RES_CONTROLLER} {self._controller.controller_index + 1} {RES_MASTER}\"", "def name(self):\n return '{} {}'.format(self._device,\n self._endpoint)", "def layer_protocol_name(self, layer_protocol_name: str):\n allowed_values = [\"OTSiA\", \"OCH\", \"OTU\", \"ODU\", \"ETH\", \"ETY\", \"DSR\"] # noqa: E501\n if layer_protocol_name not in allowed_values:\n raise ValueError(\n \"Invalid value for `layer_protocol_name` ({0}), must be one of {1}\"\n .format(layer_protocol_name, allowed_values)\n )\n\n self._layer_protocol_name = layer_protocol_name", "def name(self) -> str:\n\t\traise NotImplementedError", "def get_name(self):\n \n return 'UDP/IP Server'", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def protocol(self) -> NetworkProtocol:\n if hasattr(self, \"_protocol\"):\n return self._protocol\n _args: list[Arg] = []\n _ctx = self._select(\"protocol\", _args)\n return _ctx.execute_sync(NetworkProtocol)", "def _decode_next_layer(self, dict_, length=None):\n # make next layer protocol name\n proto = str(self._prot or 'Raw').lower()\n\n # make BytesIO from frame package data\n bytes_ = io.BytesIO(self._file.read(dict_['len']))\n info, protochain = self._import_next_layer(bytes_, length)\n\n # write info and protocol chain into dict\n self._protos = ProtoChain(self._prot, protochain)\n dict_[proto] = info\n dict_['protocols'] = self._protos.chain\n return dict_", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def getproxypeername(self):\r\n return _orgsocket.getpeername(self)", "def dll_name(self):\n return self._name(self.Name)", "def getName():", "def getName():", "def getName():", "def getName():", "def getName():", "def getName():", "def protocol_names(self):\n\n return tuple([k.name for k in self.query(Protocol).order_by(Protocol.name)])", "def name(self):\n return self.robot.name + ' ' + SWITCH_TYPES[self.type][0]", "def get_name(self):\n \n return 'TCP/IP Client'", "def layer_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"layer_name\")", "def protocol_type(self):\n return self._read(MX_PROTOCOL_TYPE)", "def _ns_nextid(self):\n return self._ns(\"nextid\")", "def _create_name(self) -> str:\n return self.stream.__class__.__name__", "def protocol(self, code: str) -> str:\n return 
'https'", "def _next_partname(self):\n partname_str = '/ppt/slides/slide%d.xml' % (len(self)+1)\n return PackURI(partname_str)", "def next_link(self) -> str:\n return pulumi.get(self, \"next_link\")", "def ProtocolType(self) -> ProtocolType:", "def name(self) -> str:", "def name(self) -> str:", "def name(self) -> str:", "def name(self) -> str:", "def name(self) -> str:", "def layer_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"layer_name\")", "def tensor_flow_name(self):\n # Just to make sure we don't include characters that aren't allowed in TF\n # opnames, we strip all the non-word characters from the tag.\n tag = re.sub(r'\\W+', '', self.tag)\n if tag: tag = '_'+tag\n return '%s_%s%s' % (self.dtype, '_'.join(str(x) for x in self.shape), tag)", "def interface_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"interface_name\")", "def get_board_name(self):\n return \"OPP {} Board {}\".format(str(self.led.neoCard.chain_serial), \"0x%02x\" % self.led.neoCard.addr)", "def _import_next_layer(self, file_, length):\n if self._prot == 'Ethernet':\n from .link import Ethernet as Protocol\n elif self._prot == 'IPv4':\n from .internet import IPv4 as Protocol\n elif self._prot == 'IPv6':\n from .internet import IPv6 as Protocol\n else:\n data = file_.read(*[length]) or None\n return data, None\n next_ = Protocol(file_, length)\n return next_.info, next_.protochain", "def get_name(self):\n name_str = \"Brain\"\n name_str += \"_\" + self._memory.get_name() \n name_str += \"_ImgSize\" + str(self._img_size[0])\n name_str += \"_Nov\" + self._novelty_loss_type.upper()\n name_str += \"_Train\" + str(self._train_epochs_per_iter)\n name_str += \"_Lrate\" + str(self._learning_rate)\n return name_str", "def _get_interface_name(self):\n return self.__interface_name", "def next_link(self) -> Optional[str]:\n return pulumi.get(self, \"next_link\")", "def protocol(ctx: Context, protocol_specification_path: str):\n _generate_item(ctx, \"protocol\", protocol_specification_path)", "def proto(self):\n return self.sock.proto", "def curr_name(self):\n return self.name_stack[-1]" ]
[ "0.7427126", "0.7015629", "0.7010182", "0.66151756", "0.6591896", "0.6542744", "0.6537509", "0.6536096", "0.6428703", "0.62516314", "0.6208627", "0.6208627", "0.6167117", "0.6059282", "0.6015737", "0.59872663", "0.597741", "0.5912", "0.589437", "0.5888402", "0.58677226", "0.58675516", "0.5844434", "0.58060104", "0.58016914", "0.5798741", "0.5793016", "0.57799894", "0.5778261", "0.57479835", "0.57319367", "0.57276136", "0.57276136", "0.57276136", "0.57276136", "0.57276136", "0.5712322", "0.57086265", "0.5708272", "0.56878555", "0.5608108", "0.5607643", "0.5587892", "0.556804", "0.55444777", "0.55444086", "0.55444086", "0.5539003", "0.55328506", "0.5529572", "0.5524844", "0.5524844", "0.55241084", "0.55187064", "0.5515457", "0.5510255", "0.5509856", "0.54996127", "0.5495879", "0.54949015", "0.5492216", "0.54799676", "0.54785985", "0.54705685", "0.5468787", "0.5468787", "0.54553646", "0.5451942", "0.5451942", "0.5451942", "0.5451942", "0.5451942", "0.5451942", "0.5443838", "0.5436213", "0.5429531", "0.5427659", "0.5410677", "0.5409209", "0.53980637", "0.53972715", "0.5392558", "0.5379308", "0.5375043", "0.5369201", "0.5369201", "0.5369201", "0.5369201", "0.5369201", "0.53663254", "0.53606135", "0.535193", "0.535147", "0.53512156", "0.5346375", "0.5345439", "0.5340725", "0.5335134", "0.53318906", "0.5331498" ]
0.53521055
91
Read (parse) packet data.
def read(self, length=None, **kwargs): # pylint: disable=unused-argument return dict()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def read_packet(self):\n\n\t\t#self.debug(\"READ BUFFER SIZE: %d\" % len(self.buff))\n\t\tbackup = self.buff[:]\n\t\tpacket = Packet()\n\t\ttry:\n\t\t\tpacket.direction = self.node\n\t\t\tpacket.ident = self.unpack('ubyte')\n\t\t\t\n\t\t\t#Defined structs from huge dict\n\t\t\tfor datatype, name in self.get_struct(packet):\n\t\t\t\t# this populates packet.data with {name: value}\n\t\t\t\tpacket.data[name] = self.unpack(datatype)\n\n\t\t\t# I believe the following are packet-type specific fixes for variable-length packets.\n\n\t\t\t#0x17\n\t\t\tif packet.ident == 0x17:\n\t\t\t\tif packet.data['unknown'] > 0:\n\t\t\t\t\tpacket.data['x2'] = self.unpack('short')\n\t\t\t\t\tpacket.data['y2'] = self.unpack('short')\n\t\t\t\t\tpacket.data['z2'] = self.unpack('short')\n\t\t\n\t\t\t#0x33\n\t\t\tif packet.ident in (0x33, 0x34):\n\t\t\t\tpacket.data['data'] = self.unpack_array_fast('byte', packet.data['data_size'])\n\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\n#\t\t\t#0x34\n#\t\t\tif packet.ident == 0x34:\n#\t\t\t\tcoords = self.unpack_array_fast('short', packet.data['data_size'])\n#\t\t\t\tbtype = self.unpack_array_fast('byte', packet.data['data_size'])\n#\t\t\t\tmetadata = self.unpack_array_fast('byte', packet.data['data_size'])\n#\t\t\t\tpacket.data[\"blocks\"] = []\n#\t\t\t\tfor i in zip(coords, btype, metadata):\n#\t\t\t\t\tblock = {}\n#\t\t\t\t\tblock[\"x\"] =\t\ti[0] >> 12\n#\t\t\t\t\tblock[\"z\"] = 0x0F & i[0] >> 8\n#\t\t\t\t\tblock[\"y\"] = 0xFF & i[0]\n#\t\t\t\t\tblock[\"type\"] = i[1]\n#\t\t\t\t\tblock[\"metadata\"] = i[2]\n#\t\t\t\t\tpacket.data[\"blocks\"].append(block)\n#\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\n\t\t\t#0x3C\n\t\t\tif packet.ident == 0x3C:\n\t\t\t\trecords = self.unpack_array_fast('byte', packet.data['data_size']*3)\n\t\t\t\ti = 0\n\t\t\t\tpacket.data[\"blocks\"] = []\n\t\t\t\twhile i < packet.data['data_size']*3:\n\t\t\t\t\tpacket.data[\"blocks\"].append(dict(zip(('x','y','z'), records[i:i+3])))\n\t\t\t\t\ti+=3\n\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\n\t\t\t#0x68\n\t\t\tif packet.ident == 0x68:\n\t\t\t\tpacket.data[\"slots_data\"] = self.unpack_array('slot', packet.data[\"data_size\"])\n\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\t#0x82:\n\t\t\tif packet.ident == 0x82:\n\t\t\t\tpacket.data[\"text\"] = []\n\t\t\t\tfor i in range(4):\n\t\t\t\t\tpacket.data[\"text\"].append(packet.data[\"line_%s\" % (i+1)])\n\t\t\t\t\t\n\t\t\t#0x83\n\t\t\tif packet.ident == 0x83:\n\t\t\t\tpacket.data[\"data\"] = self.unpack_array_fast('byte', packet.data['data_size'])\n\t\t\t\tdel packet.data[\"data_size\"]\n\n\t\t\t# Sets packet.original to the byte string that the packet was decoded from.\n\t\t\tpacket.original = backup[:len(backup) - len(self.buff)]\n\n\t\t\treturn packet\n\n\t\texcept IncompleteData:\n\t\t\tself.buff = backup\n\t\t\treturn None\n\t\texcept Exception, ex:\n\t\t\tself.buff = backup\n\t\t\tex.args += (self.buff[20:],)\n\t\t\traise", "def _read(self):\n \n try:\n d = self._get_byte()\n ts = time.time()\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte()\n ts = time.time()\n packet = [d]\n d = self._get_byte()\n if d == self.HDLC_FLAG_BYTE:\n d = self._get_byte()\n ts = time.time()\n else:\n packet.append(d)\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte()\n packet.append(d)\n if self._debug == True:\n print \"Serial:_read: unescaped\", packet\n packet = self._unescape(packet)\n \n crc = self._crc16(0, packet[1:-3])\n packet_crc = self._decode(packet[-3:-1])\n \n if crc != packet_crc:\n print \"Warning: wrong CRC! 
%x != %x %s\" % (crc, packet_crc, [\"%2x\" % i for i in packet])\n if self._debug:\n if self._ts == None:\n self._ts = ts\n else:\n print \"Serial:_read: %.4f (%.4f) Recv:\" % (ts, ts - self._ts), self._format_packet(packet[1:-3])\n self._ts = ts\n return RawPacket(ts, packet[1:-3], crc == packet_crc)\n except socket.timeout:\n return None", "def parse_data(self, reading):\n\t\tif len(reading) == 5:\n\t\t\ttry:\n\t\t\t\tpacket = TrollPacket.from_binary_packet(reading)\n\t\t\t\tself.update_listeners(packet)\n\t\t\texcept KeyError as e:\n\t\t\t\terr_msg = 'Arduino metadata %s. Binary packet: %s' % (e, reading.hex().upper())\n\t\t\t\tlogging.exception(err_msg)", "def parse(self):\n try:\n if self.bitstream:\n # Parse message header\n self.bitstream.bytepos = 0\n\n if self.bitstream.endswith(\"\\n\"):\n pass\n\n else:\n raise PacketIncomplete(\"Packet does not end with carriage return\")\n\n if self.bitstream.find('0x 50 52 56 41 54',bytealigned=True): # If 'PRVAT' text in bitstream\n self.dataformat = 'NMEA'\n else:\n self.dataformat = 'TRITECH'\n\n if self.dataformat=='NMEA' and self.id != Message.CONFIGURATION_PARAM:\n # go to first comma\n self.bitstream.bytepos = self.bitstream.find('0x2C', bytealigned = True)[0]/8 + 1\n self.payload = self.bitstream.read('bytes:6')\n #skip comma\n self.bitstream.read('bytes:1')\n self.dataunits = self.bitstream.read('bytes:1')\n\n\n elif self.dataformat=='TRITECH' and self.id != Message.CONFIGURATION_PARAM:\n self.bitstream.bytepos = 0\n self.payload = self.bitstream.read('bytes:6')\n self.dataunits = self.bitstream.read('bytes:1')\n else:\n self.bitstream.bytepos = 0\n length_string = 'bytes:'+ str(len(self.bitstream)/8)\n self.payload = self.bitstream.read(length_string)\n\n else:\n pass\n\n except ValueError as e:\n raise PacketCorrupted(\"Unexpected error\", e)", "def parse(self, data):\n self._readahead.write(data)\n buf = self._readahead.getvalue()\n if len(buf) < 4:\n return\n while len(buf) >= 4:\n size = int(buf[:4], 16)\n if size == 0:\n self.handle_pkt(None)\n buf = buf[4:]\n elif size <= len(buf):\n self.handle_pkt(buf[4:size])\n buf = buf[size:]\n else:\n break\n self._readahead = BytesIO()\n self._readahead.write(buf)", "def read_raw_packet(self):\n\n size = 0\n\n # Read our two-byte header from the debugger...\n while not size:\n size = (self._get_next_byte() << 16) | self._get_next_byte()\n\n # ... 
and read our packet.\n packet = bytearray([self._get_next_byte() for _ in range(size)])\n\n # Return our packet.\n # TODO: extract and provide status flags\n # TODO: generate a timestamp on-device\n return packet, datetime.now(), None", "def _read_data(self, header):\n _, msg_size = unpack(self.HEADER_PACK_STR, header)\n with self.socket_lock:\n data = self.socket.recv(msg_size)\n return data", "def parse_packet(self, data):\n return data.decode().split('\\x00')", "def read_pkt_line(self):\n if self._readahead is None:\n read = self.read\n else:\n read = self._readahead.read\n self._readahead = None\n\n try:\n sizestr = read(4)\n if not sizestr:\n raise HangupException()\n size = int(sizestr, 16)\n if size == 0:\n if self.report_activity:\n self.report_activity(4, \"read\")\n return None\n if self.report_activity:\n self.report_activity(size, \"read\")\n pkt_contents = read(size - 4)\n except socket.error as e:\n raise GitProtocolError(e)\n else:\n if len(pkt_contents) + 4 != size:\n raise GitProtocolError(\n \"Length of pkt read %04x does not match length prefix %04x\"\n % (len(pkt_contents) + 4, size)\n )\n return pkt_contents", "def _read(self, timeout=None):\n\n # Developer notes:\n #\n # Packet data read from Serial is in this format:\n # [HDLC_FLAG_BYTE][Escaped data][HDLC_FLAG_BYTE]\n #\n # [Escaped data] is encoded so that [HDLC_FLAG_BYTE] byte\n # values cannot occur within it. When [Escaped data] has been\n # unescaped, the last 2 bytes are a 16-bit CRC of the earlier\n # part of the packet (excluding the initial HDLC_FLAG_BYTE\n # byte)\n #\n # It's also possible that the serial device was half-way\n # through transmitting a packet when this function was called\n # (app was just started). So we also neeed to handle this case:\n #\n # [Incomplete escaped data][HDLC_FLAG_BYTE][HDLC_FLAG_BYTE][Escaped data][HDLC_FLAG_BYTE]\n #\n # In this case we skip over the first (incomplete) packet.\n #\n\n if self._s.timeout != timeout and timeout != None:\n if self._debug:\n print \"Set the timeout to %s, previous one was %s\" % (timeout, self._s.timeout)\n self._s.timeout = timeout\n\n try:\n # Read bytes until we get to a HDLC_FLAG_BYTE value\n # (either the end of a packet, or the start of a new one)\n d = self._get_byte(timeout)\n ts = time.time()\n if self._debug and d != self.HDLC_FLAG_BYTE:\n print \"Skipping incomplete packet\"\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte(timeout)\n ts = time.time()\n\n # Store HDLC_FLAG_BYTE at the start of the retrieved packet\n # data:\n packet = [d]\n\n # Is the next byte also HDLC_FLAG_BYTE?\n d = self._get_byte(timeout)\n if d == self.HDLC_FLAG_BYTE:\n # Yes. This means that the previous byte was for\n # the end of the previous packet, and this byte is for\n # the start of the next packet.\n\n # Get the 2nd byte of the new packet:\n d = self._get_byte(timeout)\n ts = time.time()\n\n # We are now on the 2nd byte of the packet. Add it to\n # our retrieved packet data:\n packet.append(d)\n\n # Read bytes from serial until we read another\n # HDLC_FLAG_BYTE value (end of the current packet):\n while d != self.HDLC_FLAG_BYTE:\n d = self._get_byte(timeout)\n packet.append(d)\n\n # Done reading a whole packet from serial\n if self._debug:\n print \"SimpleSerial:_read: unescaped\", packet\n\n # Decode the packet, and check CRC:\n packet = self._unescape(packet)\n\n crc = self._crc16(0, packet[1:-3])\n packet_crc = self._decode(packet[-3:-1])\n\n if crc != packet_crc:\n print \"Warning: wrong CRC! 
%x != %x %s\" % (crc, packet_crc, [\"%2x\" % i for i in packet])\n raise ReadCRCError\n if self._debug:\n if self._ts == None:\n self._ts = ts\n else:\n print \"Serial:_read: %.4f (%.4f) Recv:\" % (ts, ts - self._ts), self._format_packet(packet[1:-3])\n self._ts = ts\n\n # Packet was successfully retrieved, so return it in a\n # RawPacket wrapper object (but leave out the\n # HDLC_FLAG_BYTE and CRC bytes)\n return RawPacket(ts, packet[1:-3])\n except socket.timeout:\n raise ReadTimeoutError", "def _read_data_into_packet(self, p):\n\n length = p.length * self.disc.audio_format.bytes_per_frame\n\n if p.file_pos is None:\n # Silence, so send on null bytes to player\n p.data = '\\0' * length\n\n else:\n file_pos = p.file_pos * self.disc.audio_format.bytes_per_frame\n self.audio_file.seek(file_pos)\n\n p.data = self.audio_file.read(length)\n length -= len(p.data)\n file_pos += len(p.data)\n\n # If we didn't get all data, iterate with a timeout until\n # it's all been read or the ripping process has stopped.\n # This is not very efficient, and there's a small race\n # condition at the end of the disc, but this should be\n # very rare so keep it unoptimised for now.\n\n while length > 0 and self.is_ripping and self.is_ripping.is_set():\n time.sleep(1)\n\n self.audio_file.seek(file_pos)\n d = self.audio_file.read(length)\n\n length -= len(d)\n file_pos += len(d)\n\n p.data += d\n\n # Still didn't get all data, treat it as an exception\n if length > 0:\n raise SourceError('unexpected end of file, expected at least {0} bytes'\n .format(length))", "def recv_data(self):\n data = \"\"\n size = 0\n\n try:\n size = self.sockObj.recv(4) #Get metadata\n except:\n raise Exception(\"Error while receiving data. Probably broken pipe\")\n\n if len(size) == 0:\n raise Exception(\"No data recivied. 
Probably broken pipe\")\n\n size = struct.unpack('>I',size)[0]\n\n data = self.sockObj.recv(size)\n\n if len(data) != size:\n raise Exception(\"Partiala data recivied\")\n\n return self.decode(data)", "def read(self):\n self._read_into_buffer()\n # print([hex(i) for i in self._buffer])\n\n # check packet header\n if not self._buffer[0:2] == b\"BM\":\n raise RuntimeError(\"Invalid PM2.5 header\")\n\n # check frame length\n frame_len = struct.unpack(\">H\", self._buffer[2:4])[0]\n if frame_len != 28:\n raise RuntimeError(\"Invalid PM2.5 frame length\")\n\n checksum = struct.unpack(\">H\", self._buffer[30:32])[0]\n check = sum(self._buffer[0:30])\n if check != checksum:\n raise RuntimeError(\"Invalid PM2.5 checksum\")\n\n # unpack data\n (\n self.aqi_reading[\"pm10 standard\"],\n self.aqi_reading[\"pm25 standard\"],\n self.aqi_reading[\"pm100 standard\"],\n self.aqi_reading[\"pm10 env\"],\n self.aqi_reading[\"pm25 env\"],\n self.aqi_reading[\"pm100 env\"],\n self.aqi_reading[\"particles 03um\"],\n self.aqi_reading[\"particles 05um\"],\n self.aqi_reading[\"particles 10um\"],\n self.aqi_reading[\"particles 25um\"],\n self.aqi_reading[\"particles 50um\"],\n self.aqi_reading[\"particles 100um\"],\n ) = struct.unpack(\">HHHHHHHHHHHH\", self._buffer[4:28])\n\n return self.aqi_reading", "def readpacket(self, n):\n try:\n msg = self.sock.recv(n)\n except BaseException:\n msg = ''\n return msg", "def parse_payload(self):\n while len(self.buffer) >= 10:\n \"\"\" check magic word \"\"\"\n if self.buffer[0:2] != self.mw:\n #LogDebug(\"drop all buffer due to incorrect magic word\")\n self.buffer = b\"\" # drop entire buffer\n\n \"\"\" extract the value from length field \"\"\"\n length = struct.unpack(\"I\", self.buffer[2:6])[0] + 1\n #print \"packet len\", length, \"buffer len\", len(self.buffer)\n if len(self.buffer) < length:\n #LogDebug(\"imcompleted packet will be processed later\")\n break\n\n \"\"\" verify the packet CRC \"\"\"\n calculated_crc = struct.pack(\"I\", binascii.crc32(self.buffer[:length-4]) & 0xFFFFFFFF)\n if calculated_crc != self.buffer[length-4:length]:\n pass\n else:\n payload = self.buffer[6:length-4]\n self.payloads.append(payload)\n self.buffer = self.buffer[length:]", "def read(self):\n packet = None\n while packet is None:\n packet = self.async_read()\n return packet", "def readData(self):\n if (self.model == 'GDS'):\n self.write(':ACQ'+str(ch)+':MEM?\\n')\n elif (self.model == 'TDS'):\n self.write('CURVe?\\n')\n\n # Check for the initial '#'; if not present, raise error.\n if (self.read(1) != '#'):\n raise Exception, \"Expected header not present\"\n\n # Read the data length indicator\n dataSize = int(self.read(int(self.read(1))))\n\n # extra steps for GDS\n if (self.model == 'GDS'):\n # subtract the 8 bytes we will read.\n dataSize -= 8\n # Read the sampling period\n hstep = struct.unpack('>f', self.read(4))[0]\n # also, fix hoff so it corresponds with that for TDS\n # FIXME: check with the scope at some point.\n hoff = hoff - float(dataSize/4) * hstep\n # Read 4 bytes to advance to the actual data: first byte\n # contains the channel and the three are not used,\n # according to the GDS800 manual.\n self.read(4)\n \n # Read data; TDS expects a 1-byte data, GDS expects 2-byte one.\n if (self.model == 'TDS'):\n data = list(struct.unpack('>'+str(dataSize)+'b',\n self.read(dataSize)))\n # TDS has a trailing '\\n' that should be drained.\n self.read(1)\n elif (self.model == 'GDS'):\n data = list(struct.unpack('>'+str(dataSize/2)+'h',\n self.read(dataSize)))\n\n return data", "def 
Read(self):\n if not self._mem: return self.data\n\n logger.info(\"Read %s\" % self)\n self.data = self.hostmemmgr.read(self._mem, self.size)\n\n logger.info(\"=\" * 30, \"READ BUFFER\", \"=\" * 30)\n scapyfactory.Parse(self.data).Show()\n logger.info(\"=\" * 30, \"END READ BUFFER\", \"=\" * 30)\n\n return self.data", "def packet_read(self):\n bytes_received = 0\n \n if self.sock == NC.INVALID_SOCKET:\n return NC.ERR_NO_CONN\n \n if self.in_packet.command == 0:\n ba_data, errnum, errmsg = nyamuk_net.read(self.sock, 1)\n if errnum == 0 and len(ba_data) == 1:\n bytes_received += 1\n byte = ba_data[0]\n self.in_packet.command = byte\n \n if self.as_broker:\n if self.bridge is None and self.state == NC.CS_NEW and (byte & 0xF0) != NC.CMD_CONNECT:\n print \"RETURN ERR_PROTOCOL\"\n return NC.ERR_PROTOCOL, bytes_received\n else:\n if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:\n return NC.ERR_SUCCESS, bytes_received\n elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:\n return NC.ERR_CONN_LOST, bytes_received\n else:\n evt = event.EventNeterr(errnum, errmsg)\n self.push_event(evt)\n return NC.ERR_UNKNOWN, bytes_received\n \n if not self.in_packet.have_remaining:\n loop_flag = True\n while loop_flag:\n ba_data, errnum, errmsg = nyamuk_net.read(self.sock, 1)\n \n if errnum == 0 and len(ba_data) == 1: \n byte = ba_data[0]\n bytes_received += 1\n self.in_packet.remaining_count += 1\n if self.in_packet.remaining_count > 4:\n return NC.ERR_PROTOCOL, bytes_received\n \n self.in_packet.remaining_length += (byte & 127) * self.in_packet.remaining_mult\n self.in_packet.remaining_mult *= 128\n else:\n if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:\n return NC.ERR_SUCCESS, bytes_received\n elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:\n return NC.ERR_CONN_LOST, bytes_received\n else:\n evt = event.EventNeterr(errnum, errmsg)\n self.push_event(evt)\n return NC.ERR_UNKNOWN, bytes_received\n \n if (byte & 128) == 0:\n loop_flag = False\n \n if self.in_packet.remaining_length > 0:\n self.in_packet.payload = bytearray(self.in_packet.remaining_length)\n if self.in_packet.payload is None:\n return NC.ERR_NO_MEM, bytes_received\n self.in_packet.to_process = self.in_packet.remaining_length\n \n self.in_packet.have_remaining = True\n \n if self.in_packet.to_process > 0:\n ba_data, errnum, errmsg = nyamuk_net.read(self.sock, self.in_packet.to_process)\n if errnum == 0 and len(ba_data) > 0:\n readlen = len(ba_data)\n bytes_received += readlen\n for idx in xrange(0, readlen):\n self.in_packet.payload[self.in_packet.pos] = ba_data[idx]\n self.in_packet.pos += 1\n self.in_packet.to_process -= 1\n else:\n if errnum == errno.EAGAIN or errnum == errno.EWOULDBLOCK:\n return NC.ERR_SUCCESS, bytes_received\n elif errnum == 0 and len(ba_data) == 0 or errnum == errno.ECONNRESET:\n return NC.ERR_CONN_LOST, bytes_received\n else:\n evt = event.EventNeterr(errnum, errmsg)\n self.push_event(evt)\n return NC.ERR_UNKNOWN, bytes_received\n\n #all data for this packet is read\n self.in_packet.pos = 0\n \n ret = self.packet_handle()\n \n self.in_packet.packet_cleanup()\n \n self.last_msg_in = time.time()\n \n return ret, bytes_received", "def decode_data(self, msg):\n if len(msg) < 6:\n raise ValueError(\"Data message is too short - minimum length 6 bytes, got %d bytes\" % len(msg))\n\n (x, TIME) = struct.unpack(\"<HL\", msg[0:6])\n\n if x & (2**15) != 0:\n raise ValueError(\"Expected a data message, found a command message instead\")\n\n ID = (x & self.ID_MASK) >> 4\n LEN = x 
& self.LEN_MASK\n\n if LEN < 0 or LEN > 8:\n raise ValueError(\"Invalid CAN payload length - %d bytes not in [0,8] bytes\" % LEN)\n \n if ID in self.descriptors:\n desc = self.descriptors[ID]\n if \"format\" not in desc:\n raise ValueError(\"No format specified for %#x:%s\" % (ID, desc[\"name\"]))\n if LEN != struct.calcsize(\"<\" + str(desc[\"format\"])):\n raise ValueError(\"Error in decoding message id=%#x name=%s - length field %d mismatches descriptor %d\"\n % (ID, desc[\"name\"], LEN, struct.calcsize(\"<\" + str(desc[\"format\"]))))\n\n DATA = struct.unpack(\"<\" + str(desc[\"format\"]), msg[6:6+LEN])\n \n return (TIME, ID, desc, DATA)\n else:\n raise ValueError(\"Unknown message id=%#x, time=%d, len=%d, data=%r\" % (ID, TIME, LEN, msg[6:]))", "def read_data(self):\r\n # Verify length of response data\r\n length = self.read_until_null()\r\n message = self.read_until_null()\r\n if int(length) == len(message):\r\n return message\r\n else:\r\n raise ProtocolException(\"Length mismatch encountered while reading the Xdebug message\")", "def async_read(self):\n self.lock.acquire()\n\n # append data\n self.rx_buffer += self.interface.read()\n\n # ensure first byte start with 0xbc\n if len(self.rx_buffer) > 0:\n if self.rx_buffer[0] != 0xbc:\n try:\n pkt_start = self.rx_buffer.index(0xbc)\n self.rx_buffer = self.rx_buffer[pkt_start:]\n except ValueError:\n self.rx_buffer = bytes()\n\n # check if we got a valid packet\n if len(self.rx_buffer) >= 4:\n pkt_size = unpack('<H', self.rx_buffer[2:4])[0]\n # check if we got a complete packet\n if len(self.rx_buffer) >= (pkt_size + 5):\n # yep, parse this packet\n packet = Packet.fromBytes(self.rx_buffer[:pkt_size+5])\n self.rx_buffer = self.rx_buffer[pkt_size+5:]\n self.lock.release()\n return packet\n\n # otherwise, return None\n self.lock.release()\n return None", "def readData(self):\n self._readHeader()\n self._readSize()\n self._readComments()\n self._readAllROI()\n self._readDate()\n self._readArray()", "def _readString(self, rawData, offset=0):\n\n strLen, = unpack(\n self.byteFormat, rawData[\n offset:offset + self.byteFormatLen])\n\n return rawData[self.byteFormatLen:][:strLen]", "def parse(data: bytes, port: int, origin: helpers.ConnectionType):\n # Ignore packets from master server... 
game server is more interesting\n if port == helpers.MASTER_PORT:\n return\n # Iteratively parse packet data until nothing is left to parse\n reads = 0\n while len(data) >= 2:\n reads += 1\n pid = data[:2]\n handler = PACKET_HANDLERS.get(pid, None)\n if handler:\n # Parse data without packet id prepended\n # Returned data will be parsed next iteration\n data = handler(data[2:], origin=origin)\n else:\n # This packet doesn't have a handler\n # Print it once for inspection\n if reads <= 1:\n print(f'[{pid}] - {data}\\n')\n # Remove the first byte and try parsing again later\n data = data[1:]", "def parse_data(self, data):\n\t\tname, value = self.parse_from_dref(data)\n\t\tpacket = TrollPacket.from_name(name, value)\n\t\tself.update_listeners(packet)", "def readCommand(self):\n while (True):\n time.sleep(1)\n # At least a package of 4 bytes (minimum)\n # [ Head | Length | Address | Data[0…N] | Check ]\n if (self._serial.inWaiting()>=4):\n # Gets only the first byte of the packet (it should be HEAD)\n packet_header = self._serial.read(1)\n if (packet_header != Ind903Packet.PACKET_HEAD):\n # the next one is the length of the packet\n packet_length_bytes = self._serial.read(1)\n packet_length = int.from_bytes(packet_length_bytes, byteorder='big')\n if (packet_length > 0):\n raw_packet = b\"\".join([packet_header, packet_length_bytes, self._serial.read(packet_length)]) \n result_packet = Ind903Packet.parsePacket(raw_packet)\n return (result_packet)", "def read_packetlen(self):\n packetlen = int(struct.unpack('!I', b\"\".join(self.__input))[0])\n self.__input = []\n self.set_terminator(packetlen)\n self.found_terminator = self.read_milter_data", "def parseBuffer(self):\n idx = self.buf.find(DELIMITER)\n while idx > -1:\n packet = self.buf[0:idx]\n if len(packet) > 4:\n if packet[0:3] == 'DATA':\n self.factory.setData(packet[4:idx])\n else:\n print \"%s is a malformed packet, header %s not recognized\" % (packet, packet[0:3])\n else:\n print \"%s attempting to send a packet of invalid length %s\" % (packet, len(packet))\n self.buf = self.buf[(idx + len(DELIMITER)):]\n idx = self.buf.find(DELIMITER)", "def _read_packet(self, packet_id, data_bytes):\n self._serial_conn.send_command(_SENSORS_OPCODE+\" \"+str(packet_id))\n return self._serial_conn.read_data(data_bytes)", "def handle_receive(self, data):\n alldata = \"\"\n for item in data:\n alldata += item\n\n # Wrap data in a StringIO buffer\n cur_buffer = stringio.StringIO(alldata)\n\n with self.__buffer_lock:\n # While the buffer is not fully processed\n while True:\n # Read header, if no length for metadata and binary data is\n # available\n if self.__meta_length is None:\n if self.read_part(cur_buffer, \"__header_buffer\", self.__header_length):\n header = struct.unpack(\"!II?\", self.__header_buffer)\n self.__meta_length, self.__binary_length, self.__binary_compressed = header\n total = len(self.__header_buffer) + self.__meta_length + self.__binary_length\n else:\n break\n\n # Read metadata bytes when the metadata buffer does not have the\n # correct length yet.\n if len(self.__meta_buffer) < self.__meta_length:\n if not self.read_part(cur_buffer, \"__meta_buffer\", self.__meta_length):\n break\n\n # Read binary bytes when the binary buffer does not have the\n # correct length yet.\n if len(self.__binary_buffer) < self.__binary_length:\n if not self.read_part(cur_buffer, \"__binary_buffer\", self.__binary_length):\n break\n\n # Read a complete 'packet', unpack\n if self.__binary_compressed:\n self.__binary_buffer = 
zlib.decompress(self.__binary_buffer, 15, self.__binary_length * 2)\n self.__meta_buffer = zlib.decompress(self.__meta_buffer, 15, self.__meta_length * 2)\n\n binary_data = self.__binary_buffer\n meta_data = pickle.loads(self.__meta_buffer)\n self.__binary_buffer = \"\"\n self.__meta_buffer = \"\"\n self.__header_buffer = \"\"\n self.__meta_length = None\n self.__binary_length = None\n self.__binary_compressed = False\n\n # Append to the receive buffer (of ThreadedSocket class)\n with self._ThreadedSocket__receive_lock:\n self._ThreadedSocket__receive_buffer.append([meta_data, binary_data])", "def read(self, *args):\n if len(args) == 1:\n data = args[0]\n else:\n peer, data = args\n\n data = data.strip().decode(\"utf-8\")\n\n print(data)", "def _readData(self):\n # Debug. This fn should be called only after checking canRead()\n if not self._canRead():\n raise Exception(\"Trying to read more data than there is.\")\n\n data = self.buffer[:self._expectedByteCount]\n self.buffer = self.buffer[self._expectedByteCount:]\n\n return data", "def receive_data(self):\n chunks = []\n bytes_recd = 0\n while bytes_recd < 8:\n #I'm reading my data in byte chunks\n try:\n chunk = self.sockfd.recv(min(8 - bytes_recd, 4))\n chunks.append(chunk)\n bytes_recd = bytes_recd + len(chunk)\n except:\n print(f'{self.ip} socket failed')\n break\n # if chunk == '':\n # raise RuntimeError(\"Socket connection broken\")\n\n stat_tuple = struct.unpack('L', chunks[0])\n data_tuple = struct.unpack('L', chunks[1])\n stat = stat_tuple[0]\n data = data_tuple[0]\n return stat, chunks[1]", "def serial_read(useParse=False, header='$', tail='#'):\n global ser, recvBuff, startRecord\n retData = ''\n if useParse:\n if ser.readable():\n while ser.inWaiting():\n c = ser.read(1)\n if c == header:\n startRecord = True\n recvBuff = ''\n elif c == tail:\n startRecord = False\n if recvBuff != '':\n #print 'I get: ', recvBuff\n retData = recvBuff\n elif startRecord:\n recvBuff += c\n else:\n pass\n else:\n print 'The serial', ser.portstr, 'cannot be read.'\n pass\n else:\n if ser.readable():\n while ser.inWaiting():\n retData += ser.read(1)\n else:\n print 'The serial', ser.portstr, 'cannot be read.'\n pass\n return retData", "def read_msg(self, _mid_exp):\n tStart = time.time()\n _in = self.waitforAndRead(4)\n while (time.time()-tStart) < self._timeout:\n # search for preamble\n if not _in[:-1] == self._preamble+_mid_exp:\n _in = _in[2:] + self.device.read(2)\n else:\n # if the header if found, proceed on reading ID and length\n mid = ord(_in[2])\n length = ord(_in[3])\n # read contents and checksum\n _in += self.waitforAndRead(length+1)\n\n if length == 0:\n data = ''\n else:\n data = _in[4:-1]\n\n \"\"\"If checksum is not zero, the packet is invalid.\"\"\"\n if 0xFF & (sum([ord(c) for c in _in[1:]])):\n _in = self.waitforAndRead(4)\n continue # start over from the while loop\n\n # return mid and the data\n return data\n\n else:\n return -1", "def read(self, fmt):\n fmt = '>' + fmt\n if not PY3:\n fmt = fmt.encode('utf-8')\n return struct.unpack(fmt, self.bin.read(format_byte_size(fmt)))", "def _read_record(self, stream):\n header = stream.read(4)\n if len(header) < 4:\n return None\n size, rec_type = struct.unpack('>HH', header)\n data_type = (rec_type & 0x00ff)\n rec_type = rec_type // 256\n data = None\n if size > 4:\n if data_type == 0x01:\n data = numpy.array(\n struct.unpack('>{0}H'.format((size - 4) // 2),\n stream.read(size - 4)),\n dtype='uint')\n elif data_type == 0x02:\n data = numpy.array(\n struct.unpack('>{0}h'.format((size 
- 4) // 2),\n stream.read(size - 4)),\n dtype='int')\n elif data_type == 0x03:\n data = numpy.array(\n struct.unpack('>{0}l'.format((size - 4) // 4),\n stream.read(size - 4)),\n dtype='int')\n elif data_type == 0x05:\n data = numpy.array([\n _eight_byte_real_to_float(stream.read(8))\n for _ in range((size - 4) // 8)\n ])\n else:\n data = stream.read(size - 4)\n if str is not bytes:\n if data[-1] == 0:\n data = data[:-1].decode('ascii')\n else:\n data = data.decode('ascii')\n elif data[-1] == '\\0':\n data = data[:-1]\n return [rec_type, data]", "def parse(self, data: bytes, parse_ts=True) -> tuple:\n sync_offset = data.find(b'\\x47')\n if sync_offset == -1: # No sync bit in packet\n return None, None, len(data)\n if sync_offset != 0: # Resync\n data = data[sync_offset:]\n for i in range(int(len(data) / self.__psize)):\n if sync_offset != 0:\n self.__resync = sync_offset\n sync_offset = 0\n else:\n self.__resync = 0\n packet = data[:self.__psize]\n data = data[self.__psize:]\n if len(packet) < self.__psize:\n yield None, None, len(packet)\n parsed = None\n if parse_ts:\n parsed = self.__parse(packet)\n yield packet, parsed, self.__resync", "def read_msg(self):\n if self.state == 'connected':\n if 0 == len(self.buf):\n self.buf = self.inout.recv(Mtcpfns.TCP_MAX_PACKET)\n if 0 == (self.buf):\n self.state = 'disconnected'\n raise EOFError\n pass\n self.buf, data = Mtcpfns.unpack_msg(self.buf)\n return data\n else:\n raise IOError(\"read_msg called in state: %s.\" % self.state)", "def handle_read(self):\n data, ancdata, _, _ = recvmsg(self.socket, 65536, 4096)\n\n if not data:\n return self.handle_close()\n\n fds, cred = parse_ancdata(ancdata)\n\n if self.header_buffer is not None:\n self.header_buffer += data.decode()\n\n linefeed = self.header_buffer.find('\\n')\n if linefeed == -1:\n return\n\n header = json.loads(self.header_buffer[:linefeed])\n data = self.header_buffer[linefeed + 1:]\n self.header_buffer = None\n\n self.handle_header(fds, header)\n\n if not data:\n return\n\n meta = Metadata(time=datetime.now(), pid=cred.pid,\n comm=(comm_for_pid(cred.pid) or \"unknown\"),\n unit=(unit_for_pid(cred.pid) or self.unit))\n\n if meta.unit is not None:\n log = self.log_manager.get(meta.unit, \"stdio\")\n log.write(data, meta)", "def parsePacket(self, packet):\n \n pcktParts = packet.split()\n \n # needs exactly 4 parts\n if len(pcktParts) != 4:\n raise PacketException(\"Packet malformed.\")\n \n direction = pcktParts[0]\n ip = pcktParts[1]\n port = pcktParts[2]\n flag = pcktParts[3]\n\n try:\n pckt = Packet(direction, ip, port, flag)\n except Exception as ex:\n eprint(\"Corrupt Packet:{0} Ignoring packet:\\n{1}\".format(ex, packet.__str__()))\n return None\n \n return pckt", "def data_received(self, data):\n self._log.debug(\"recv %s\", data)\n self.incomingMessageBuffer += data\n\n if not self.started:\n # Need to check the startByte to see if we can receive\n if not self.startByte in self.incomingMessageBuffer:\n # We cut the buffer to size, removing data that can't be part of start byte\n if len(self.startByte) < len(self.incomingMessageBuffer):\n self.incomingMessageBuffer = self.incomingMessageBuffer[\n -len(self.startByte) :\n ]\n self._log.debug(\"Ignoring: start byte %s not found\", self.startByte)\n return\n else:\n self._log.debug(\"startBytes %s found - starting read\", self.startByte)\n _, self.incomingMessageBuffer = self.incomingMessageBuffer.split(\n self.startByte, 1\n )\n self.started = True\n self.onReady(True)\n\n if self.readStruct is not None:\n while 
len(self.incomingMessageBuffer) >= self.readStruct.size:\n msg = self.readStruct.unpack(\n self.incomingMessageBuffer[: self.readStruct.size]\n )\n self.incomingMessageBuffer = self.incomingMessageBuffer[\n self.readStruct.size :\n ]\n\n if self.readKeys is not None:\n msg = dict(zip(self.readKeys, msg))\n self._log.debug(\"recvmsg: %s\", msg)\n self.putter(msg)\n elif self.readFormat is None:\n self.putter(self.incomingMessageBuffer)\n self.incomingMessageBuffer = bytes()\n else:\n # We split by line:\n outputArray = self.incomingMessageBuffer.split(b\"\\n\")\n self.incomingMessageBuffer = outputArray[-1]\n for i in range(len(outputArray) - 1):\n # This returns the bytes object of the line.\n # We don't convert to string, since people might be sending non-ascii characters.\n # When receiving, the user should use .decode('ascii') to get a a string.\n self._log.debug(\"recvmsg: %s\", outputArray[i])\n self.putter(outputArray[i])", "def parse_packet(data):\n ip = IPPacket(data)\n icmp = ICMPPacket(ip.payload)\n print('ICMP message from %s, type %d (%s), code %d, %d byte payload.') % (\n ip.src_addr, icmp.type, ICMP_TYPES[icmp.type], icmp.code,\n len(icmp.payload))\n return len(icmp.payload)", "def read():\n # TODO", "def read_message_from_connection(self, conn):\n raw_msglen = self._recvall(conn, 4)\n if not raw_msglen:\n return None\n\n # We are unpacking a big endian struct which includes\n # the length of the packet, struct makes sure that the header\n # which includes the length is always 4 bytes in length. '>I'\n # indicates that the struct is a unsigned integer big endian\n # CS2110 game strong\n\n msglen = struct.unpack('>I', raw_msglen)[0]\n # Read the message data\n return self._recvall(conn, msglen)", "def read_serial_data(self):\n qdata = list(get_all_from_queue(self.data_q))\n if len(qdata) > 0:\n data = self.data+''.join(qdata)\n while data.find(\"Id: \")!=-1:\n msgStart = data.find(\"Id: \")\n msgEnd = data.find(\"\\n\",msgStart)\n if msgEnd == -1:\n break\n\n packet = data[msgStart:msgEnd-1]\n # print \"msg: [%s]\" % packet\n msgId = int(packet[4:8],16)\n # print \"msgId: %d [%x]\" % (msgId, msgId)\n msgData = map(lambda x: int(x,16) ,packet[16:].split(\" \"))\n # print \"data: \", msgData\n self.update_data(msgId, msgData)\n\n data = data[msgEnd:]\n self.data = data", "def doRead(self):\n return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)", "def readData(self, rawstring, datatype):\n data = rawstring[:-1] #remove last NULL byte\n\n if datatype == ERROR:\n if is_python3():\n data2 = data.tobytes()\n data = data2.decode('utf-8')\n return data\n elif datatype == STRING or datatype == DOUBLE:\n # try to convert data to a more appropriate type\n if is_python3():\n data2 = data.tobytes()\n data = data2.decode('utf-8')\n\n try:\n data = int(data)\n except:\n try:\n data = float(data)\n except:\n pass\n\n return data\n elif datatype == ASSOC:\n return rawtodictonary(rawstring)\n elif SpecArray.isArrayType(datatype):\n #Here we read cols and rows... 
which are *supposed* to be received in the header!!!\n #better approach: data contains this information (since it is particular to that data type)\n return SpecArray.SpecArray(rawstring, datatype, self.rows, self.cols)\n else:\n raise TypeError", "def handle_read(self):\n packet = self.recv(8192)\n if packet == \"\":\n #print \"[WARNING] Socket closed by remote host %s:%s\" % (\n # self.address,self.port)\n self.close()\n return\n packet_list = messages.separate_messages(packet)\n #received_types = \" + \".join(\n # messages.get_message_type(messages.parse(packet))\n # for packet in packet_list)\n #print \"From %s:%s received: \" % (self.address, self.port), received_types\n # Process a single message at a time\n for packet in packet_list:\n message = messages.parse(packet)\n if messages.get_message_type(message) == \"OFPT_ECHO_REQUEST\":\n self.buffer.append(messages.of_echo_reply)\n else:\n self.handle_message(message)", "def recieve_data(self):\r\n try:\r\n while True:\r\n try:\r\n data, self.addr = self.sock.recvfrom(1024)\r\n return data\r\n except socket.timeout:\r\n print(\"There is no packet at all!\")\r\n break\r\n except Exception:\r\n print(\"Can't recieve a package\")", "def _readByte(self, rawData, offset=0):\n val, = unpack(\n self.byteFormat, rawData[\n offset:offset + self.byteFormatLen])\n \n return val", "def reader(self):\n while self.alive:\n try:\n data = self.serial.read_until(b'~')[:-1]\n packet = ethernet.Ethernet(data)\n if packet[icmp.ICMP]:\n packet[ethernet.Ethernet].dst_s = \"dc:a6:32:00:a7:8b\"\n packet[ip.IP].dst_s = \"192.168.1.35\"\n packet[icmp.ICMP].sum = b'0x1783'\n print(\"\\n\\n__________________RESPONSE FROM VISIBLE PI__________________\")\n print(packet)\n if data:\n self.write(packet.bin())\n except socket.error as msg:\n break\n self.alive = False", "def _read(self):\n # because protocol has no termination chars the read reads the number\n # of bytes in the buffer\n bytes_in_buffer = self.visa_handle.bytes_in_buffer\n # a workaround for a timeout error in the pyvsia read_raw() function\n with(self.visa_handle.ignore_warning(visa.constants.VI_SUCCESS_MAX_CNT)):\n mes = self.visa_handle.visalib.read(\n self.visa_handle.session, bytes_in_buffer)\n mes = str(mes[0].decode()) # cannot be done on same line for some reason\n # if mes[1] != 0:\n # # see protocol descriptor for error codes\n # raise Exception('IVVI rack exception \"%s\"' % mes[1])\n return mes", "def _get_data(self, read_size):\n return self._character_device.read(read_size)", "def _receive_packet(self):\n report = self._serial_read(1)\n if len(report) != 1:\n self.log(\"ERROR: Didn't read back a report!\")\n report = -1\n else:\n report = report[0]\n retval = self._serial_read(1)\n if len(retval) != 1:\n self.log(\"ERROR: Didn't read back a return value!\")\n retval = -1\n else:\n retval = retval[0]\n\n return_payload_len = self._serial_read(1)\n if len(return_payload_len) != 1:\n self.log(\"ERROR: Didn't read back a return payload length!\")\n return_payload_len = 0\n else:\n return_payload_len = return_payload_len[0]\n\n if return_payload_len != 0:\n return_payload = self._serial_read(return_payload_len)\n else:\n return_payload = []\n checksum = self._serial_read(1)\n if len(checksum) != 1:\n self.log(\"ERROR: Didn't read back a checksum!\")\n checksum = -1\n else:\n checksum = checksum[0]\n\n data = self.MAGIC_HEADER + [report, retval, return_payload_len] + return_payload\n data.append(checksum)\n\n our_checksum = self.generate_checksum(data[:-1])\n if our_checksum != checksum:\n 
self.log(\"ERROR: Our checksum didn't calculate properly! \"\n \"(Calculated {}, expected {})\".format(our_checksum, checksum))\n return -1, checksum, []\n else:\n if self.verbose:\n self.log(\"Checksum match! ({} == {})\".format(our_checksum, checksum))\n\n return report, retval, return_payload", "def read_message(self):\n text_length_bytes = self.input_fh.read(4)\n logging.debug(\"raw 4: %s\", text_length_bytes)\n if not text_length_bytes:\n # this means exit\n shutdown()\n\n text_length = struct.unpack(\"i\", text_length_bytes)[0]\n logging.debug(\"reading message of length: %s\", text_length)\n msg = self.input_fh.read(text_length).decode()\n logging.debug(\"message is %s\", msg)\n return msg", "def _read_data(self, fh, byteorder='>'):\r\n fh.seek(len(self.header))\r\n data = fh.read()\r\n dtype = 'u1' if self.maxval < 256 else byteorder + 'u2'\r\n depth = 1 if self.magicnum == b\"P7 332\" else self.depth\r\n shape = [-1, self.height, self.width, depth]\r\n size = numpy.prod(shape[1:])\r\n if self.magicnum in b\"P1P2P3\":\r\n data = numpy.array(data.split(None, size)[:size], dtype)\r\n data = data.reshape(shape)\r\n elif self.maxval == 1:\r\n shape[2] = int(math.ceil(self.width / 8))\r\n data = numpy.frombuffer(data, dtype).reshape(shape)\r\n data = numpy.unpackbits(data, axis=-2)[:, :, :self.width, :]\r\n else:\r\n data = numpy.frombuffer(data, dtype)\r\n data = data[:size * (data.size // size)].reshape(shape)\r\n if data.shape[0] < 2:\r\n data = data.reshape(data.shape[1:])\r\n if data.shape[-1] < 2:\r\n data = data.reshape(data.shape[:-1])\r\n if self.magicnum == b\"P7 332\":\r\n rgb332 = numpy.array(list(numpy.ndindex(8, 8, 4)), numpy.uint8)\r\n rgb332 *= [36, 36, 85]\r\n data = numpy.take(rgb332, data, axis=0)\r\n return data", "def retrieveData():\n\n\t# My terribad first attempt at this based off of outflank example\n\t# I honestly have no idea what I was doing, but leaving it here just in case\n\t########\n\t# try:\n\t# \tdata = transSock.recv(4)\n\t# except:\n\t# \treturn(\"\")\n\t# if len(data) < 4:\n\t# \treturn()\n\t# slen = struct.unpack('<I', data)[0]\n\t# data = transSock.recv(slen)\n\t# while len(data) < slen:\n\t# \tdata = data + transSock.recv(slen - len(data))\n\t# return(data)\n\t########\n\n\t# Realizing that I have to unpack the buffer length first:\n\n\tframeSize = \"\"\n\twhile len(frameSize) != 4:\n\t\tframeSize = connSock.recv(4)\n\n\tdataSize = struct.unpack('<I', frameSize)[0]\n\tdata = connSock.recv(dataSize)\n\n\treturn data", "def read_message(self):\n def read_data(lnth):\n data = self.pipe_in.read(lnth)\n if len(data) < lnth:\n raise EofError\n return data\n \n data = read_data(struct.calcsize(\"i\"))\n msgLnth = struct.unpack(\"i\", data)[0]\n data = read_data(msgLnth)\n\n # Ack\n try: self.pipe_out.write('a')\n except IOError: pass\n\n import cPickle\n obj = cPickle.loads(data)\n return obj", "async def read(self) -> Union[dictwrapper, str]:\n while True:\n await self.connect()\n try:\n rx_timeout = self.alive_opts.get('rx_timeout', None)\n reader = self.reader.readuntil(separator=b'\\n')\n self.bresponse = await asyncio.wait_for(reader,\n rx_timeout)\n self.response = polystr(self.bresponse)\n if self.response.startswith(\n \"{\") and self.response.endswith(\"}\\r\\n\"):\n self.unpack(self.response)\n self._oldstyle_shim()\n self.valid |= PACKET_SET\n return self.data\n return self.response\n except asyncio.CancelledError:\n self.close()\n raise\n except Exception as exc: # pylint: disable=W0703\n error = 'timeout' if isinstance(\n exc, 
asyncio.TimeoutError) else exc\n self.logger.warning(\n f'Failed to get message from GPSD: {error}')\n self.close()\n if self.reconnect:\n # Try again later\n await asyncio.sleep(self.reconnect)\n else:\n raise", "def _read(self, register):\n\n addr, num_bytes = register\n data = response = error = None\n if num_bytes == 1:\n data, response, error = self.packet_handler.read1ByteTxRx(\n self.port_handler, self._id, addr\n )\n elif num_bytes == 2:\n data, response, error = self.packet_handler.read2ByteTxRx(\n self.port_handler, self._id, addr\n )\n else:\n data, response, error = self.packet_handler.read4ByteTxRx(\n self.port_handler, self._id, addr\n )\n\n # Check response\n self._error_handler(response, error)\n\n return data", "def recv_bundle(self):\n hdr_raw = sock_recv_raw(self.sock, 6, self.timeout)\n data_length, has_secondary = SPPPacketHeader.preparse_data_length(\n hdr_raw\n )\n data_plus_secondary = sock_recv_raw(\n self.sock,\n data_length,\n self.timeout,\n )\n packet, _ = SPPPacket.parse(\n hdr_raw + data_plus_secondary,\n timecode_used=True,\n has_crc=self.use_crc,\n )\n if self.use_crc:\n assert packet.crc_provided == packet.crc()\n return packet.payload", "def read(self):\r\n try:\r\n if not self.connected:\r\n self._connect()\r\n\r\n (length, encoding, chunked) = self._send_request()\r\n\r\n if chunked:\r\n data = self._read_chunked()\r\n else:\r\n data = self._read_num_bytes(length)\r\n\r\n if encoding == \"gzip\":\r\n data = self._unzip(data)\r\n\r\n data = json.loads(data)\r\n self.timestamp = int(data[1])\r\n if len(data[0]):\r\n if self.cipher:\r\n msg_list = [self._decrypt(m) for m in data[0]]\r\n else:\r\n msg_list = data[0]\r\n\r\n if len(data) > 2:\r\n chan_list = data[2].split(\",\")\r\n else:\r\n chan_list = [self.chan for m in msg_list]\r\n\r\n return zip(chan_list, msg_list)\r\n else:\r\n return []\r\n\r\n except:\r\n self.connected = False\r\n self.sock.close()\r\n raise", "def _get_data(self, read_size):\n return self._pipe.recv_bytes()", "def receive_data():\n\n # Receive the first message (the header),\n # which indicates the incoming data length\n data_length = int(pickle.loads(conn.recv(HEADER_SIZE)))\n \n if data_length:\n # Receive the data itself\n data = pickle.loads(conn.recv(data_length))\n\n return data", "def _read_packet(self, data, jarm_details):\n try:\n if not data:\n raise Exception(\"No data\")\n\n jarm = \"\"\n # Server hello error.\n if data[0] == 21:\n raise Exception(\"Server hello error\")\n # Check for server hello.\n elif (data[0] == 22) and (data[5] == 2):\n counter = data[43]\n # Find server's selected cipher.\n selected_cipher = data[counter+44:counter+46]\n # Find server's selected version.\n version = data[9:11]\n jarm += str(selected_cipher.hex())\n jarm += \"|\"\n jarm += str(version.hex())\n jarm += \"|\"\n extensions = (self._extract_extension_info(data, counter))\n jarm += extensions\n return jarm\n else:\n raise Exception(\"Unexpected result\")\n except Exception:\n return \"|||\"", "def _read_message(self):\n if self.__eof:\n return None\n result = {}\n line = sys.stdin.readline()\n while line == '\\n':\n line = sys.stdin.readline()\n if not line:\n self.__eof = True\n return None\n s = line.split(\" \", 1)\n result['_number'] = int(s[0])\n result['_text'] = s[1].strip()\n\n while not self.__eof:\n line = sys.stdin.readline()\n if not line:\n self.__eof = True\n return result\n if line == '\\n':\n return result\n s = line.split(\":\", 1)\n result[s[0]] = s[1].strip()", "def read_pdu(self):\n\n logging.debug('Waiting for 
PDU...')\n\n try:\n raw_len = self._socket.recv(4)\n except socket.timeout:\n raise\n except socket.error as e:\n logging.warning(e)\n raise exceptions.ConnectionError()\n if not raw_len:\n raise exceptions.ConnectionError()\n\n try:\n length = struct.unpack('>L', raw_len)[0]\n except struct.error:\n logging.warning('Receive broken pdu... {raw_len}'.format(raw_len=repr(raw_len)))\n raise exceptions.PDUError('Broken PDU')\n\n raw_pdu = raw_len\n while len(raw_pdu) < length:\n raw_pdu += self._socket.recv(length - len(raw_pdu))\n\n logging.debug('<<{pdu} ({length} bytes) ({uid})'.format(pdu=binascii.b2a_hex(raw_pdu),\n length=len(raw_pdu),\n uid=self.uid))\n p = smpp.parse_pdu(raw_pdu, client=self)\n\n logging.debug('Read {command} PDU from {host} ({uid})'.format(command=p.command,\n host=self.host,\n uid=self.uid))\n if p.is_error():\n return p\n elif p.command in consts.STATE_SETTERS:\n self.state = consts.STATE_SETTERS[p.command]\n\n return p", "def read(self):\n\n clock = self.pins[\"clock\"]\n data = self.pins[\"data\"]\n\n # read bitstream\n self._req(True)\n for i in range(52):\n # wait for clock to go low\n while clock.value:\n continue\n\n self.bits[i] = data.value\n\n if i == 0: # deassert req after first bit read, so we only get one response\n self._req(False)\n\n # wait for clock to go up again\n while not clock.value:\n continue\n\n # assemble nibbles\n for n in range(13): # iterate over each nibble\n idx = n * 4\n self.nibbles[n] = (\n (self.bits[idx + 0] << 0)\n + (self.bits[idx + 1] << 1)\n + (self.bits[idx + 2] << 2)\n + (self.bits[idx + 3] << 3)\n )\n\n # parse preamble\n # TODO: check if this contains useful data.\n for n in range(4):\n if self.nibbles[n] != 15:\n return None # invalid data\n\n # sign\n if self.nibbles[4] != 0 and self.nibbles[4] != 8:\n return None # invalid data\n sign_pos = self.nibbles[4] == 0\n\n # convert bcd sequence to integer\n number = 0\n bcd = self.nibbles[5:11]\n for i in range(6):\n number += bcd[i] * (10 ** (5 - i))\n\n # decimal point\n number = number / 10 ** self.nibbles[11]\n\n # unit\n unit = self.UNITS.get(self.nibbles[12])\n\n value = number if sign_pos else -number\n if number == 0:\n value = 0.0 # don't like negative zeros.\n\n return self.Reading(value, unit)", "def read(self):\n buff = self.conn.recv(4096)\n if (self.algo == \"rsa\"):\n buff = self.rsa_decrypt(buff)\n if (self.algo == \"des\"):\n buff = self.des_decrypt(buff)\n if (self.algo == \"3des\"):\n buff = self.triple_des_decrypt(buff)\n if (self.algo == \"aes\"):\n buff = self.aes_decrypt(buff)\n\n while buff.strip() != self.exitcode and len(buff) > 0:\n print 'Message received: ', buff.strip()\n #buff = self.rsa_decrypt(buff)\n buff = self.conn.recv(4096)\n\n if (self.algo == \"rsa\"):\n buff = self.rsa_decrypt(buff)\n if (self.algo == \"des\"):\n buff = self.des_decrypt(buff)\n if (self.algo == \"3des\"):\n buff = self.triple_des_decrypt(buff)\n if (self.algo == \"aes\"):\n buff = self.aes_decrypt(buff)\n # client disconnected\n self.stopWrite", "def _record_reader(stream):\n while True:\n header = stream.read(4)\n if len(header) < 4:\n return\n size, rec_type = struct.unpack(\">HH\", header)\n data_type = rec_type & 0x00FF\n rec_type = rec_type // 256\n data = None\n if size > 4:\n if data_type == 0x01:\n data = numpy.array(\n struct.unpack(\n \">{0}H\".format((size - 4) // 2), stream.read(size - 4)\n ),\n dtype=\"uint\",\n )\n elif data_type == 0x02:\n data = numpy.array(\n struct.unpack(\n \">{0}h\".format((size - 4) // 2), stream.read(size - 4)\n ),\n 
dtype=\"int\",\n )\n elif data_type == 0x03:\n data = numpy.array(\n struct.unpack(\n \">{0}l\".format((size - 4) // 4), stream.read(size - 4)\n ),\n dtype=\"int\",\n )\n elif data_type == 0x05:\n data = numpy.array(\n [\n _eight_byte_real_to_float(stream.read(8))\n for _ in range((size - 4) // 8)\n ]\n )\n else:\n data = stream.read(size - 4)\n if str is not bytes:\n if data[-1] == 0:\n data = data[:-1].decode(\"ascii\")\n else:\n data = data.decode(\"ascii\")\n elif data[-1] == \"\\0\":\n data = data[:-1]\n yield [rec_type, data]", "def read(self, msg, ans_len):\n self.write(msg)\n # Length is sum of header(2), length, check, cmd, ans_len and end\n length = 6 + ans_len\n ans = self.sock.recv(length)\n if self.__check(ans):\n return ans[4:-2]\n return None", "def readPacket(stream):\n header = readPacketHeader(stream)\n md5 = stream.read(16)\n data = stream.read(header.length)\n p = Packet(header, data)\n if p.md5.digest() != md5:\n raise errors.NetworkError(\n 'Wrong MD5-checksum! (expected: %s, got: %s)' % (\n p.md5.hexdigest(),\n binascii.b2a_hex(md5)))\n return p", "def extract_packet(_buffer):\n if len(_buffer)>=5:\n mtype=_buffer[0]\n msglen=struct.unpack('!L',_buffer[1:5])[0]\n if len(_buffer)>=msglen+1:\n return _buffer[5:msglen+1],mtype,_buffer[msglen+1:]\n return None,None,_buffer", "def read_and_parse():\n\t# read\n\tline = D.gps_serial.readline()\n\n\t# break into components\n\tdata = line.split(\",\")\n\t#print data\n\t# identify and parse. Indicies are from datasheet \n\tif(data[0] == \"$GPGGA\"):\n\t\tgps_msg = RosGPS()\n\t\tif (data[1] != \"\"):\n\t\t\tgps_msg.gps_time = float(data[1])\n\t\tif (data[2] != \"\"):\n\t\t\tgps_msg.latitude = float(data[2])\n\t\tif (data[4] != \"\"):\n\t\t\tgps_msg.longitude = float(data[4])\n\t\tif (data[9] != \"\"):\n\t\t\tgps_msg.altitude = float(data[9])\n\t\tif (data[7] != \"\"):\n\t\t\tgps_msg.NSatellites = int(data[7])\n\t\t\n\t\tD.gpsPub.publish(gps_msg)", "def __tcp_recv(self):\n total_data = []\n bs = 1024\n try:\n data = self.__sock.recv(bs)\n total_data.append(data)\n while True and data:\n if not re.search(\"L: (\\d+)\",data) and not data[-4:] == '\\r\\n\\r\\n':\n data = self.__sock.recv(bs)\n total_data.append(data)\n elif not re.search(\"L: (\\d+)\",data) and data[-4:] == '\\r\\n\\r\\n':\n return total_data\n else:\n break\n \n\n while re.search(\"L: (\\d+)\",data):\n n = len(data)\n L = int(re.findall(\"L: (\\d+)\",data)[-1])\n p = data.rfind('\\r\\n\\r\\n')\n abc = data\n data = ''\n\n p1 = data.rfind(str(L))\n if p < p1:\n log(\"rn before L\")\n left = L + n - (p1 + len(str(L))) + 4\n\n else:\n left = L - (n - p -4)\n if left == L:\n log(\"It happened!\")\n break\n\n #if more bytes then last L\n #come across another command: BN etc.\n #read until another L come\n if left < 0:\n log('abc')\n d = ''\n left = 0\n while True:\n d = self.__sock.recv(bs)\n data += d\n if re.search(\"L: (\\d+)\",d):\n break\n log(\"read left bytes\")\n log('data:'+data)\n total_data.append(data)\n\n #read left bytes in last L\n while left:\n data = self.__sock.recv(left)\n n = len(data)\n left = left - n\n\n if not data:\n break\n total_data.append(data)\n\n except socket.error,e:\n #self.__sock.close()\n raise PyFetionSocketError(e)\n\n return self.__split(''.join(total_data))\n\n #return ''.join(total_data)", "def _read_data(self):", "def parse_recvd_data(data):\n parts = data.split(b'\\0')\n msgs = parts[:-1]\n rest = parts[-1]\n return (msgs, rest)", "def _get_data(self, read_size):\n if NIX:\n return super(Keyboard, self)._get_data(read_size)\n 
return self._pipe.recv_bytes()", "def on_data_received(self, data):\n # pylint: disable=too-many-branches,too-many-statements\n\n if self.is_receiving_data is True:\n self._buffer += data\n return\n\n try:\n self.is_receiving_data = True\n self._buffer += data\n\n # Keep looping while we have unprocessed data\n # We start processing only once we have an entire field\n # (e.g. 'id=value') in the buffer, otherwise wait for more\n # data.\n # The problem with the current approach is that if there is a\n # binary field with an incorrect length, we may read past\n # the end of the message.\n # BUGBUG: Need to fix this. A quick hack may be to\n # try to peek to see what the tag id is and do something\n # with that. On the other hand this may just be a problem\n # with the protocol (should probably specify a maximum\n # allowable length of a binary field as a sanity check)\n while (len(self._buffer) > 0 and\n self._buffer.find(b'\\x01', self._binary_length + 1) != -1):\n\n # Need to make sure that we have the entire binary field\n # before continuing the processing\n if (self._binary_length > 0 and\n len(self._buffer) < self._binary_length):\n break\n\n # break up the field\n delim = self._buffer.find(b'\\x01', self._binary_length + 1)\n field = self._buffer[:delim]\n self._buffer = self._buffer[delim+1:]\n\n tag_id, value = self._parse_field(field)\n\n # Is this the start of a message?\n if tag_id == 8:\n if self.is_parsing:\n raise FIXParserError('unexpected tag: 8')\n self.is_parsing = True\n elif not self.is_parsing:\n raise FIXParserError('message must start with tag 8')\n\n if self._debug:\n log_text(self._logger.debug, None,\n f\"tag {tag_id} = {repr(value)}\")\n\n self._update_length(field, tag_id, value)\n self._update_checksum(field, tag_id, value)\n self._update_binary(field, tag_id, value)\n\n # The tag value gets assigned here. 
Due to grouping\n # the container where the update takes place gets\n # changed\n # self._message[tag_id] = value\n self._update_field(tag_id, value)\n\n # Is this the end of a message?\n if tag_id == 10:\n self._receiver.on_message_received(self._message,\n self._message_length,\n self._checksum)\n self.reset()\n\n except FIXLengthTooLongError as err:\n self.reset(flush_buffer=True)\n self._receiver.on_error_received(err)\n except FIXParserError as err:\n self.reset(flush_buffer=True)\n self._receiver.on_error_received(err)\n finally:\n self.is_receiving_data = False", "def decode_packet(data):\n\n opcodes = [(\"AUTH_LOGON_CHALLENGE\", \"\\x00\"), (\"AUTH_LOGON_PROOF\", \"\\x01\")]\n opcode = data[0] # Opcode of the received packet (First byte)\n if opcode == opcodes[0][1]: # Auth Logon challenge\n srp_rcvd = {\n 'error': data[1], # (you should hope that it is always 0)\n 'B': data[3:35], # Read B and skip 1 field (Length_g)\n 'g': data[36:37], # Read g and skip 1 field (Length_n)\n 'N': data[38:70],\n 's': data[70:102], # Read salt\n 'crc': data[102:] # (useless for private servers)\n }\n return srp_rcvd\n if opcode == opcodes[1][1]:\n # Auth logon proof\n if data[1] == \"\\x00\": # Code error: 0\n srp_rcvd = {'login': 1}\n else:\n srp_rcvd = {'login': 0}\n return srp_rcvd", "def receive(self) -> [Packet, None]:\n packet_size_data = self._stream.read(2)\n if not packet_size_data:\n return None\n packet_size = int.from_bytes(packet_size_data, 'little')\n packet_data = self._stream.read(packet_size)\n return packet_from_bytes(packet_data)", "def read_data(self):\n raise NotImplementedError", "def get_readings(self):\n buf = self._read(0x020001)\n data = decode(buf[1:])\n return data", "def parse(self, data):\r\n\r\n parser.Parser.parse(self, data)\r\n\r\n # in case the current state of the parser is finished, must\r\n # reset the state to the start position as the parser is\r\n # re-starting (probably a new data sequence)\r\n if self.state == FINISH_STATE: self.clear()\r\n\r\n # retrieves the size of the data that has been sent for parsing\r\n # and saves it under the size original variable\r\n size = len(data)\r\n size_o = size\r\n\r\n # iterates continuously to try to process all that\r\n # data that has been sent for processing\r\n while size > 0:\r\n\r\n if self.state <= self.state_l:\r\n method = self.states[self.state - 1]\r\n count = method(data)\r\n if count == -1: break\r\n if count == 0: continue\r\n\r\n size -= count\r\n data = data[count:]\r\n\r\n continue\r\n\r\n elif self.state == FINISH_STATE:\r\n self.clear()\r\n\r\n continue\r\n\r\n else:\r\n raise netius.ParserError(\"Invalid state '%d'\" % self.state)\r\n\r\n # in case not all of the data has been processed\r\n # must add it to the buffer so that it may be used\r\n # latter in the next parsing of the message\r\n if size > 0: self.buffer.append(data)\r\n\r\n # returns the number of read (processed) bytes of the\r\n # data that has been sent to the parser\r\n return size_o - size", "def read_and_unpack(self, fmt):\n try:\n return unpack(\n self.byte_order + fmt,\n self.read(calcsize(self.byte_order + fmt)))\n except Exception as e:\n if e.args[0].startswith('unpack requires a buffer of'):\n raise EOFError(e)\n else:\n raise", "def read(self, timeout=None):\n if timeout is None:\n timeout = self.timeout\n endtime = None\n\n if timeout is not None:\n endtime = time.time() + timeout\n\n while endtime is None or time.time() < endtime:\n remaining = None\n if endtime is not None:\n remaining = endtime - time.time()\n try:\n p = 
self._simple_serial.read(remaining)\n except ReadError:\n if self._debug:\n print \"Packet read failed. Try again.\"\n else:\n # Was the packet filtered?\n if p is not None:\n # Not filtered, so return it.\n # In the current TinyOS the packets from the mote are\n # always NoAckDataFrame\n return NoAckDataFrame(p.data)\n\n # Read timeout expired\n raise ReadTimeoutError", "def recv(self) -> tuple:\n (data, c) = self.socket.recvfrom(Rudp.Packet.buffer())\n # print(data)\n (packet, validity) = Rudp.Packet.unpack(data)\n if(validity):\n print(\"Valid Packet Received From: \", c)\n else:\n raise Rudp.InvalidPacket(\"Invalid Packet Received\")\n\n return (packet, validity, c)", "def parse_frame(data):\n test = binascii.hexlify(data)\n # defines the format of received LoRa frame header\n tap_header_format = 'bbhiibbbbib'\n phy_header_format = 'bbb'\n header_format = tap_header_format + phy_header_format\n print header_format\n header_len = struct.calcsize(header_format)\n data_len = len(data)\n if header_len > data_len:\n print 'packet too short'\n return (0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,)\n else:\n # defines the frame format based on header and length of frame\n data_format = header_format + str(data_len - header_len) + 's'\n print data_format\n # print \"tap header: \", header_len\n # print \"data length: \", data_len\n # print \"test length: \", len(test)\n\n unpacked = struct.unpack(data_format, data)\n print unpacked\n # print '-----------------------------------------------------'\n # print \"bin \" + data\n # print 'hex ' + test\n return unpacked", "def read(self, length=1024):\n\n return self.socket.recv(length)", "def recv_packet(self):\r\n self.recv_bytes()\r\n\r\n packet_length_index = 0\r\n \r\n amount_data = len(self.recvBuffer) # available amount of data to read\r\n \r\n if amount_data <= packet_length_index: # just 0's in the buffer\r\n return None\r\n\r\n if len(self.recvBuffer) <= packet_length_index + 2: # length not received\r\n return None\r\n \r\n packet_length = unpack(self.recvBuffer, packet_length_index, 'H')\r\n \r\n if packet_length > len(self.recvBuffer): # packet not fully received\r\n return None\r\n \r\n if packet_length == 0: # some wrong generated packet by server, inc position of reading packet length\r\n packet_length_index += 1\r\n return None\r\n\r\n\t\t# extract packet data\r\n packet = self.recvBuffer[packet_length_index:packet_length_index+packet_length]\r\n\r\n # remaining recv buffer\r\n self.recvBuffer = self.recvBuffer[packet_length_index + packet_length:]\r\n packet_length_index = 0 # next packet length should be at pos 0 again\r\n\r\n return packet", "def read(self, s):\n pass", "def read_packet(self, blocking=True, timeout=None):\n return self.data_queue.get(blocking, timeout=timeout)", "def _parse_packet(packet: StreamMessageResponse) -> Packet:\n if packet is None:\n raise TypeError(\"Packet cannot be None!\")\n\n packet = MessageToDict(packet)\n\n # Decoding Header\n ingress_port_base64 = packet['packet']['metadata'][0]['value'].encode()\n ingress_port = base64.decodebytes(ingress_port_base64) # retrieving ingress_port; not used, yet\n\n # Decoding Payload\n packet = _scapy_parse(packet)\n\n return packet", "def parse_data(self, byte_stream: BytesIO, header: Header) -> Dict[Any, Any]:\n return self.packet_type_to_parser[header.subpacket_id](byte_stream, header)", "def parse(self):\n i = 1\n times = []\n while 1:\n byte = yield\n if byte== 0xaa:\n byte = yield # This byte should be \"\\aa\" too\n if byte== 0xaa:\n # packet 
synced by 0xaa 0xaa\n packet_length = yield\n packet_code = yield\n if packet_code == 0xd4:\n # standing by\n self.state = \"standby\"\n elif packet_code == 0xd0:\n self.state = \"connected\"\n elif packet_code == 0xd2:\n data_len = yield\n headset_id = yield\n headset_id += yield\n self.dongle_state = \"disconnected\"\n else:\n self.sending_data = True\n left = packet_length - 2\n while left>0:\n if packet_code ==0x80: # raw value\n row_length = yield\n a = yield\n b = yield\n value = struct.unpack(\"<h\",chr(b)+chr(a))[0]\n self.dispatch_data(\"raw\", value)\n left -= 2\n elif packet_code == 0x02: # Poor signal\n a = yield\n\n left -= 1\n elif packet_code == 0x04: # Attention (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"attention\", v)\n left-=1\n elif packet_code == 0x05: # Meditation (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"meditation\", v)\n left-=1\n elif packet_code == 0x16: # Blink Strength\n self.current_blink_strength = yield\n \n left-=1\n elif packet_code == 0x83:\n vlength = yield\n self.current_vector = []\n for row in range(8):\n a = yield\n b = yield\n c = yield\n value = a*255*255+b*255+c\n left -= vlength\n self.dispatch_data(\"bands\", self.current_vector)\n packet_code = yield\n else:\n pass # sync failed\n else:\n pass # sync failed", "def _read_message(self):\n header = self._read_amt(9)\n msg_size = struct.unpack_from(\">q\", header, 1)[0]\n return header + self._read_amt(msg_size - 9)", "def parse_data(fp):\n pass", "def read_udp_message(socket):\n data, address = socket.recvfrom(4096)\n data = data.decode('utf-8')\n return json.loads(data), address", "def recv(self):\n self.buf = self.sock_in.recvfrom(65565)\n p = Packet(data=self.buf)\n return p" ]
[ "0.73247546", "0.718705", "0.6907943", "0.6843252", "0.68329656", "0.6731588", "0.66780645", "0.6584551", "0.6576974", "0.6557971", "0.6532748", "0.64306426", "0.6358723", "0.6336314", "0.6299943", "0.62733495", "0.62719905", "0.6264791", "0.6221513", "0.62090373", "0.6196162", "0.6178036", "0.61763775", "0.61639", "0.61509454", "0.6148804", "0.6147878", "0.61356604", "0.6132788", "0.6121392", "0.61079234", "0.6087177", "0.60400647", "0.60398334", "0.60395825", "0.6034079", "0.60236514", "0.6016114", "0.6014966", "0.59972847", "0.59962463", "0.5976903", "0.5969097", "0.59628934", "0.5960542", "0.5957922", "0.59529173", "0.59458077", "0.5945357", "0.59409255", "0.5940762", "0.59384984", "0.5914093", "0.5913789", "0.58965236", "0.5891072", "0.5887645", "0.58871484", "0.5878803", "0.5877804", "0.58617485", "0.5857682", "0.5853763", "0.5843315", "0.5842752", "0.5838613", "0.5838345", "0.58328956", "0.582485", "0.5817228", "0.57937145", "0.57920545", "0.5790446", "0.5789875", "0.5783611", "0.57798296", "0.5778875", "0.57659644", "0.5757146", "0.5752022", "0.575049", "0.5750041", "0.5748603", "0.57377714", "0.57355314", "0.57333094", "0.57162166", "0.5715914", "0.5714279", "0.57123077", "0.57114464", "0.5711222", "0.57108086", "0.5706584", "0.57060087", "0.5702305", "0.57012624", "0.5694126", "0.568779", "0.5666987", "0.56558985" ]
0.0
-1
Make (construct) packet data.
def make(self, **kwargs): return bytes()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_packet(self, type, data): \n return (\"{}\\x00{}\\x00{}\".format(type, data, self.ID)).encode()", "def _make_data(cls, data: 'Data_ARP') -> 'dict[str, Any]': # type: ignore[override]\n return {\n 'htype': data.htype,\n 'ptype': data.ptype,\n 'hlen': data.hlen,\n 'plen': data.plen,\n 'oper': data.oper,\n 'sha': data.sha,\n 'spa': data.spa,\n 'tha': data.tha,\n 'tpa': data.tpa,\n 'payload': cls._make_payload(data),\n }", "def _pack_data( self, data ) : \r\n \r\n # hints = self._translation_table.get( type(data), None )\r\n hints = self._get_hints( data ) \r\n \r\n if hints is None : \r\n \r\n ## #debug: \r\n ## print \"_pack_data(): no hints for data type %s (data repr: %s)\" % (type(data), repr(data))\r\n \r\n # \"one-level recursion\" : \r\n # return self._pack_data( repr(data) )\r\n return self._pack_data( str(data) )\r\n \r\n ## # our special case ( grep 'bugfix' to see why we want a zero block ) \r\n ## if data is None: data = 0\r\n \r\n # else ... \r\n \r\n # 'DescType' + 'length' + 'data'\r\n desctype = hints[0]\r\n if desctype == 'TEXT' : \r\n length = len(data)\r\n data_str = data \r\n else :\r\n length = struct.calcsize( hints[1] )\r\n data_str = struct.pack( hints[1], data )\r\n \r\n length_str = struct.pack('=H', length)\r\n \r\n \r\n return _cat(desctype, length_str, data_str)", "def create_empty_pack_tcp(type, data):\n camps = ['', '', '', '']\n llargada_camps = (7, 13, 7, 50)\n index_camps = 0\n for llargada in llargada_camps:\n camps[index_camps] = camps[index_camps].zfill(llargada)\n index_camps += 1\n return struct.pack('c7s13s7s150s', chr(type), '', '', '', data)", "def createPacket(id):\n\n # Header is type (8), code (8), checksum (16), id (16), sequence (16)\n header = getHeaderData(0, id)\n\n data = 192 * 'Q'\n\n checksum = getChecksum(header + data)\n\n header = getHeaderData(socket.htons(checksum), id)\n\n return header + data", "def _create_packet(self, request):\n\n data_len = struct.pack('<Q', len(request))\n packet = b'ZBXD\\x01' + data_len + request\n\n def ord23(x):\n if not isinstance(x, int):\n return ord(x)\n else:\n return x\n\n logger.debug('Packet [str]: %s', packet)\n logger.debug('Packet [hex]: %s', ':'.join(hex(ord23(x))[2:] for x in packet))\n return packet", "def to_buffer(self) -> bytearray:\n packet = bytearray()\n packet.extend(\n struct.pack(\n \"!ccccHH\",\n \"D\".encode(\"ascii\"),\n \"L\".encode(\"ascii\"),\n \"E\".encode(\"ascii\"),\n \"P\".encode(\"ascii\"),\n int(self.type),\n self.len,\n )\n )\n return packet", "def _make_cpp_data(id, timestamp, instrument, exchange, data):\n return DataCpp(id, timestamp, instrument, exchange, data)", "def create(self):\n\t\t\n\t\tflagbyte = 0\n\t\tif self.synf: flagbyte += 1\n\t\tif self.ackf: flagbyte += 2\n\t\t\n\t\tself.header = struct.pack(\">IBIII\", self.connid, flagbyte, self.seqn, self.ackn, self.recv)\n\t\t\n\t\tself.data = self.header+self.payload", "def __init__(self, sequence_number=None, packet_id=0, data_length=None,\n p1=0, p2=0, p3=0, p4=0, p5=0, data=b''):\n if sequence_number is None:\n sequence_number = StenoPacket.sequence_number\n StenoPacket._increment_sequence_number()\n if data is not None:\n # Data is padded to 8 bytes\n remainder = len(data) % 8\n if remainder:\n data += b'\\x00' * (8 - remainder)\n if data_length is None:\n data_length = len(data)\n self.sequence_number = sequence_number\n self.packet_id = packet_id\n self.data_length = data_length\n self.p1 = p1\n self.p2 = p2\n self.p3 = p3\n self.p4 = p4\n self.p5 = p5\n self.data = data", "def 
_create_data_header(self, data):\n return pack(self.HEADER_PACK_STR, self.HEADER_VERSION, len(data))", "def __init__(self, bytes = None):\n version = pcs.Field(\"version\", 4, default = 4)\n hlen = pcs.Field(\"hlen\", 4)\n tos = pcs.Field(\"tos\", 8)\n length = pcs.Field(\"length\", 16)\n id = pcs.Field(\"id\", 16)\n flags = pcs.Field(\"flags\", 3)\n offset = pcs.Field(\"offset\", 13)\n ttl = pcs.Field(\"ttl\", 8, default = 64)\n protocol = pcs.Field(\"protocol\", 8)\n checksum = pcs.Field(\"checksum\", 16)\n src = pcs.Field(\"src\", 32)\n dst = pcs.Field(\"dst\", 32)\n pcs.Packet.__init__(self,\n [version, hlen, tos, length, id, flags, offset,\n ttl, protocol, checksum, src, dst],\n bytes = bytes)\n # Description MUST be set after the PCS layer init\n self.description = \"IPv4\"\n\n\n if (bytes != None):\n offset = self.hlen << 2\n self.data = self.next(bytes[offset:len(bytes)])\n else:\n self.data = None", "def create_packet_definition(packet_to_send):\n source_mac = \"00:00:00:00:00:01\"\n destination_mac = \"00:00:00:00:00:02\"\n source_ip = \"10.10.10.1\"\n destination_ip = \"10.10.10.2\"\n source_ip6 = 'fe80::214:f2ff:fe07:af0'\n destination_ip6 = 'ff02::1'\n sport = 1\n dport = 2\n tos = 4\n if packet_to_send[\"type\"] == \"ip\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}},\n {\"TCP\": {}})\n elif packet_to_send[\"type\"] == \"tagged_ip\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x8100}},\n {\"Dot1Q\": {\"vlan\": packet_to_send[\"vlan\"],\n \"prio\": packet_to_send[\"priority\"]}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}})\n elif packet_to_send[\"type\"] == \"tcp\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}},\n {\"TCP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"udp\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}},\n {\"UDP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"double_tagged_ip\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x8100}},\n {\"Dot1Q\": {\"vlan\": packet_to_send[\"outer_vlan\"], \"type\": 0x8100,\n \"prio\": packet_to_send[\"outer_priority\"]}},\n {\"Dot1Q\": {\"vlan\": packet_to_send[\"inner_vlan\"], \"type\": 0x0800,\n \"prio\": packet_to_send[\"inner_priority\"]}},\n {\"IP\": {\"dst\": destination_ip, \"src\": source_ip, \"tos\": tos}})\n elif packet_to_send[\"type\"] == \"arp\":\n packet_definition = (\n {\"Ether\": {\"src\": source_mac, \"dst\": 'FF:FF:FF:FF:FF:FF', \"type\": 0x0806}},\n {\"ARP\": {\"op\": 1, \"hwsrc\": source_mac,\n \"psrc\": source_ip, \"pdst\": destination_ip}},)\n elif packet_to_send[\"type\"] == \"arp_reply_tagged\":\n packet_definition = ({\"Ether\": {\"src\": source_mac, \"dst\": destination_mac, \"type\": 0x8100}},\n {\"Dot1Q\": {\"vlan\": 2}},\n {\"ARP\": {\"op\": 2, \"hwsrc\": source_mac, \"hwdst\": destination_mac,\n \"pdst\": destination_ip, \"psrc\": source_ip}}, )\n elif packet_to_send[\"type\"] == \"icmp\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x0800}},\n {\"IP\": {\"dst\": destination_ip, 
\"src\": source_ip, \"proto\": 1}},\n {\"ICMP\": {\"type\": 8, \"code\": 0}})\n elif packet_to_send[\"type\"] == \"ipv6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"plen\": 64, \"tc\": 225}})\n elif packet_to_send[\"type\"] == \"tcp6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"tc\": 224, \"nh\": 6}},\n {\"TCP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"udp6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"tc\": 224, \"nh\": 17}},\n {\"UDP\": {\"sport\": sport, \"dport\": dport}})\n elif packet_to_send[\"type\"] == \"icmp6\":\n packet_definition = ({\"Ether\": {\"dst\": destination_mac, \"src\": source_mac, \"type\": 0x86dd}},\n {\"IPv6\": {\"dst\": destination_ip6, \"src\": source_ip6, \"version\": 6,\n \"hlim\": 255, \"tc\": 224, \"nh\": 1}},\n {\"ICMP\": {\"type\": 8, \"code\": 0}})\n return packet_definition", "def post_build(self, pkt, pay):\n if self.length is None:\n pkt = struct.pack(\"!I\", len(pay) + 2) + pkt[4:]\n return pkt + pay", "def _make_data(self, approximate_length):\n fragments = []\n so_far = 0\n while so_far < approximate_length:\n fragment = ('%d:' % so_far).encode('utf-8')\n so_far += len(fragment)\n fragments.append(fragment)\n return six.b('').join(fragments)", "def makePacketHeader(bytes):\n id = struct.unpack('!H', bytes[0:2])[0]\n length = struct.unpack('!H', bytes[2:4])[0]\n packet_count = struct.unpack('!I',bytes[4:8])[0]\n return PacketHeader(id, length, packet_count)", "def create_empty_pack(type, data):\n camps = ['', '', '', '']\n llargada_camps = (7, 13, 7, 50)\n index_camps = 0\n for llargada in llargada_camps:\n camps[index_camps] = camps[index_camps].zfill(llargada)\n index_camps += 1\n return struct.pack('c7s13s7s50s', chr(type), '', '', '', data)", "def pack(self):\n \n data = \"\".join(map(lambda arg: arg.pack(), self))\n data = struct.pack(\">2H\", self.type, len(data)) + data\n \n return data", "def __init__(self, bytes = None, timestamp = None):\n from socket import IPPROTO_TCP\n src = pcs.Field(\"src\", 32)\n dst = pcs.Field(\"dst\", 32)\n reserved = pcs.Field(\"reserved\", 8, default = 0)\n protocol = pcs.Field(\"protocol\", 8, default = IPPROTO_TCP)\n length = pcs.Field(\"length\", 16)\n pcs.Packet.__init__(self, [src, dst, reserved, protocol, length],\n bytes = bytes)\n # Description MUST be set after the PCS layer init\n self.description = inspect.getdoc(self)\n if timestamp == None:\n self.timestamp = time.time()\n else:\n self.timestamp = timestamp\n\n self.data = None", "def packetize(cls, source, raw_data):\n pkt = cls(source, raw_data)\n\n if pkt.type not in DGTL.descriptors.keys():\n raise Warning('Unsupported packet type! 
(%s)' % pkt.type)\n\n pkt.set_decoder(DGTL.descriptors[pkt.type][2])\n\n return pkt", "def build_packets(self):\n from scapy.all import IP, TCP\n return IP()/TCP()", "def create_packet(id, seq, data_size):\n\n # Random sequence of characters.\n payload = ''\n for k in range(data_size):\n payload += chr(random.randint(65, 65+25))\n\n # Create ICMP echo packet.\n echo = dpkt.icmp.ICMP.Echo()\n echo.id = id\n echo.seq = seq\n echo.data = payload\n\n icmp = dpkt.icmp.ICMP()\n icmp.type = dpkt.icmp.ICMP_ECHO\n icmp.data = echo\n\n # Return data packet as string representation.\n packet = str(icmp)\n\n # Done.\n return (payload, packet)", "def encode_packet(self, packet):\n\n\t\ttry:\n\t\t\toutput = self.pack('ubyte', packet.ident)\n\t\t\tappend = ''\n\t\t\t#0x17\n\t\t\tif packet.ident == 0x17:\n\t\t\t\tif packet.data['unknown'] > 0:\n\t\t\t\t\tfor i in ('x2','y2','z2'):\n\t\t\t\t\t\tappend += self.pack('short', packet.data[i])\n\t\t\t#0x33\n\t\t\tif packet.ident in (0x33, 0x34):\n\t\t\t\tpacket.data['data_size'] = len(packet.data['data'])\n\t\t\t\tappend += self.pack_array_fast('byte', packet.data['data'])\n\t\t\t\n#\t\t\t#0x34\n#\t\t\tif packet.ident == 0x34:\n#\t\t\t\tcoords = []\n#\t\t\t\tbtypes = []\n#\t\t\t\tmetadata = []\n#\t\t\t\tfor i in packet.data['blocks']:\n#\t\t\t\t\tcoords.append(i['x'] << 12 | i['z'] << 8 | i['y'])\n#\t\t\t\t\tbtypes.append(i['type'])\n#\t\t\t\t\tmetadata.append(i['metadata'])\n#\t\t\t\t\n#\t\t\t\tpacket.data['data_size'] = len(coords)\n#\t\t\t\tappend += self.pack_array_fast('short', coords)\n#\t\t\t\tappend += self.pack_array_fast('byte', btypes)\n#\t\t\t\tappend += self.pack_array_fast('byte', metadata)\n\t\t\t\n\t\t\t#0x3C\n\t\t\tif packet.ident == 0x3C:\n\t\t\t\tarray = []\n\t\t\t\tfor i in packet.data['blocks']:\n\t\t\t\t\tarray += [i['x'], i['y'], i['z']]\n\t\t\t\tpacket.data['data_size'] = len(packet.data['blocks'])\n\t\t\t\tappend += self.pack_array_fast('byte', array)\n\t\t\t\n\t\t\t#0x68\n\t\t\tif packet.ident == 0x68:\n\t\t\t\tpacket.data['data_size'] = len(packet.data['slots_data'])\n\t\t\t\tappend += self.pack_array('slot', packet.data['slots_data'])\n\t\t\t#0x82: Sign\n\t\t\tif packet.ident == 0x82:\n\t\t\t\tfor i in range(4):\n\t\t\t\t\tpacket.data[\"line_%s\" % (i+1)] = packet.data[\"text\"][i]\n\t\t\t#0x83\n\t\t\tif packet.ident == 0x83:\n\t\t\t\tpacket.data['data_size'] = len(packet.data['data'])\n\t\t\t\tappend += self.pack_array_fast('byte', packet.data['data'])\n\t\t\n\t\t\tfor i in self.get_struct(packet):\n\t\t\t\toutput += self.pack(i[0], packet.data[i[1]])\n\t\t\t\n\t\t\toutput += append\n\t\t\treturn output\n\t\texcept Exception:\n\t\t\traise", "def pack(self):\n return self._STRUCT.pack(\n self._SYNC, self.sequence_number, self.packet_id, self.data_length,\n self.p1, self.p2, self.p3, self.p4, self.p5\n ) + (\n pack('%ss' % len(self.data), self.data)\n )", "def build_command_packet(self, command):\n packet = bytearray()\n # All option fields are 0\n packet.append(0)\n packet.append(0)\n packet.append(0)\n packet.append(command)\n return packet", "def makePacket(bytes):\n header = makePacketHeader(bytes[0:8])\n md5 = bytes[8:24]\n data = bytes[24:24 + header.length]\n p = Packet(header, data)\n if p.md5.digest() != md5:\n raise errors.NetworkError(\n 'Wrong MD5-checksum! 
(expected: %s, got: %s)' % (\n p.md5.hexdigest(),\n binascii.b2a_hex(md5)))\n return p", "def create_message(self, packet):\n self._header.packet_len = len(bytes(packet))\n \n frame_bytes = super(EthernetTransport, self).create_message(packet) \n \n # Update control counter for next frame\n self._header.update_control()\n \n return bytes(frame_bytes)", "def _pack(self) -> bytes:\n return struct.pack(self.PACKAGING_FORMAT, self.type, self.length)", "def makePacket(self,dhash,index,val=None):\n msg = STX + self.addr + CMD\n if val is None:\n msgtype = DATA_READ\n else:\n msgtype = DATA_WRITE\n msg += msgtype\n payload = dhash + chr(index)\n if val is not None:\n payload += struct.pack('>I',val)\n table = {STX : ESC_STX, CR : ESC_CR, ESC : ESC_ESC}\n for i in range(len(payload)):\n if payload[i] in table:\n msg += ESC\n msg += table[payload[i]]\n else:\n msg += payload[i]\n cksum = self.checksum(self.addr+CMD+msgtype+payload)\n msg += cksum\n msg += CR\n return msg", "def gen_ieee_packet(self, data):\n\t\tpacket = Dot15d4FCS() / Dot15d4Data() / Raw(load=data)\n\n\t\tpacket.fcf_srcaddrmode = 2\n\t\tpacket.fcf_destaddrmode = 2\n\n\t\tpacket.fcf_panidcompress = True\n\t\tpacket.fcf_ackreq = True\n\t\tpacket.seqnum = self.seqnum\n\n\t\tpacket.dest_panid = self.link_config.dest_panid\n\n\t\tpacket.dest_addr = self.link_config.destination.get_short_address()\n\t\tpacket.src_addr = self.link_config.source.get_short_address()\n\n\t\treturn packet.build()", "def make_packet(message, host):\n\tRESOURCE = \"/\"\t\t\t\t# dummy resource\n\t\n\t# First line is the request\n\trequest = HTTPConstants.GET_REQUEST + \" \" + RESOURCE + \" \" + HTTPConstants.VERSION + HTTPConstants.CRLF\n\t\n\t# Next are the headers\n\theaders = \"Host: {0}\".format(host) + HTTPConstants.CRLF\n\t\n\t# Construct the head\n\thead = request + headers\n\t\n\t# Construct the body\n\tbody = message + HTTPConstants.CRLF\n\t\n\t# Assembly into a packet, where the head and body (message) are separated by a blank line (CRLF), and the EOM is\n\t# denoted by a blank line\n\treturn head + HTTPConstants.CRLF + body + HTTPConstants.CRLF", "def __init__(self, command=None, data_length=0, data=[]):\n if command is not None:\n self.command = command\n self.data_length = data_length\n self.data = data\n self.encode()\n else:\n self.message_length = 0\n self.command = 0\n self.data_length = 0\n self.data = []\n self.string = \"\"", "def read_packet(self):\n\n\t\t#self.debug(\"READ BUFFER SIZE: %d\" % len(self.buff))\n\t\tbackup = self.buff[:]\n\t\tpacket = Packet()\n\t\ttry:\n\t\t\tpacket.direction = self.node\n\t\t\tpacket.ident = self.unpack('ubyte')\n\t\t\t\n\t\t\t#Defined structs from huge dict\n\t\t\tfor datatype, name in self.get_struct(packet):\n\t\t\t\t# this populates packet.data with {name: value}\n\t\t\t\tpacket.data[name] = self.unpack(datatype)\n\n\t\t\t# I believe the following are packet-type specific fixes for variable-length packets.\n\n\t\t\t#0x17\n\t\t\tif packet.ident == 0x17:\n\t\t\t\tif packet.data['unknown'] > 0:\n\t\t\t\t\tpacket.data['x2'] = self.unpack('short')\n\t\t\t\t\tpacket.data['y2'] = self.unpack('short')\n\t\t\t\t\tpacket.data['z2'] = self.unpack('short')\n\t\t\n\t\t\t#0x33\n\t\t\tif packet.ident in (0x33, 0x34):\n\t\t\t\tpacket.data['data'] = self.unpack_array_fast('byte', packet.data['data_size'])\n\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\n#\t\t\t#0x34\n#\t\t\tif packet.ident == 0x34:\n#\t\t\t\tcoords = self.unpack_array_fast('short', packet.data['data_size'])\n#\t\t\t\tbtype = self.unpack_array_fast('byte', 
packet.data['data_size'])\n#\t\t\t\tmetadata = self.unpack_array_fast('byte', packet.data['data_size'])\n#\t\t\t\tpacket.data[\"blocks\"] = []\n#\t\t\t\tfor i in zip(coords, btype, metadata):\n#\t\t\t\t\tblock = {}\n#\t\t\t\t\tblock[\"x\"] =\t\ti[0] >> 12\n#\t\t\t\t\tblock[\"z\"] = 0x0F & i[0] >> 8\n#\t\t\t\t\tblock[\"y\"] = 0xFF & i[0]\n#\t\t\t\t\tblock[\"type\"] = i[1]\n#\t\t\t\t\tblock[\"metadata\"] = i[2]\n#\t\t\t\t\tpacket.data[\"blocks\"].append(block)\n#\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\n\t\t\t#0x3C\n\t\t\tif packet.ident == 0x3C:\n\t\t\t\trecords = self.unpack_array_fast('byte', packet.data['data_size']*3)\n\t\t\t\ti = 0\n\t\t\t\tpacket.data[\"blocks\"] = []\n\t\t\t\twhile i < packet.data['data_size']*3:\n\t\t\t\t\tpacket.data[\"blocks\"].append(dict(zip(('x','y','z'), records[i:i+3])))\n\t\t\t\t\ti+=3\n\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\n\t\t\t#0x68\n\t\t\tif packet.ident == 0x68:\n\t\t\t\tpacket.data[\"slots_data\"] = self.unpack_array('slot', packet.data[\"data_size\"])\n\t\t\t\tdel packet.data[\"data_size\"]\n\t\t\t#0x82:\n\t\t\tif packet.ident == 0x82:\n\t\t\t\tpacket.data[\"text\"] = []\n\t\t\t\tfor i in range(4):\n\t\t\t\t\tpacket.data[\"text\"].append(packet.data[\"line_%s\" % (i+1)])\n\t\t\t\t\t\n\t\t\t#0x83\n\t\t\tif packet.ident == 0x83:\n\t\t\t\tpacket.data[\"data\"] = self.unpack_array_fast('byte', packet.data['data_size'])\n\t\t\t\tdel packet.data[\"data_size\"]\n\n\t\t\t# Sets packet.original to the byte string that the packet was decoded from.\n\t\t\tpacket.original = backup[:len(backup) - len(self.buff)]\n\n\t\t\treturn packet\n\n\t\texcept IncompleteData:\n\t\t\tself.buff = backup\n\t\t\treturn None\n\t\texcept Exception, ex:\n\t\t\tself.buff = backup\n\t\t\tex.args += (self.buff[20:],)\n\t\t\traise", "def __init__(self, data):\n self.bytes = bytearray(data)", "def pack(self):\n\n #define UID_SIZE 6\n\t#typedef struct {\n # uint8_t userId[UID_SIZE];\n # uint16_t sessionCtr;\t\t# NOTE: this is use_ctr\n # uint24_t timestamp;\n # uint8_t sessionUse;\t\t# NOTE: this is session_ctr\n # uint16_t rnd;\n # uint16_t crc;\n\t#} TICKET;\n fmt = \"< %is H HB B H\" % (pyhsm.defines.UID_SIZE)\n\n ts_high = (self.timestamp & 0x00ff0000) >> 16\n ts_low = self.timestamp & 0x0000ffff\n\n res = struct.pack(fmt, self.user_id, \\\n self.use_ctr, \\\n ts_low, ts_high, \\\n self.session_ctr, \\\n self.rnd)\n crc = 0xffff - crc16(res)\n\n return res + struct.pack('<H', crc)", "def craft_payload(self):\n\n junk_size = randint(1, 16)\n\n junk_data = bytearray(\n [\n choice([i for i in range(0, 256)])\n for i in range(0, junk_size)\n ])\n\n challenge_request = bytes(b'\\x00' * 0x100)\n\n payload = \\\n struct.pack('B', junk_size) + \\\n junk_data + \\\n struct.pack('B', (junk_size*2 & 0xff)) + \\\n challenge_request\n\n return payload", "def encode(self) -> bytes:\n\n # unsigned char dmac[6];\n # unsigned char smac[6];\n # uint16_t ethertype;\n # unsigned char payload[];\n\n t = struct.pack(\"H\", socket.htons(self.typ))\n return self.dmac + self.smac + t + self.payload", "def pack(self):\n values = (\n self.start,\n pressure_to_pa(self.peep),\n self.freq,\n self.ratio,\n pressure_to_pa(self.pressure + self.peep),\n self.oxygen)\n\n print(values)\n\n s = struct.Struct('H'*len(values))\n packed_data = s.pack(*values)\n return packed_data", "def __construct_data(self, cmd_mode, cmd_value):\n if cmd_mode not in command_mode.values():\n raise TypeError(\"%s: specified cmd_mode is not valid.\", self.sensor_name)\n if not isinstance(cmd_value, int):\n raise TypeError(\"%s: 
cmd_value must be of type %s.\", self.sensor_name, type(int))\n\n ret_val = bytearray()\n ret_val.append(cmd_mode)\n ret_val.append(cmd_value)\n return ret_val", "def pack(data):\r\n data['length'] = [len(text) for text in data['text']] \r\n data['longest_word_len'] = [longest_word_len(text) for text in data['text']] \r\n data['mean_word_len'] = [mean_word_len(text) for text in data['text']] \r\n data['subject_len'] = [len(subject) for subject in data['subject']]\r\n data['stop_words_num'] = [stop_words_count(text) for text in data['text']]\r\n data['pounctuation_num'] = [punctuation_count(text) for text in data['text']]\r\n data['text'] = [clean_text(text) for text in data['text']]", "def stateless_pack(packet, to_server):\n\tdecoder = PacketDecoder(to_server)\n\treturn decoder.encode_packet(packet)", "def createData(self, address: ghidra.program.model.address.Address, datatype: ghidra.program.model.data.DataType) -> ghidra.program.model.listing.Data:\n ...", "def _pack(self):\n to_pack = {\n \"remote_ip\": self.remote_ip,\n \"remote_port\": self.remote_port,\n \"min_fidelity\": self.min_fidelity,\n \"max_time\": self.max_time,\n \"num_pairs\": self.num_pairs,\n \"priority\": self.priority,\n \"store\": self.store,\n \"atomic\": self.atomic,\n \"measure_directly\": self.measure_directly,\n }\n request_Bitstring = bitstring.pack(self.PACKAGING_FORMAT, **to_pack)\n requestH = request_Bitstring.tobytes()\n\n return requestH", "def build(self, data: dict):", "def packet_from_bytes(data: bytes) -> Packet:\n packet = PacketBase.from_buffer_copy(data)\n struct = packet.struct_type()\n if len(data) != struct.SIZE:\n raise racetools.errors.PacketSizeMismatch(struct.SIZE, len(data))\n return struct.from_buffer_copy(data)", "def __init__(self, bytes = None):\n hrd = pcs.Field(\"hrd\", 16, default = 1)\n pro = pcs.Field(\"pro\", 16, default = 0x800)\n hln = pcs.Field(\"hln\", 8, default = 6)\n pln = pcs.Field(\"pln\", 8, default = 4)\n op = pcs.Field(\"op\", 16)\n sha = pcs.StringField(\"sha\", 48)\n spa = pcs.Field(\"spa\", 32)\n tha = pcs.StringField(\"tha\", 48)\n tpa = pcs.Field(\"tpa\", 32)\n \n pcs.Packet.__init__(self, [hrd, pro, hln, pln, op,\n sha, spa, tha, tpa], bytes = bytes)\n self.description = \"ARP\"\n self.data = None", "def _fill_cdata(cls):\n\n funcs = {}\n for key, name in [(\"b\", \"char\"), (\"h\", \"short\"),\n (\"i\", \"int\"), (\"q\", \"longlong\")]:\n for echar, esuffix in [(\"<\", \"le\"), (\">\", \"be\")]:\n esuffix = \"_\" + esuffix\n for unsigned in [True, False]:\n s = struct.Struct(echar + (key.upper() if unsigned else key))\n get_wrapper = lambda f: lambda *a, **k: f(*a, **k)[0]\n unpack = get_wrapper(s.unpack)\n unpack_from = get_wrapper(s.unpack_from)\n\n def get_unpack_from(s):\n def unpack_from(data, offset=0):\n return s.unpack_from(data, offset)[0], offset + s.size\n return unpack_from\n\n unpack_from = get_unpack_from(s)\n pack = s.pack\n\n prefix = \"u\" if unsigned else \"\"\n if s.size == 1:\n esuffix = \"\"\n bits = str(s.size * 8)\n\n if unsigned:\n max_ = 2 ** (s.size * 8) - 1\n min_ = 0\n else:\n max_ = 2 ** (s.size * 8 - 1) - 1\n min_ = - 2 ** (s.size * 8 - 1)\n\n funcs[\"%s%s_min\" % (prefix, name)] = min_\n funcs[\"%s%s_max\" % (prefix, name)] = max_\n funcs[\"%sint%s_min\" % (prefix, bits)] = min_\n funcs[\"%sint%s_max\" % (prefix, bits)] = max_\n\n funcs[\"%s%s%s\" % (prefix, name, esuffix)] = unpack\n funcs[\"%sint%s%s\" % (prefix, bits, esuffix)] = unpack\n funcs[\"%s%s%s_from\" % (prefix, name, esuffix)] = unpack_from\n funcs[\"%sint%s%s_from\" % 
(prefix, bits, esuffix)] = unpack_from\n funcs[\"to_%s%s%s\" % (prefix, name, esuffix)] = pack\n funcs[\"to_%sint%s%s\" % (prefix, bits, esuffix)] = pack\n\n for key, func in iteritems(funcs):\n setattr(cls, key, staticmethod(func))", "def _Encode(msgtype, transaction_id, *attrs):\n transaction_id = str(transaction_id)\n if len(transaction_id) != 12:\n raise ValueError('transactionid %r must be exactly 12 bytes'\n % transaction_id)\n print(attrs)\n for attrtype, attrval in attrs:\n print(attrtype,attrval)\n\n attrtext = ''.join(_EncodeAttr(attrtype, attrval) for attrtype, attrval in attrs)\n pkt = (struct.pack('!HHI', msgtype, len(attrtext), MAGIC_COOKIE) +\n transaction_id +\n attrtext)\n return pkt", "def pack(self):\n\n datalen = 0\n if self._data:\n datalen = len(self._data)\n\n header = struct.pack(CCPMessage.HEADER_FMT,\n self._version, self._msg_type, datalen, self._conn_id )\n\n if datalen > 0:\n msg = header + self._data\n else:\n msg = header\n\n return msg", "def prepare_packet(msg_parts, nonce=None, add_time=True):\n if not isinstance(msg_parts, list):\n msg_parts = [msg_parts, \"\", \"\"]\n else:\n while len(msg_parts) < 3:\n msg_parts.append(\"\")\n for ind, mp in enumerate(msg_parts):\n if not isinstance(mp, str):\n msg_parts[ind] = str(mp)\n has_ts = c.TRUE_STR if add_time else c.FALSE_STR\n has_nonce = c.TRUE_STR if nonce is not None else c.FALSE_STR\n eofp = str(len(msg_parts[0])).zfill(5)\n eosp = str(len(msg_parts[0] + msg_parts[1])).zfill(5)\n header = has_ts + has_nonce + eofp + eosp\n res_msg = header + \"\".join(msg_parts) + (nonce if nonce is not None else \"\")\n res_msg += PacketOrganiser.get_new_timestamp() if add_time else \"\"\n return res_msg", "def __init__(self, data: bytes):\n super().__init__()\n self._expected_packet_type = MessageType.MAIN\n self._expected_data_size = 34\n self._data_raw = b''\n self._packet_type = MessageType.UNDEFINED\n self._packet_number = 0\n self.time_stamp_1MHz = 0\n self.accelerometer_x = 0\n self.accelerometer_y = 0\n self.accelerometer_z = 0\n self.magnetometer_x = 0\n self.magnetometer_y = 0\n self.magnetometer_z = 0\n self.gyroscope_x = 0\n self.gyroscope_y = 0\n self.gyroscope_z = 0\n self.quaternion_q0 = 0\n self.quaternion_q1 = 0\n self.quaternion_q2 = 0\n self.quaternion_q3 = 0\n self.flags = 0\n self.shield_and_kinetis_byte = 0\n self._is_valid = False\n self._parse_data(data)", "def packetize(self):\n byte_str = b''\n\n # Bit string to accumulate bit values until we are ready to convert it into bytes\n bit_str = \"\"\n\n for field in self.fields:\n #if the current field is a special type, the bit_str value to the byte string and clear the accumulated bit_str.\n if not isinstance(field.size, int) and len(bit_str) != 0:\n byte_str += self.encode_bit_str(bit_str)\n bit_str = \"\"\n if field.size == NULL_TERMINATE:\n byte_str += self.encode_null_term(field.value)\n elif field.size == PREFIX_LENGTH:\n byte_str += self.encode_prefix_length(field.value)\n elif field.size == PREFIX_LEN_NULL_TERM:\n byte_str += self.encode_prefix_length_null_term(field.value)\n elif field.size == IPv4:\n byte_str += self.encode_ipv4(field.value)\n elif field.size == 1: # One bit, just add it to our bit string.\n bit_str += \"0\" if field.value == 0 else \"1\"\n else:\n if isinstance(field.value, int):\n bit_str += \"0\" * (field.size - len(bin(field.value)[2:])) + bin(field.value)[2:]\n elif isinstance(field.value, bytes):\n bit_str += field.value.decode('latin-1')\n #clear the bit string one last time\n if len(bit_str) != 0:\n byte_str += 
self.encode_bit_str(bit_str)\n bit_str = \"\"\n\n return byte_str", "def toData(self):\n\n lines = []\n # 1. Request and protocol version\n lines.append(self.request + \" \" + BANNER)\n # 2. Request arguments\n lines.extend(['%s: %s' % (arg, self.args[arg]) for arg in self.args])\n # 3. End of message (double CR-LF)\n data = \"\\r\\n\".join(lines) + \"\\r\\n\\r\\n\"\n # In debug mode, parse our own message to check it is well-formed\n assert checkMessage(data), \"Bad generated message: \" + data\n return data", "def __init__(self, msg_id=0, xtd=0, rtr=0, ndata=0, data=() ):\r\n self.msg_id = msg_id\r\n self.rtr = rtr\r\n self.xtd = xtd\r\n self.ndata = ndata\r\n self.data = data # tuple with length 0..8\r\n self.timestamp = time.time() # Timestamp of object creation\r", "def payload_creation(self, id, data):\n\n payload = {\n 'UUID': self.uuid,\n 'ID': id,\n 'RATE': self.rate,\n 'GPIO': data[2],\n 'DDL': self.ddl,\n 'VALUE': data[1],\n 'TIME': data[0]\n }\n return payload", "def fill_data(self, data):\n self._data = data\n\n self._data_length = data[1:3]\n self._frame_id = data[4]\n self._address = XbeeAddress(data[5:9], data[9:13], data[13:15])\n self._at_command = data[15:17]\n self._command_status = data[17]\n try:\n self._command_data = data[18:21]\n self._checksum = data[22]\n except IndexError:\n self._command_data = None\n self._checksum = data[18]", "def createByte(self, address: ghidra.program.model.address.Address) -> ghidra.program.model.listing.Data:\n ...", "def __init__(self, data_size):\n try:\n self.data_size = int(data_size)\n except ValueError as exc:\n raise ValueError(\"Exepected arg 'size' to be int: \" + str(exc))\n self.packet = bytearray()\n self.in_data = False\n self.header_pos = 0\n self.transport = None", "def __init__(self, data):\n # check if dataset contains time information\n # (fetched from bootloader storage)\n if len(data) == 61:\n (_, seconds, minutes, hours, days, months, years) = struct.unpack(\n '<55sBBBBBB', data)\n self.date = datetime(2000 + years, months, days, hours, minutes,\n seconds)\n\n # Only parse preceding data\n data = data[:55]\n power = [0, 0]\n kWh = [0, 0]\n MWh = [0, 0]\n (_, digital, speed, active, power[0], kWh[0], MWh[0], power[1], kWh[1],\n MWh[1]) = struct.unpack('<32sH4sBLHHLHH', data)\n\n analog = struct.unpack(\n '<{}{}'.format('H' * 16, 'x' * (len(data) - 32)), data)\n\n self.analog = {}\n for channel in range(0, 16):\n self.analog[channel + 1] = round(\n self._convert_analog(analog[channel]), 3)\n\n self.digital = {}\n for channel in range(0, 16):\n self.digital[channel + 1] = self._convert_digital(digital, channel)\n\n '''\n self.speed = {}\n for channel in range(0, 4):\n self.speed[channel + 1] = round(\n self._convert_speed(speed[channel]), 3)\n \n\n self.energy = {}\n for channel in range(0, 2):\n self.energy[channel + 1] = round(\n self._convert_energy(MWh[channel], kWh[channel], active,\n channel), 3)\n \n\n self.power = {}\n for channel in range(0, 2):\n self.power[channel + 1] = round(\n self._convert_power(power[channel], active, channel), 3)\n '''", "def _pack(self):\n cqcH = struct.pack(self.PACKAGING_FORMAT, self.version, self.tp, self.app_id, self.length)\n return cqcH", "def marshall(data):\n\tpacket_size = 1000 # can modify this\n\tpackets = [data[i:i+packet_size] for i in range(0, len(data), packet_size)]\n\tpackets_marshalled = []\n\tfor packet in packets:\n\t\ts = \" \".join(str(data) for data in packet)\n\t\tpackets_marshalled.append(s)\n\n\treturn packets_marshalled", "def data_payload(data, metadata, 
data_type, data_count):\n\n # Make the data payload.\n if isinstance(data, bytes):\n # Assume bytes are big-endian; we have no way of checking.\n data_payload = data\n elif (isinstance(data, backend.array_types) or\n isinstance(data, Iterable)):\n data_payload = backend.python_to_epics(\n native_type(data_type), data, byteswap=True)\n elif data is None:\n data_payload = b''\n else:\n raise CaprotoTypeError(\"data given as type we cannot handle - {}\"\n \"\".format(type(data)))\n\n md_payload = parse_metadata(metadata, data_type)\n size, pad_payload = pad_buffers(md_payload, data_payload)\n if pad_payload:\n return size, md_payload, data_payload, pad_payload\n else:\n return size, md_payload, data_payload", "def __init__(self, data):\n\t\tself.protocol_version, self.le_state, self.playback_state, \\\n\t\t self.source, self.le_flags, self.playback_flags, \\\n\t\t self.source_flags, self.fullness, self.point_rate, \\\n\t\t self.point_count = \\\n\t\t\tstruct.unpack(\"<BBBBHHHHII\", data)", "def setupPacket(self):\n return None", "def _pack(self):\n header = struct.pack(self.PACKAGING_FORMAT, self.cmd_length)\n return header", "def buildPackets(self):\n return self.input", "def createData(program: ghidra.program.model.listing.Program, addr: ghidra.program.model.address.Address, newDataType: ghidra.program.model.data.DataType, length: int, stackPointers: bool, clearMode: ghidra.program.model.data.DataUtilities.ClearDataMode) -> ghidra.program.model.listing.Data:\n ...", "def create_dt_response_packet(self, data, port):\n now = datetime.datetime.now()\n textual_representation = \"\"\n\n # Magic number (2 bytes)\n byte_1 = 0x49\n byte_2 = 0x7E\n\n # Packet type (2 bytes)\n byte_3 = 0x00\n byte_4 = 0x02\n\n # Language Code (2 bytes)\n byte_5 = 0x00\n byte_6 = 0x01 # Default to English.\n\n # English\n if port == self.ports['English']:\n\n # Date request\n if ((data[4] << 8) | data[5]) == 0x0001:\n textual_representation = \"Today’s date is {} {:0>2}, {:0>4}\".format(\n now.strftime(\"%B\"), now.day, now.year)\n\n # Time request\n elif ((data[4] << 8) | data[5]) == 0x0002:\n textual_representation = \"The current time is {:0>2}:{:0>2}\".format(\n now.hour, now.minute)\n\n # Te reo Maori\n elif port == self.ports['Te reo Maori']:\n byte_6 = 0x02\n\n # Date request\n if ((data[4] << 8) | data[5]) == 0x0001:\n textual_representation = \"Ko te ra o tenei ra ko {} {:0>2}, {:0>4}\".format(\n cfg.MONTHS_MAORI[now.month-1], now.day, now.year)\n\n # Time request\n elif ((data[4] << 8) | data[5]) == 0x0002:\n textual_representation = \"Ko te wa o tenei wa {:0>2}:{:0>2}\".format(\n now.hour, now.minute)\n\n # German\n elif port == self.ports['German']:\n byte_6 = 0x03\n\n # Date request\n if ((data[4] << 8) | data[5]) == 0x0001:\n textual_representation = \"Heute ist der {:0>2}. 
{} {:0>4}\".format(\n now.day, cfg.MONTHS_GERMAN[now.month-1], now.year)\n\n # Time request\n elif ((data[4] << 8) | data[5]) == 0x0002:\n textual_representation = \"Die Uhrzeit ist {:0>2}:{:0>2}\".format(\n now.hour, now.minute)\n\n # Year (2 bytes)\n byte_7 = (now.year >> 8) & 0xFF\n byte_8 = now.year & 0xFF\n\n # Month (1 byte)\n byte_9 = now.month & 0xFF\n\n # Day (1 byte)\n byte_10 = now.day & 0xFF\n\n # Hour (1 byte)\n byte_11 = now.hour & 0xFF\n\n # Minute (1 byte)\n byte_12 = now.minute & 0xFF\n\n # Length (1 byte)\n text_in_bytes = textual_representation.encode()\n byte_13 = len(text_in_bytes) & 0xFF\n\n if len(text_in_bytes) > 0xFF:\n print(responses.ERROR_TEXT_PAYLOAD_OVERFLOW)\n return None\n\n dt_res_packet = bytearray([byte_1, byte_2, byte_3, byte_4, byte_5,\n byte_6, byte_7, byte_8, byte_9, byte_10,\n byte_11, byte_12, byte_13])\n\n # Text\n for byte in text_in_bytes:\n dt_res_packet.append(byte)\n\n return dt_res_packet", "def build_data(self):\n\n _header_ = self._header_ + 'build_data(): '\n\n if self.verbose:\n print(_header_ + 'Building data for %s ...' % self.p_data)\n\n self.read_data()\n self.map_data()\n self.partition_data()\n self.composition()\n\n if self.verbose:\n print(_header_ + 'Build complete.')\n\n return self", "def _create_data(self):\n return cmdgen.CommunityData(\n self.community(), mpModel=self.snmp_version().value)", "def build_header_1(self, header_len=b'\\x00\\x00\\x00\\x00', data_len=b'\\x00\\x00\\x00\\x00'):\n self.header_1 = b''\n header_1_dict = {'preamble': b'\\x50\\x4f',\n 'packet_type': b'\\x01\\x00\\x00\\x50',\n 'header_len': header_len + b'\\x02\\x00\\x00\\x00\\x00\\x00\\x00\\x00',\n 'data_len': data_len,\n 'agent_guid': b'{%s}' % self.agent_guid,\n 'agent_guid_padding': b'\\x00' * 90 + b'\\x01\\x00\\x00\\x00',\n 'agent_hostname': b'%s' % self.agent_hostname,\n 'hostname_padding': b'\\x00' * (32 - len(self.agent_hostname)) + b'\\x00' * 48}\n\n for item in header_1_dict:\n self.header_1 += header_1_dict[item]\n return self.header_1", "def generateData(self, contract=None):\n return Data(self.phenomenon, self.size, contract)", "def pack(self, msg):\n value_len_max = HEADER_LENTGH - len(HEADER_PREFIX)\n header_value = str(len(msg) + HEADER_LENTGH)\n value_len = len(header_value)\n if value_len > value_len_max:\n return None, \"Socket Data Packing Error: Data is too big!\"\n header_value = '0' * (value_len_max - value_len) + header_value\n\n return (HEADER_PREFIX + header_value + msg).encode('utf-8'), None", "def pack(self):\n data = {\n 'name': self._name,\n 'piece': self._piece,\n 'pos': self._pos,\n 'cash': self._cash,\n 'properties': []\n }\n\n for i in self._properties:\n data['properties'].append({'name': i.name, 'value': i.value})\n\n return data", "def pack(self, data):\n #Need exception handling for packing incorrectly sized data\n tile_fmt = Struct(32 * 'c')\n tile_iter = tile_fmt.iter_unpack(data)\n\n vals = [Tile._tile_to_bitplanes(b''.join(tile)) for tile in tile_iter]\n self.data = b''.join(vals)\n\n if self.dimensions == 16:\n self.data = Tile._deinterleave_subtiles(self.data)", "def __init__(self):\n \n self.packetType = DATA\n self.types = [BYTE, # Packet type\n FLOAT, # Battery voltage\n FLOAT, FLOAT, FLOAT, FLOAT, # Temperature readings\n FLOAT, FLOAT, # Pressure and humidity readings\n BYTE, BYTE, BYTE, # GPS Year, month, date (sensor computer)\n BYTE, BYTE, BYTE, # GPS Hour, minute, second (sensor computer)\n LONG, LONG, LONG, # GPS latitude, longitude, altitude (sensor computer)\n ULONG, UINT, BYTE, # GPS speed, heading, 
num satellites (sensor computer)\n FLOAT, FLOAT, FLOAT, # IMU data (accelerometer)\n FLOAT, FLOAT, FLOAT, # IMU data (gyroscope)\n FLOAT, FLOAT, FLOAT, # IMU data (magnetometer)\n FLOAT, FLOAT, FLOAT, # Attitude data\n ULONG, # Time since reset\n BOOL, UINT, # Data logging\n ULONG, # Time since last data arrival\n ULONG, # Relay states\n BYTE, BYTE, BYTE, # GPS Year, month, date (comm computer)\n BYTE, BYTE, BYTE, # GPS Hour, minute, second (comm computer)\n LONG, LONG, LONG # GPS latitude, longitude, altitude (comm computer)\n ] \n\n self.values = [0]*len(self.types)\n self.values[0] = DATA", "def get_data(self):\n data = {\n \"ts\": self.drone.pos[0][0],\n \"drone\": self.drone,\n \"subject\": self.subject,\n \"peds\": self.peds, # can be None\n \"objs\": self.objs # can be None\n }\n self.empty_bag()\n return data", "def pack( self, key, timestamp = None, label = None, description = None, table = None, pad = False ) : \r\n\r\n duration = 1\r\n if timestamp is None :\r\n timestamp = ms_localtime() \r\n\r\n #\r\n # bugfix : as it seems that NetStation\r\n #\r\n # (a) does not clean the internal buffer for the \"event\" data\r\n # and\r\n # (b) ignores the \"total packet length\" from the \"event\" message header\r\n # when reading the \"label\" / \"description\" / \"key/data\" information , \r\n #\r\n # we have to append a fake \"tail\" to our message if the case if it is incomplete --\r\n # -- otherwise either garbage or the information from the previous datagram\r\n # would be erroneously recognized as belonging to ours . \r\n #\r\n # nb. this also means that the 'label' / 'description' / 'key/data' entries\r\n # cannot be optional . \r\n # \r\n\r\n if not is_32_bit_int_compatible( timestamp ) :\r\n \r\n raise Eggog( \"only 'small' 32-bit integer values less than %d are accepted as timestamps, not %s\" % ( 0xffffFFFF, timestamp ) ) \r\n \r\n if label is None : label = ''\r\n if description is None : description = ''\r\n\r\n label_str = pstring( label ) \r\n description_str = pstring( description ) \r\n\r\n if table is None or len( table.keys() ) <= 0 :\r\n # explicitly state that the number of keys is zero ( see above comment ) \r\n table_str = struct.pack( 'B', 0 ) \r\n else : \r\n table_str = self._pack_dict(table, pad)\r\n\r\n size = len( label_str ) + len( description_str ) + len( table_str ) \r\n \r\n ## # debug \r\n ## print \"+size: \", size \r\n\r\n header_str = self._make_event_header( size, timestamp, duration, key ) \r\n\r\n ## # debug \r\n ## print \"'%s', '%s', '%s', '%s'\" % ( header_str, label_str, description_str, table_str ) \r\n\r\n result_str = _cat( header_str, label_str, description_str, table_str ) \r\n \r\n return result_str", "def create_tcp_pkt(smac: bytes, dmac: bytes, sip: bytes, dip: bytes, ip_id: int, sp: int, dp: int,\n flags: int =dpkt.tcp.TH_SYN, payload: bytes = b\"\") -> dpkt.ethernet.Ethernet:\n tcp_pkt = dpkt.tcp.TCP(sport=sp, dport=dp, flags=flags)\n tcp_pkt.data = payload\n ip_pkt = dpkt.ip.IP(id=ip_id, p=6, src=sip, dst=dip)\n ip_pkt.data = tcp_pkt\n ip_pkt.len += len(ip_pkt.data)\n eth_pkt = dpkt.ethernet.Ethernet(src=smac, dst=dmac)\n eth_pkt.data = ip_pkt\n return eth_pkt", "def pack(self,buffer):\n buffer.append(self.data)", "def _build_packet_out(self, datapath, buffer_id, src_port, dst_port, data):\r\n actions = []\r\n if dst_port:\r\n actions.append(datapath.ofproto_parser.OFPActionOutput(dst_port))\r\n\r\n msg_data = None\r\n if buffer_id == datapath.ofproto.OFP_NO_BUFFER:\r\n if data is None:\r\n return None\r\n msg_data = data\r\n\r\n out 
= datapath.ofproto_parser.OFPPacketOut(\r\n datapath=datapath, buffer_id=buffer_id,\r\n data=msg_data, in_port=src_port, actions=actions)\r\n return out", "def _make_payload(self, width, height, depth, text):\n message = text_to_bits(text) + [0] * 32\n\n payload = message\n while len(payload) < width * height * depth:\n payload += message\n\n payload = payload[:width * height * depth]\n\n return torch.FloatTensor(payload).view(1, depth, height, width)", "def createPdu(data):\n\n memoryStream = BytesIO(data)\n inputStream = DataInputStream(memoryStream)\n\n return getPdu(inputStream)", "def create_msg(data):\n length = str(len(str(data))).zfill(LENGTH_FIELD_SIZE)\n return length + data", "def serialize(self):\n\n # The len must be multiple of 4 bits to convert unambiguously\n\n id_len = self.id.bit_length()\n while (id_len % 4)!= 0:\n id_len += 1\n if self.payload:\n pay_len = self.payload.bit_length()\n while (pay_len % 4)!= 0:\n pay_len += 1\n else: pay_len = 0\n if self.command:\n com_len = self.command.bit_length()\n while (com_len % 4)!= 0:\n com_len += 1\n else: com_len = 0\n\n values = {\n \"id\": self.id,\n \"id_len\": id_len,\n \"payload\": self.payload,\n \"payload_len\": pay_len,\n \"command\": self.command,\n \"command_len\": com_len\n }\n\n\n if self.id == Message.MEASURE or self.id == Message.SINGLE_MEASURE:\n serial_format = (\n \"uint:id_len=id, bits:payload_len=payload, bits:command_len = command, 0x0D0A\"\n )\n else:\n serial_format = (\n \"0x23, uint:id_len=id, bits:payload_len=payload, bits:command_len = command, 0x0D0A\"\n )\n\n message = bitstring.pack(serial_format, **values)\n\n rospy.logdebug(\"Sent command '0x%s'\", message.hex)\n\n return message.tobytes()", "def to_data(self) -> dict:\n return {'pingData': {'challenge': self.ping_challenge}}", "def _pack(self):\n xtraH = struct.pack(\n self.PACKAGING_FORMAT,\n self.qubit_id,\n self.remote_app_id,\n self.remote_node,\n self.cmdLength,\n self.remote_port,\n self.step,\n 0,\n )\n return xtraH", "def _pack(self):\n xtraH = struct.pack(\n self.PACKAGING_FORMAT,\n self.qubit_id,\n self.remote_app_id,\n self.remote_node,\n self.datetime,\n self.remote_port,\n self.outcome,\n 0,\n )\n return xtraH", "def encode(self):\n packet = (\n\n str(self.pos_number) + # 2 octets 0:3\n\n ('%.0f' % (self.amount * 100)).zfill(8) + # 8 octets 3:11\n\n self.answer_flag + # 1 octet 11:12\n\n self.payment_mode + # 1 octet 12:13\n\n self.transaction_type + # 1 octet 13:14\n\n self.currency_numeric + # 3 octet 14:17\n\n self.private + # 10 octet 17:27\n\n self.delay + # 4 octet 27:31\n\n self.authorization) # 4 octet 31:35\n\n packet_len = len(packet)\n\n if packet_len != TERMINAL_ASK_REQUIRED_SIZE:\n raise SequenceDoesNotMatchLengthException('Cannot create ask payment sequence with len != {0} octets. 
'\n 'Currently have {1} octet(s).'.format\n (TERMINAL_ASK_REQUIRED_SIZE, packet_len))\n\n return TeliumData.framing(packet)", "def _pack(self) -> bytes:\n\n return struct.pack(\n self.PACKAGING_FORMAT, \n self.first_operand, \n self.operator, \n self.type_of_second_operand, \n self.second_operand, \n self.length\n )", "def __init__(self, ip, x_len, u_len):\n self._x_fmt = '>' + x_len * 'd'\n self._u_fmt = '>' + u_len * 'd'\n self._buf_size = x_len * 8 # 8 bytes for each double\n self._port = 9095 # fixed in Simulink model\n self._ip = ip\n self._soc = None", "def create_msg(data):\n length = str(len(str(data))).zfill(LENGTH_FIELD_SIZE)\n return length + str(data)", "def __init__(self, data, parent=None):\n self.parent = parent\n self.bootable_flag = struct.unpack(\"<B\", data[0])[0]\n self.start_chs_address = struct.unpack(\"<BH\", data[1:4])[0]\n self.partition_type = struct.unpack(\"<B\", data[4])[0]\n self.end_chs_address = struct.unpack(\"<BH\", data[5:8])[0]\n # FIXME Check to see how the lba address bytes are used\n if self.get_type() == 'Empty':\n self.lba = 0\n else:\n self.lba = struct.unpack(\"<L\", data[8:12])[0]\n\n self.size = struct.unpack(\"<L\", data[12:16])[0]", "def generate_bytestream(self):\n # Start empty\n stream = bytearray()\n # Source ID\n stream.extend([self.data_source])\n # Dest ID\n stream.extend([self.data_dest])\n # Param block\n stream.extend(bytearray([len(self.parameters)]))\n for parameter in self.parameters:\n stream.extend(parameter)\n # Content pointer (not used)\n stream.extend([0xFF]) # Content inline\n # Content itself\n stream.extend(self.content)\n return stream", "def test_1():\n raw = struct.pack('<BBIIIIIIBIIIIBII',\n 3, 3, 0, 100, 1, 101, 2, 102, 2, 3, 103, 4, 104, 1, 5, 105)\n packet = Packet1.from_raw(raw)\n\n # Parent packet has 3 sub packets\n assert packet['size1'] == 3\n assert len(packet['data1']) == 3\n\n # Sub packet 0, has 3 sub-sub packets\n assert packet['data1'][0]['size2'] == 3\n assert len(packet['data1'][0]['data2']) == 3\n\n assert packet['data1'][0]['data2'][0]['entry1'] == 0\n assert packet['data1'][0]['data2'][0]['entry2'] == 100\n assert packet['data1'][0]['data2'][1]['entry1'] == 1\n assert packet['data1'][0]['data2'][1]['entry2'] == 101\n assert packet['data1'][0]['data2'][2]['entry1'] == 2\n assert packet['data1'][0]['data2'][2]['entry2'] == 102\n\n # Sub packet 1 has 2 sub-sub packets\n assert packet['data1'][1]['size2'] == 2\n assert len(packet['data1'][1]['data2']) == 2\n\n assert packet['data1'][1]['data2'][0]['entry1'] == 3\n assert packet['data1'][1]['data2'][0]['entry2'] == 103\n assert packet['data1'][1]['data2'][1]['entry1'] == 4\n assert packet['data1'][1]['data2'][1]['entry2'] == 104\n\n # Sub packet 3 has 1 sub sub packet\n assert packet['data1'][2]['size2'] == 1\n assert len(packet['data1'][2]['data2']) == 1\n\n assert packet['data1'][2]['data2'][0]['entry1'] == 5\n assert packet['data1'][2]['data2'][0]['entry2'] == 105", "def build_message(self, ssid, psk, ktype):\n payload = bytearray(0x88)\n payload[0x26] = 0x14 # This seems to always be set to 14\n # Add the SSID to the payload\n ssid_start = 68\n ssid_length = 0\n for letter in ssid:\n payload[(ssid_start + ssid_length)] = ord(letter)\n ssid_length += 1\n # Add the WiFi password to the payload\n pass_start = 100\n pass_length = 0\n for letter in psk:\n payload[(pass_start + pass_length)] = ord(letter)\n pass_length += 1\n #\n payload[0x84] = ssid_length # Character length of SSID\n payload[0x85] = pass_length # Character length of password\n 
payload[0x86] = [\"none\", \"wep\", \"wpa\", \"wpa2\"].index(\n ktype\n ) # Type of encryption (00=none,01=WEP,02=WPA1,03=WPA2,04=WPA1/2)\n #\n checksum = 0xBEAF\n for i in range(len(payload)):\n checksum += payload[i]\n checksum = checksum & 0xFFFF\n #\n payload[0x20] = checksum & 0xFF # Checksum 1 position\n payload[0x21] = checksum >> 8 # Checksum 2 position\n\n return payload", "def bytes2obj(buf: bytes) -> StunPacket:\n\n type_raw, length, magic_cookie, transaction_id = unpack_from(\n HEADER_FORMAT, buf, 0)\n\n # Convert message type into bits, to parse it as a class and method.\n type_bits: str = bin(type_raw)[2:].zfill(16)\n if type_bits[:2] != \"00\":\n raise Exception(\n \"STUN packet invalid - most signifigant 2 bits must be zeroes.\")\n type_class = type_bits[7] + type_bits[11]\n type_method = type_bits[2:7] + type_bits[8:11] + type_bits[12:]\n\n message_bytes = unpack_from('!{}s'.format(\n length), buf, calcsize(HEADER_FORMAT))[0]\n # if magic_cookie != bytes.fromhex(\"2112A442\"):\n # print(\"Warning: Request not RFC 8489 compilant.\")\n\n message_read = 0\n message: Dict[bytes, bytes] = {}\n while message_read < length:\n attribute_type, attribute_length = unpack_from(\n ATTRIBUTE_HEADER_FORMAT, message_bytes, message_read\n )\n attribute_val = unpack_from(\n \"!{}s\".format(attribute_length), message_bytes, message_read +\n calcsize(ATTRIBUTE_HEADER_FORMAT)\n )[0]\n message[attribute_type] = attribute_val\n\n message_read += calcsize(ATTRIBUTE_HEADER_FORMAT)\n message_read += attribute_length\n\n return StunPacket(type_class, type_method, magic_cookie, transaction_id, message)", "def _make_event_header( self, size_of_the_rest, timestamp, duration, keycode ) : \r\n\r\n # 'D' and the size of the message are not counted \r\n sizeof_int32 = 4 \r\n addendum = 3 * sizeof_int32\r\n \r\n total_length = addendum + size_of_the_rest\r\n\r\n ## return struct.pack( \"=sH2L4s\", 'D', total_length, timestamp, duration, keycode ) \r\n result_str = struct.pack( \"=sH2L4s\", 'D', total_length, timestamp, duration, keycode ) \r\n\r\n # # debug \r\n # print 'header: \"%s\" ' % (result_str, )\r\n\r\n return result_str", "def encode(self):\n payload = []\n\n # Generate Payload\n if self.IsEnsembleData:\n payload += self.EnsembleData.encode()\n if self.IsAncillaryData:\n payload += self.AncillaryData.encode()\n if self.IsAmplitude:\n payload += self.Amplitude.encode()\n if self.IsCorrelation:\n payload += self.Correlation.encode()\n if self.IsBeamVelocity:\n payload += self.BeamVelocity.encode()\n if self.IsInstrumentVelocity:\n payload += self.InstrumentVelocity.encode()\n if self.IsEarthVelocity:\n payload += self.EarthVelocity.encode()\n if self.IsGoodBeam:\n payload += self.GoodBeam.encode()\n if self.IsGoodEarth:\n payload += self.GoodEarth.encode()\n if self.IsBottomTrack:\n payload += self.BottomTrack.encode()\n if self.IsRangeTracking:\n payload += self.RangeTracking.encode()\n if self.IsSystemSetup:\n payload += self.SystemSetup.encode()\n if self.IsNmeaData:\n payload += self.NmeaData.encode()\n\n # Generate the header\n # Get the ensemble number\n ens_num = 0\n if self.IsEnsembleData:\n ens_num = self.EnsembleData.EnsembleNumber\n\n # Get the payload size\n payload_size = len(payload)\n\n header = Ensemble.generate_ens_header(ens_num, payload_size)\n\n # Generate the Checksum CITT\n # Parameters found at https: // pycrc.org / models.html\n #crc = pycrc.algorithms.Crc(width=16, poly=0x1021,\n # reflect_in=False, xor_in=0x1d0f,\n # reflect_out=False, xor_out=0x0000)\n #checksum = 
crc.bit_by_bit_fast(binascii.a2b_hex(bytes(payload)))\n #checksum = Ensemble.int32_to_bytes(CRCCCITT().calculate(input_data=bytes(payload)))\n checksum = crc16.crc16xmodem(payload)\n\n\n result = []\n result += header\n result += payload\n result += checksum\n\n return bytearray(result)", "def from_buffer(data):\n opcode = ustruct.unpack(ATT.struct_format, data[:ATT.struct_size])[0]\n\n # att = uctypes.struct(\n # uctypes.addressof(data[:ATT.struct_size]),\n # ATT_STRUCT,\n # uctypes.LITTLE_ENDIAN\n # )\n\n data = data[ATT.struct_size:]\n return ATT(opcode, data)" ]
[ "0.7320882", "0.6888568", "0.6545822", "0.650796", "0.6446084", "0.6386353", "0.6256809", "0.6228369", "0.6189242", "0.61338437", "0.6114817", "0.6103833", "0.60672927", "0.60629386", "0.60616815", "0.605075", "0.60235", "0.5961972", "0.594975", "0.59361804", "0.5934646", "0.59334093", "0.59327763", "0.5892975", "0.5887977", "0.5839838", "0.58065915", "0.57856464", "0.57812816", "0.577893", "0.5760183", "0.5757494", "0.57541776", "0.57430315", "0.5693223", "0.5665616", "0.56589454", "0.56336546", "0.5624865", "0.56228966", "0.56222695", "0.56165123", "0.5615336", "0.56009835", "0.559702", "0.55921346", "0.5582549", "0.55815583", "0.55810076", "0.5579064", "0.5574558", "0.55573136", "0.5556001", "0.55493796", "0.5511171", "0.5505378", "0.5491395", "0.54890764", "0.54836845", "0.5482694", "0.5478233", "0.5475474", "0.5467165", "0.5464062", "0.5457233", "0.54544055", "0.5446004", "0.5431508", "0.54311335", "0.542929", "0.5427644", "0.5418578", "0.54176426", "0.5414673", "0.54135156", "0.5399162", "0.53918177", "0.5386781", "0.5386362", "0.5380068", "0.537555", "0.5365115", "0.53602237", "0.5342023", "0.53383803", "0.5324974", "0.5320102", "0.53106666", "0.5309502", "0.53028995", "0.5300968", "0.5300589", "0.5293374", "0.52834386", "0.5279496", "0.5277965", "0.5275047", "0.52717984", "0.52680147", "0.52665216" ]
0.53749585
81
Numeral registry index of the protocol.
def __index__(cls): raise UnsupportedCall(f'{cls.__name__!r} object cannot be interpreted as an integer')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def InterfaceIndex(self) -> int:", "def InterfaceIndex(self) -> int:", "def get_numkey(self):\n return self._numkey", "def getPidx(self):\n return int(bytes(self.keeper.getGbl(b\"pidx\")), 16)", "def idx(self):\n return int(self.__ph.get('idx', 0))", "def register_index(self) -> int:\n return self._parent_node.register_index", "def get_nh_idx(self):\n return int(self.get('nhr_id'))", "def get_list_index(self):\r\n _debug('simq03b_api.get_list_index')\r\n \r\n s = self.query('LIST:IND?')\r\n return int(s)", "def get_list_index(self):\r\n return self.n", "def reg(self) -> int:", "def get_list_index(self):\r\n s = self.query('LIST:IND?')\r\n return int(s)", "def get_list_index(self):\r\n s = self.query('LIST:IND?')\r\n return int(s)", "def _get_tunnel_interface_index(self):\n return self.__tunnel_interface_index", "def get():\n global __internal_state_index_counter\n __internal_state_index_counter += long(1)\n return __internal_state_index_counter", "def idx(self):\n if self._idx is None:\n self._loads()\n return self._idx", "def _get_iface_index(self):\n E = data_element_maker()\n top = E.top(\n E.Ifmgr(\n E.Interfaces(\n E.Interface(\n E.Name(self.interface_name)\n )\n )\n )\n )\n\n nc_get_reply = self.device.get(('subtree', top))\n reply_data = find_in_data(\n self._iface_index_name, nc_get_reply.data_ele)\n\n if reply_data is None:\n return ''\n\n return reply_data.text", "def getNum(self) :\n return self._num", "def get_subsys_index(self, subsys):\n return self.index(subsys)", "def __next_index():\n return redis_store.incr(String.__name__.lower() + '-index')", "def index(self) -> int:\r\n return self._index", "def index(self) -> int:\n return self._index", "def index(self) -> int:\n return self._index", "def index(self) -> int:\n return self._index", "def index(self) -> int:\n return self._index", "def index(self) -> int:\n return self._index", "def ring_idx(self) -> int:\n return self._ring_idx", "def index(self) -> int:", "def index(self):\n return self._ll_tree.get_index()", "def index(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"index\")", "def number(self) -> int:\n return self._id", "def token_to_idx(self) -> Dict[Hashable, int]:\n return self._token_to_idx", "def atomic_number(self) -> int:\n return elements.index(self.label) + 1", "def get_registration_number(self):\n return self._reg_number", "def next_num():\r\n CHModuleFactory.num += 1\r\n return CHModuleFactory.num", "def next_num():\r\n CHModuleFactory.num += 1\r\n return CHModuleFactory.num", "def idx(self):\n return self._idx", "def get_index(self, key):\r\n\t\tindex = self._hash_function(key) % self.capacity\r\n\t\treturn index", "def pcie_id(self) -> int:\r\n return self._pcie_id", "def _num(self):\n try:\n num = int(self.__rId[3:])\n except ValueError:\n num = 9999\n return num", "def num_id(self) -> str:\n return pulumi.get(self, \"num_id\")", "def n(self):\n return self.module.n", "def get_num_shift_reg(self):\n return self.num_registers", "def fget(self):\n if not hasattr(self, \"_n\"):\n self._n = 0\n self._n += 1\n return self._n", "def getNumber():", "def rank(self):\r\n\t\trank = self.n % 13\r\n\t\treturn rank", "def getNodalIndex(self, iVertex):\n node = self.gr.getMeshPoint(iVertex)\n no = node.getNo()\n \n return no", "def get_index(band_nums,chan_num):\n ch_index=np.searchsorted(band_nums,chan_num)\n return int(ch_index)", "def _get_index(self):\n\n return self.index\n\n # to avoid having differences bigger than 2pi", "def get_list_index(self):\r\n s = self.query('LIST:MAN?')\r\n if s == None: 
return None\r\n return int(s)-1", "def global_index(self):\n raise NotImplementedError", "def get_adapter_index(self):\n\t\treturn call_sdk_function('PrlVirtNet_GetAdapterIndex', self.handle)", "def get_list_index(self):\r\n return self._api.get_list_index()", "def num(self):\n return self.num", "def get_sensor_number(self):\n return int(self.data[1][-1])", "def getLimbIndex(self):\n\n data = self.name.split('-')\n return int(data[1]) - 1", "def get_index(self):\r\n i = 0\r\n for container in self.settings[\"containers\"]:\r\n if container[\"name\"] == self.container[\"name\"]:\r\n return i\r\n i += 1", "def getPowerIndex(self):\n return self.powerIndex_", "def ied_num(self) -> str:\n return pulumi.get(self, \"ied_num\")", "def serial_num(self) -> int:\n return self._serial_num", "def ordinal(self) -> int:\n return pulumi.get(self, \"ordinal\")", "def get_ident():\n return -1", "def subkey_number(self):\n number = self.unpack_dword(0x14)\n if number == 0xFFFFFFFF:\n return 0\n return number", "def registry_id(self) -> str:\n return self._registry_id", "def current_index(self):\n job = self.client.query(\"SELECT MAX(ID) FROM {}.{};\".format(self.database_name, self.table_name))\n for row in job.result():\n if row[0] == None:\n return 1\n current_index = row[0] + 1\n return current_index", "def registration_number(self):\n return self._registration_number", "def get_number(self):\n return self.__device_number", "def new_key(self):\n return max(self.code_table.keys()) + 1", "def _num_nodes(self):\n return int(self._node_map[-1])", "def index(self):\n return self.data.index", "def value(self):\n return self.__n", "def _get_index(self, key):\n return self._hash_function(key) % self.capacity", "def get_iter_num(self):\n\tif len(self.cost) > 0:\n first_key = list(self.cost.keys())[0]\n num = len(self.cost[first_key]) - 1\n\telse:\n\t first_key = list(self.prim_var.keys())[0]\n num = len(self.prim_var[first_key]) - 1\n\treturn num", "def getResidueNumber(self, iAtom):\n return self._getResiduePointer(iAtom)+1", "def get_encoder_number(self):\n return TrainManager.TESTS[self.test_index][1][0]", "def symbolic_incr(self):\n return self.limits[2]", "def get_number(cls, client_object):\n return client_object.ovsdb.Interface.get_one(\n search='name=%s' % client_object.name).ofport", "def ipython_current_number(self):\n return self.IP.outputcache.prompt_count", "def next_index(state):\n node = state\n for key in (\"layers\", \"index\"):\n node = node.get(key, {})\n indices = [key for key in node.keys()]\n if len(indices) == 0:\n return 0\n else:\n return max(indices) + 1", "def getnumanz(self):\n numanz_ = ctypes.c_int32()\n res = __library__.MSK_XX_getnumanz(self.__nativep,ctypes.byref(numanz_))\n if res != 0:\n _,msg = self.__getlasterror(res)\n raise Error(rescode(res),msg)\n numanz_ = numanz_.value\n _numanz_return_value = numanz_\n return (_numanz_return_value)", "def identifier(self):\n mod_id = self.read16(regAddr=0x8000)\n # print(\"Inserted module is\", self.cfpDict[mod_id])\n # print(\"Inserted module is\", mod_id)\n return mod_id", "def get_index(self):\n return self.disk.partitions.index(self)", "def getAddressOfIndex(self) -> long:\n ...", "def index(self):\n return self._data.get('index')", "def get_index(self):\n\t\treturn call_sdk_function('PrlBootDev_GetIndex', self.handle)", "def __getIdHash(self, val):\n return PublicTransit.TANA_NODE_NUMBER_OFFSET + int((\"%1.0f\" % val)[7:])", "def getOccurence(self) -> int:\n ...", "def _bucket_index(self, key):\n # return hash(key) % 
len(self.buckets)\n hash_value = 0 # hash is set to 0\n for char in key: # iterates through as much as the number of characters in key\n hash_value += ord(char) # return the unicode value to make the number different everytime\n return hash_value % len(self.buckets) # returns a number that will never be greater than the length of the bucket", "def _name2idx(name):\n match = re.search(r\"eth(\\d+)\", name, re.I)\n if not match:\n raise exception.CloudbaseInitException(\n \"invalid NetworkDetails name {!r}\"\n .format(name)\n )\n return int(match.group(1))", "def GetNotchNum(self):\n num = self.ordChar(self._notch) + 1\n return num", "def getInteger(self):\n pass", "def get_namespace_index(cls, libvirt_network_if):\n matcher = re.match(r\"^tt(\\d+)$\", libvirt_network_if)\n return int(matcher.groups()[0]) if matcher is not None else 0", "def increment_register_index(self) -> None:\n self._parent_node.increment_register_index()", "def number(self):\n return self._num", "def route_idx(self):\n return self._route_idx", "def get_NID():\n return NID", "def get_rank(self) -> int:\r\n return self.rank", "def get_index(self):\n return self.index", "def get_index(self):\n return self.index", "def __getnum__(self, i, j):\n return self.pos_to_num[(i, j)]", "def next_node_id(self) -> int:\n i = 1\n while True:\n if i not in self.session.nodes:\n break\n i += 1\n return i", "def get_res_port():\n return get_port() + 1" ]
[ "0.66120684", "0.66120684", "0.6520881", "0.6501883", "0.6485339", "0.63360345", "0.6295744", "0.6161678", "0.6085867", "0.603896", "0.6011073", "0.6011073", "0.5929243", "0.5907422", "0.5906776", "0.58790284", "0.5871748", "0.58596975", "0.5855196", "0.5851725", "0.58458555", "0.58458555", "0.58458555", "0.58458555", "0.58458555", "0.5814226", "0.58032733", "0.57950366", "0.57914513", "0.5785745", "0.5778516", "0.5763974", "0.5753005", "0.5751445", "0.5751445", "0.5746419", "0.5708086", "0.57050717", "0.56966066", "0.56955403", "0.56621724", "0.5650763", "0.5650625", "0.5649198", "0.56405205", "0.56321245", "0.5610751", "0.56074506", "0.5607103", "0.5602176", "0.5595558", "0.55916643", "0.5584147", "0.55788136", "0.5561522", "0.5551767", "0.5547858", "0.5537085", "0.5535296", "0.55174094", "0.55123067", "0.5507717", "0.5505542", "0.54989153", "0.54910237", "0.548623", "0.5474903", "0.5464064", "0.54634005", "0.54593563", "0.5455791", "0.54319245", "0.54314566", "0.5430802", "0.5427713", "0.54226357", "0.54195476", "0.5415915", "0.5400915", "0.5394484", "0.539052", "0.538258", "0.5379563", "0.5377691", "0.5374456", "0.53610563", "0.5350546", "0.53472835", "0.53468055", "0.53416204", "0.5335442", "0.53348005", "0.5334611", "0.5333712", "0.5333392", "0.5324213", "0.5323911", "0.5323911", "0.5323591", "0.5322178", "0.5319383" ]
0.0
-1
Decode next layer protocol.
def _decode_next_layer(self, *args, **kwargs): # pylint: disable=signature-differs raise UnsupportedCall(f"'{self.__class__.__name__}' object has no attribute '_decode_next_layer'")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _decode_next_layer(self, dict_, length=None):\n # make next layer protocol name\n proto = str(self._prot or 'Raw').lower()\n\n # make BytesIO from frame package data\n bytes_ = io.BytesIO(self._file.read(dict_['len']))\n info, protochain = self._import_next_layer(bytes_, length)\n\n # write info and protocol chain into dict\n self._protos = ProtoChain(self._prot, protochain)\n dict_[proto] = info\n dict_['protocols'] = self._protos.chain\n return dict_", "def decode(data):\n raise NotImplementedError", "def decode(data): #@NoSelf", "def _dinamic_decode(self):\n raise NotImplementedError", "def _DecodeFn():\n _, decode_dict = self._model.ConstructDecodeGraph(\n input_batch=inp_instance.TpuDequeueBatch())\n self.decode_nm = py_utils.NestedMap(decode_dict)\n return self.decode_nm.Flatten()", "def _DecodeStep():\n _, decode_dict = self._model.ConstructDecodeGraph()\n self.decode_nm = py_utils.NestedMap(decode_dict)\n return [self._OutfeedEnqueue(decode_dict)]", "def decode(self, code):\n raise NotImplementedError", "def decoder(self):\n pass", "def handle_decode(self, encoded_data):\n \n config.COD_PROMPT = config.DEC_PROMPT\n print config.DEC_PROMPT + \" decoding...\"\n \n # while there is another decoder, run each item through the next decoder\n data = encoded_data\n success = False\n for decoder in self.decoder_list:\n current_decoder = decoder()\n success, data = self.recursive_decoder(current_decoder.decode, data)\n if not success:\n break\n print config.DEC_PROMPT + \"%s decoded to '%s'\" % ( current_decoder.name(),data)\n return success, data", "def _decode(self):\n \n self.version = int(data_to_hex_str(self.packet[0])[2])\n self.header_len = int(data_to_hex_str(self.packet[0])[3]) * 4\n self.type_of_service = data_to_hex_str(self.packet[1:2])\n self.total_len = int(data_to_hex_str(self.packet[2:4]), 16)\n self.id = data_to_hex_str(self.packet[4:6])\n \n #parse the flags fields(reservedbit, don't fragment, more fragment)\n if ((ord(self.packet[6]) & (1 << 7)) != 0):\n self.flags_reservedbit = 1\n else:\n self.flags_reservedbit = 0\n #endof if\n \n if ((ord(self.packet[6]) & (1 << 6)) != 0):\n self.flags_dont_fragment = 1\n else:\n self.flags_dont_fragment = 0\n #endof if\n \n if ((ord(self.packet[6]) & (1 << 5)) != 0):\n self.flags_more_fragment = 1\n else:\n self.flags_more_fragment = 0\n #endof if\n \n #parse the offset field(in packet[6:7]): 00011111 & packet[6] (to filter flags) -->> get packet[6:7] in hex_str\n #tmp = str(31 & ord(self.packet[6]))\n self.fragment_offset = int(data_to_hex_str(self.packet[6:8]), 16)\n if (self.fragment_offset >= (1 << 13)):\n #take away the flags fields: 00011111 11111111 & self.fragment_offset\n self.fragment_offset = self.fragment_offset & ((1 << 13) - 1) \n \n self.TTL = ord(self.packet[8])\n self.protocol = IPPROTO[ord(self.packet[9])]\n self.header_checksum = data_to_hex_str(self.packet[10:12])\n \n self.src = str(ord(self.packet[12])) + '.' + str(ord(self.packet[13])) + '.' + \\\n str(ord(self.packet[14])) + '.' + str(ord(self.packet[15]))\n self.dst = str(ord(self.packet[16])) + '.' + str(ord(self.packet[17])) + '.' + \\\n str(ord(self.packet[18])) + '.' 
+ str(ord(self.packet[19]))\n \n if (self.header_len > 20):\n self.opt_paddings = self.packet[20 : (self.header_len)]", "def decode(decode_format):\n return output_from_decode", "def decode(self):\n for layer in self.layers:\n layer.decode()", "def decode(self):\n for layer in self.layers:\n layer.decode()", "def decode(self, z):\n raise NotImplementedError", "def decode(cls, data):\n # Message (PHYPayload) must be at least 1 byte\n if len(data) < 1:\n raise DecodeError()\n # Decode the MAC Header\n mhdr = MACHeader.decode(data[0])\n # Decode the Message\n if mhdr.mtype == JOIN_REQUEST:\n return JoinRequestMessage.decode(mhdr, data)\n elif mhdr.mtype == UN_DATA_UP or mhdr.mtype == CO_DATA_UP:\n return MACDataUplinkMessage.decode(mhdr, data)\n else:\n return None", "def decode(self, encoded):", "def _define_decoder(self):\n raise NotImplementedError", "def decode(self, data: bytes) -> bytes:\n ...", "def decode(self): # pragma: no cover\n pass", "def decode(self):\n if self.ciphered:\n msg = self.result \n self.result = ''\n else:\n msg = self.msg\n try:\n self.result = self.doDecode(msg,self.shift)\n except Exception as e:\n raise CipherError(\"decoding failure {}.\".format(e))\n self.ciphered = False\n return self.result", "def doDecode(self):\n raise CipherError(\"override this funct and return the decoded msg\")", "def _decode_end(_fp):\n return 0", "def test_decode(self):\n pass # TODO(tlarsen)", "def decode(self,m):\n raise NotImplementedError('subclasses must override decode()!')", "def bdecode(f):\n\tbtype = TYPES[f.read(1)]\n\tif btype is not None:\n\t\tf.seek(-1, SEEK_CUR)\n\t\treturn DECODERS[btype](f)\n\telse: #Used in dicts and lists to designate an end\n\t\treturn None", "def _recv_protocol(self):\n if not self._protocol_recv:\n try:\n data = self._read_bytes(1, timeout=1.0)\n if len(data) == 0:\n self.close()\n raise PipeClosed()\n peer_protocol = struct.unpack('>B', data)[0]\n self._protocol = min(self._protocol or pickle.HIGHEST_PROTOCOL, peer_protocol)\n self._protocol_recv = True\n self._serializer = _PickleSerializer(self._protocol)\n except (OSError, socket.error):\n self.close()\n raise PipeClosed()", "def decode(cls, data):\n if len(data) == 0:\n return None\n cid = struct.unpack('B', data[0])[0]\n if cid == LINKCHECKREQ:\n return LinkCheckReq.decode(data)\n elif cid == LINKADRANS:\n return LinkADRAns.decode(data)\n # TODO\n #elif cid == DUTYCYCLEANS:\n # return DutyCycleReq.decode(data)\n #elif cid == RXPARAMSETUPANS:\n # return RxParamSetupReq.decode(data)\n #elif cid == DEVSTATUSANS:\n # return DevStatusReq.decode(data)\n #elif cid == NEWCHANNELANS:\n # return NewChannelReq.decode(data)\n #elif cid == RXTIMINGSETUPANS:\n # return RxTimingSetupReq.decode(data)\n else:\n return None", "def decode(self, s):", "def decode(self, s):", "def greedy_decode(self, z):\r\n\r\n raise NotImplementedError", "def decode(self, buf=None):\n if buf is None:\n buf = self.receive()\n \n return decode_network_packet(buf)", "def decode(packed_data, custom_decoder=None):\n decoder = make_decoder(custom_decoder)\n structure = msgpack.unpackb(packed_data, ext_hook=decoder, encoding='utf-8')\n return structure", "def _define_decoder(self):\n self.decoder = nn.Sequential(nn.Linear(self.encoding_shape, 256), # B, 256\n View((-1, 256, 1, 1)), # B, 256, 1, 1\n nn.SELU(),\n nn.ConvTranspose2d(256, 64, 4), # B, 64, 4, 4\n nn.SELU(),\n nn.ConvTranspose2d(64, 64, 4, 2, 1), # B, 64, 8, 8\n nn.SELU(),\n nn.ConvTranspose2d(64, 32, 4, 2, 1), # B, 32, 16, 16\n nn.SELU(),\n nn.ConvTranspose2d(32, 32, 4, 
2, 1), # B, 32, 32, 32\n nn.SELU(),\n nn.ConvTranspose2d(32, 3, 4, 2, 1), # B, nc, 64, 64\n nn.ReLU()\n )", "def decoding_layer_train(encoder_state, dec_cell, dec_embed_input, \n target_sequence_length, max_summary_length, \n output_layer, keep_prob):\n # TODO: Implement Function\n trainig_helper = tf.contrib.seq2seq.TrainingHelper(dec_embed_input, target_sequence_length)\n basic_decoder = tf.contrib.seq2seq.BasicDecoder(dec_cell, trainig_helper, encoder_state, output_layer)\n f_output, _, _ = tf.contrib.seq2seq.dynamic_decode(basic_decoder,maximum_iterations=max_summary_length)\n return f_output", "def _DecodeFn():\n with cluster_factory.SetEval(True):\n _, decode_dict = self._decode_model.ConstructDecodeGraph()\n self.decode_nm = py_utils.NestedMap(decode_dict)\n return self.decode_nm.Flatten()", "def decoding_layer_infer(encoder_state, dec_cell, dec_embeddings, start_of_sequence_id,\n end_of_sequence_id, max_target_sequence_length,\n vocab_size, output_layer, batch_size, keep_prob):\n # Convert the start_ids to be a vector with batch size (the go id repeated batch size times)\n start_ids = tf.tile([start_of_sequence_id], [batch_size])\n # Create the embedding helper.\n embedding_helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(\n dec_embeddings, start_ids, end_of_sequence_id)\n basic_decoder = tf.contrib.seq2seq.BasicDecoder(\n dec_cell, embedding_helper, encoder_state, output_layer)\n f_output, _, _ = tf.contrib.seq2seq.dynamic_decode(\n basic_decoder,maximum_iterations=max_target_sequence_length)\n return f_output", "def decode(self, eth):\n\t\tif eth.type == dpkt.ethernet.ETH_TYPE_ARP:\n\t\t\t# print 'arp'\n\t\t\treturn ARP(eth.data).get()\n\n\t\telif eth.type == dpkt.ethernet.ETH_TYPE_IP6:\n\t\t\tip = eth.data\n\t\t\tif ip.p == dpkt.ip.IP_PROTO_UDP:\n\t\t\t\tudp = ip.data\n\n\t\t\t\t# multicast is just like IPv4\n\t\t\t\tif udp.dport == 5353:\n\t\t\t\t\t# print udp\n\t\t\t\t\tans = mDNS(udp).get()\n\t\t\t\t\t# print 25*'='\n\t\t\t\t\t# pp.pprint(ans)\n\t\t\t\t\t# print 25*'='\n\t\t\t\t\treturn ans\n\n\t\t\t\t# print 'IPv6 UDP','port:',udp.dport,'src:',self.getip(ip.src,True),'dst:',self.getip(ip.dst,True)\n\n\t\t\t# TCP not useful\n\t\t\telif ip.p == dpkt.ip.IP_PROTO_TCP:\n\t\t\t\tpass\n\t\t\t\t# tcp = ip.data\n\t\t\t\t# print 'IPv6 TCP','port:',tcp.dport,'src:',self.getip(ip.src,True),'dst:',self.getip(ip.dst,True)\n\n\t\t\t# ICMP error msg not useful for mapping\n\t\t\telif ip.p == dpkt.ip.IP_PROTO_ICMP6:\n\t\t\t\t# print 'IPv6 icmp6:',ip.data.data\n\t\t\t\tpass\n\n\t\t\t# other stuff I haven't decoded\n\t\t\telse:\n\t\t\t\tpass\n\t\t\t\t# print 'IPv6',ip.p,'src:',self.getip(ip.src,True),'dst:',self.getip(ip.dst,True)\n\t\telif eth.type == dpkt.ethernet.ETH_TYPE_IP:\n\t\t\tip = eth.data\n\n\t\t\t# roku interface port: 1900 dst: 239.255.255.250 1900\n\t\t\tif ip.p == dpkt.ip.IP_PROTO_UDP:\n\t\t\t\tudp = ip.data\n\n\t\t\t\t# these aren't useful\n\t\t\t\tif udp.dport == 53: # DNS\n\t\t\t\t\t# return DNS(udp.data)\n\t\t\t\t\treturn {}\n\n\t\t\t\telif udp.dport == 5353: # mDNS\n\t\t\t\t\t# print 'mDNS'\n\t\t\t\t\t# print udp\n\t\t\t\t\treturn mDNS(udp).get()\n\n\t\t\t\telif self.getip(ip.dst) == '239.255.255.250':\n\t\t\t\t\treturn {}\n\n\t\t\t\telse:\n\t\t\t\t\t# don't print standard ports\n\t\t\t\t\t# 17500 dropbox\n\t\t\t\t\t# if not ip.data.dport in [17500]:\n\t\t\t\t\t# \tprint 'other udp','port:',udp.dport,'src:',self.getip(ip.src),'dst:',self.getip(ip.dst),': '\n\t\t\t\t\treturn {}\n\t\t\telif ip.p == dpkt.ip.IP_PROTO_TCP:\n\t\t\t\t# src = self.getip(ip.src)\n\t\t\t\t# if 
netaddr.IPAddress(src) not in netaddr.IPNetwork(\"192.168.1.0/24\"):\n\t\t\t\t# \twho = ''\n\t\t\t\t# \tif src not in self.ipMap:\n\t\t\t\t# \t\twho = WhoIs(src).record['NetName']\n\t\t\t\t# \t\tself.ipMap[src] = who\n\t\t\t\t# \telse:\n\t\t\t\t# \t\twho = self.ipMap[src]\n\t\t\t\t# \tif who in ['GOOGLE','AKAMAI','APPLE-WWNET','AMAZO-ZIAD1','DROPBOX']:\n\t\t\t\t# \t\treturn {}\n\t\t\t\t# \telse:\n\t\t\t\t# \t\tprint src,who\n\t\t\t\t# don't print standard ports\n\t\t\t\t# port 58969 - XSANS Apple, why do i see that?\n\t\t\t\t# 22 ssh\n\t\t\t\t# 25 smtp\n\t\t\t\t# 80 http\n\t\t\t\t# 123 time server\n\t\t\t\t# 143 imap\n\t\t\t\t# 443 https\n\t\t\t\t# 445 smb\n\t\t\t\t# 548 afp over tcp\n\t\t\t\t# 5009 airport admin utility\n\t\t\t\t# 5222 ichat\n\t\t\t\t# 17500 dropbox\n\t\t\t\t# if not ip.data.dport in [22,25,80,123,143,443,445,548,5009,5222,17500]:\n\t\t\t\t\t# print 'other tcp','port:',ip.data.dport,'src:',self.getip(ip.src),'dst:',self.getip(ip.dst)\n\t\t\t\treturn {}\n\t\t\t# elif ip.p == dpkt.ip.IP_PROTO_ICMP6:\n\t\t\t# \tprint '?????? other icmp6','src:',self.getip(ip.src),'dst:',self.getip(ip.dst)\n\t\t\telif ip.p == 2:\n\t\t\t\tpass\n\t\t\t\t# print 'IGMP','src:',self.getip(ip.src),'dst:',self.getip(ip.dst)\n\t\t\telse:\n\t\t\t\t# print 'other ip packet','src:',self.getip(ip.src),'dst:',self.getip(ip.dst)\n\t\t\t\treturn {}", "def _decode(data: BencodedString) -> Union[bytes, dict, int, list]:\n if not data.bytes:\n raise ValueError(\"Cannot decode an empty bencoded string.\")\n\n if data.bytes[0] == START_DICT:\n return _decode_dict(data)\n\n if data.bytes[0] == START_LIST:\n return _decode_list(data)\n\n if data.bytes[0] == START_INTEGER:\n return _decode_int(data)\n\n if chr(data.bytes[0]).isdigit():\n return _decode_bytes(data)\n\n raise ValueError(\n \"Cannot decode data, expected the first byte to be one of \"\n f\"'d', 'i', 'l' or a digit, got {chr(data.bytes[0])!r} instead.\"\n )", "def _import_next_layer(self, file_, length):\n if self._prot == 'Ethernet':\n from .link import Ethernet as Protocol\n elif self._prot == 'IPv4':\n from .internet import IPv4 as Protocol\n elif self._prot == 'IPv6':\n from .internet import IPv6 as Protocol\n else:\n data = file_.read(*[length]) or None\n return data, None\n next_ = Protocol(file_, length)\n return next_.info, next_.protochain", "def _decode_message(self, label: str, buf, typedef=None, pos=0, end=None, group=False):\n print(str(pos) + \" decode_message \" + label)\n if end is None:\n end = len(buf)\n\n if typedef is None:\n typedef = {}\n else:\n # Don't want to accidentally modify the original\n typedef = copy.deepcopy(typedef)\n output = {}\n\n while pos < end:\n oldpos = pos\n tag, pos = decoder._DecodeVarint(buf, pos)\n try:\n field_number, wire_type = wire_format.UnpackTag(tag)\n except Exception as exc:\n raise (ValueError,\n 'Could not read valid tag at pos %d. 
Ensure it is a valid protobuf message: %s'\n % (pos-len(tag), exc), sys.exc_info()[2])\n # Convert to str\n field_number = str(field_number)\n orig_field_number = field_number\n \n field_typedef = None\n if field_number in typedef:\n field_typedef = typedef[field_number]\n else:\n field_typedef = {}\n field_typedef['type'] = self.wire_type_defaults[wire_type]\n field_type = field_typedef['type']\n if self.debug:\n ft = field_type\n if ft == None:\n ft = \"None\"\n print(\"@\" + str(oldpos) + \"-\" + str(pos-1) + \":\" + label + \" field_number \" +\n str(field_number) +\n \" wire_type \" + str(wire_type) +\n \" field_type \" + str(ft))\n # If field_type is None, its either an unsupported wire type, length delim or group\n # length delim we have to try and decode first\n field_out = None\n if field_type == 'LD':\n field_out, pos = self.decode_message_LD(label, buf, pos, field_typedef)\n elif field_type == 'endGroup':\n # TODO Should probably match the field_number to START_GROUP\n if not group:\n raise ValueError(\"Found END_GROUP before START_GROUP\")\n # exit out\n return output, typedef, pos\n elif field_type == 'message':\n field_out, pos = self.decode_message_message(\n label, buf, pos, field_typedef, field_number)\n elif field_type == 'group':\n group_typedef = None\n # Check for a anonymous type\n if 'group_typedef' in field_typedef:\n group_typedef = field_typedef['group_typedef']\n field_out, group_typedef, pos = self.decode_group(\n label, buf, group_typedef, pos)\n # Save type definition\n field_typedef['group_typedef'] = group_typedef\n else:\n # Verify wiretype matches\n if self.wiretypes[field_type] != wire_type:\n raise ValueError(\"Invalid wiretype for field number %s. %s is not wiretype %s\"\n % (field_number, field_type, wire_type))\n # Simple type, just look up the decoder\n field_out, pos = self.decoders[field_type](buf, pos)\n field_typedef['type'] = field_type\n if 'name' not in field_typedef:\n field_typedef['name'] = ''\n field_key = field_number\n if '-' not in field_number and 'name' in field_typedef and field_typedef['name'] != '':\n field_key = field_typedef['name']\n # Deal with repeats\n if field_key in output:\n if isinstance(field_out, list):\n if isinstance(output[field_number], list):\n output[field_key] += field_out\n else:\n output[field_key] = field_out.append(output[field_key])\n else:\n if isinstance(output[field_number], list):\n output[field_key].append(field_out)\n else:\n output[field_key] = [output[field_key], field_out]\n else:\n output[field_key] = field_out\n typedef[orig_field_number] = field_typedef\n if self.debug:\n print(str(field_key) + \" field_out:\" + str(field_out))\n if pos > end:\n raise decoder._DecodeError(\"Invalid Message Length, pos=\" +\n str(pos) + \" end=\" + str(end))\n # Should never hit here as a group\n if group:\n raise ValueError(\"Got START_GROUP with no END_GROUP.\")\n print(\"decode_message finish \" + str(pos))\n return output, typedef, pos", "def build_decoder(shift):\n ### TODO.\n decoder = build_coder(27-shift)\n # print decoder\n return decoder", "def decode(self, crypto):", "def _decode(self, rel_codes, anchors):\n pass", "def protocol(self):\n return self._info.next # pylint: disable=E1101", "def decode(self, z):\n if self.switch:\n x = self.bijecter(z, inverse=True)\n return self.decode_(x)\n else:\n return self.decode_(z)", "def decode(cls, data, remote):\r\n # Check length\r\n if len(data) < 4:\r\n raise DecodeError(\"Message too short.\")\r\n # Decode header\r\n (version, token, identifer) = 
unpack('<BHB', data[:4])\r\n #print('Received Token', token)\r\n m = GatewayMessage(version=version, token=token, identifier=identifer)\r\n m.remote = remote\r\n # Test versions (1 or 2) and supported message types\r\n if ( m.version not in (1, 2) or \r\n m.version == 1 and m.id not in (PUSH_DATA, PULL_DATA) or \r\n m.version == 2 and m.id not in (PUSH_DATA, PULL_DATA, TX_ACK)\r\n ):\r\n print('Version',m.version,'ID',m.id)\r\n pass\r\n #raise UnsupportedMethod()\r\n\r\n # Decode gateway EUI and payload\r\n if m.id == PUSH_DATA:\r\n print('PUSH DATA')\r\n if len(data) < 12:\r\n raise DecodeError(\"PUSH_DATA message too short.\")\r\n m.gatewayEUI = unpack('<Q', data[4:12])[0]\r\n m.payload = data[12:]\r\n elif m.id == PULL_DATA:\r\n print('PULL DATA')\r\n if len(data) < 12:\r\n raise DecodeError(\"PULL_DATA message too short.\")\r\n m.gatewayEUI = unpack('<Q', data[4:12])[0]\r\n #print('Gateway EUI: ',m.gatewayEUI)\r\n elif m.id == TX_ACK:\r\n m.payload = data[4:]\r\n \r\n # Decode PUSH_DATA payload\r\n if m.id == PUSH_DATA:\r\n try:\r\n jdata = loads(m.payload)\r\n except ValueError:\r\n raise DecodeError(\"JSON payload decode error\")\r\n m.ptype = list(jdata.keys())[0]\r\n # Rxpk payload - one or more.\r\n \r\n \r\n if m.ptype == 'rxpk':\r\n m.rxpk = []\r\n \r\n for r in jdata['rxpk']:\r\n rx = Rxpk.decode(r)\r\n if rx is not None:\r\n m.rxpk.append(rx)\r\n if not m.rxpk:\r\n raise DecodeError(\"Rxpk payload decode error\")\r\n \r\n # Stat payload\r\n elif m.ptype == 'stat':\r\n m.stat = Stat.decode(jdata)\r\n if m.stat is None:\r\n raise DecodeError(\"Stat payload decode error\")\r\n # Unknown payload type\r\n else:\r\n raise DecodeError(\"Unknown payload type\")\r\n\r\n '''for a, v in m.__dict__.items():\r\n try:\r\n for attr, value in v.__dict__.items():\r\n print(' '+attr,value)\r\n except:\r\n print(a, v)\r\n continue\r\n ''' \r\n return m", "def decode(cls, data: bytes):\n\n raise NotImplemented()", "def decode_packet(data):\n\n opcodes = [(\"AUTH_LOGON_CHALLENGE\", \"\\x00\"), (\"AUTH_LOGON_PROOF\", \"\\x01\")]\n opcode = data[0] # Opcode of the received packet (First byte)\n if opcode == opcodes[0][1]: # Auth Logon challenge\n srp_rcvd = {\n 'error': data[1], # (you should hope that it is always 0)\n 'B': data[3:35], # Read B and skip 1 field (Length_g)\n 'g': data[36:37], # Read g and skip 1 field (Length_n)\n 'N': data[38:70],\n 's': data[70:102], # Read salt\n 'crc': data[102:] # (useless for private servers)\n }\n return srp_rcvd\n if opcode == opcodes[1][1]:\n # Auth logon proof\n if data[1] == \"\\x00\": # Code error: 0\n srp_rcvd = {'login': 1}\n else:\n srp_rcvd = {'login': 0}\n return srp_rcvd", "def get_decoder(self):\n raise NotImplementedError()", "def decode(self,buf):\n eth = dpkt.ethernet.Ethernet(buf)\n pkt_len = len(buf)\n if(eth.type== dpkt.ethernet.ETH_TYPE_IP):\n ip = eth.data\n dst_ip = socket.inet_ntoa(ip.dst)\n src_ip = socket.inet_ntoa(ip.src)\n octet_list = string.split(dst_ip,'.')\n broadcast = False\n for o in octet_list:\n if (o == \"255\"):\n broadcast = True\n break\n if((octet_list[0] == \"224\") or (octet_list[0] == \"239\")):\n broadcast = True #Its multicast actually.\n if not broadcast:\n if(ip.p == dpkt.ip.IP_PROTO_TCP):\n pass\n elif(ip.p == dpkt.ip.IP_PROTO_UDP):\n udp =ip.data\n if((udp.dport == 53) or (udp.sport == 53)): # A request. \n if(udp.dport == 53): # A request. 
\n return self.dns_handler.handle_dns_request(ip.src,ip.dst,ip.p,udp.sport,udp.dport,udp.data)\n if(udp.sport == 53): # A DNS response\n self.dns_handler.handle_dns_response(ip.src,ip.dst,ip.p,udp.sport,udp.dport,udp.data)\n else:\n pass", "def _decode(self):\n with tf.variable_scope('same_question_concat'):\n batch_size = tf.shape(self.start_label)[0]\n concat_passage_encodes = tf.reshape(\n self.fuse_p_encodes,\n [batch_size, -1, 2 * self.hidden_size]\n )\n no_dup_question_encodes = tf.reshape(\n self.sep_q_encodes,\n [batch_size, -1, tf.shape(self.sep_q_encodes)[1], 2 * self.hidden_size]\n )[0:, 0, 0:, 0:]\n decoder = PointerNetDecoder(self.hidden_size)\n self.start_probs, self.end_probs = decoder.decode(concat_passage_encodes,\n no_dup_question_encodes)", "def decode(self, data):\n return self.__cipher.decrypt(data)", "def _decode(self, message):\n raise NotImplementedError(\"_decode needs to be implemented in {} subclass\".format(type(self).__name__))", "def build_decoder(shift):\n ### TODO.", "def _decode_infer(self, decoder, _encoder_output, features, labels):\r\n\r\n return decoder(_encoder_output, labels)", "def decodepkt(self, pkt):\n res = \"\"\n if pkt.startswith('$'):\n try:\n self.logger.debug('unpack< %s', pkt) \n res = self.unpack(pkt)\n except ValueError as ex:\n self.logger.debug('GDB-< %s', res)\n self.logger.warning('Bad packet %s', ex) \n self.s.send(b'-')\n else:\n self.s.send(b'+')\n self.logger.debug('GDB+< %s', res) \n return res\n else:\n self.logger.warning('discards %s', pkt)", "def decode(self, z):\n result = self.decoder_input(z)\n result = result.view(-1, 512, 2, 2)\n result = self.decoder(result)\n result = self.final_layer(result)\n return result", "def decode(self, rel_codes, anchors):\n with tf.name_scope('Decode'):\n return self._decode(rel_codes, anchors)", "def _decode_frame(self):\n\n self._processed.eth_frame.log(level=logging_helper.INFO)\n\n # Parse IP packets, protocol=0x8\n if hex(self._processed.eth_frame.protocol) == u'0x8':\n self._processed.ip_frame = IPFrame(self._processed.eth_frame.payload)\n self._processed.ip_frame.log(level=logging_helper.INFO)\n\n if self._processed.ip_frame.payload is not None:\n self._processed.ip_frame.payload.log(level=logging_helper.INFO)\n\n else:\n logging.info(u'Not an IP payload')\n\n logging.info(self._processed)", "def decode_next_IE(stream):\n if len(stream) < 1:\n return None\n\n type = struct.unpack('B', stream[0:1]) \n\n if type < 128:\n if type not in ie_type_length:\n raise ValueError('Type ({}) extracted from decode stream is not defined'.str(type))\n length = ie_type_length[type]\n\n if len(stream) < length + 1:\n raise ValueError('Length of data for type ({}) should be ({}) but not enough bits left in encoded stream'.format(str(type), str(length)))\n\n return IE(type, stream[1:length+2], raw=True)\n else:\n if len(stream) < 3:\n raise ValueError('Type ({}) is TLV, but length of encoded stream is less than 3'.format(str(type)))\n\n length = struct.unpack('!I', stream[1:3])\n\n if len(stream) < length + 3:\n raise ValueError('Asserted length of next IE value in encoded stream is ({}) but length of stream ({}) is insufficient'.format(str(length), str(len(stream))))\n\n return IE(type, stream[3:length+4])", "def decode(cls, data):\n h = struct.unpack('B', data)[0]\n # Bits 7-5 define the message type\n mtype = (h & 224) >> 5\n # Bits 1-0 define the major version\n major = h & 3\n m = MACHeader(mtype, major)\n return m", "def Decodingfunc(Codebyte):\r\n Decodedint=struct.unpack('b',Codebyte)[0]\r\n N=0 
#number of repetitions\r\n L=0 # length of single/multiple sequence\r\n if Decodedint >= 0: #single\r\n N = 1\r\n L = Decodedint+1\r\n else: #multiple\r\n L = -Decodedint//16+1\r\n N = -Decodedint-(L-1)*16+1\r\n #print(\"N =\",N,\" L =\",L)\r\n return (N,L)", "def decode(self, data):\n\n # Tested:\n # types: z, T, a\n # nested_structure\n # repeated\n if not hasattr(data, 'read'):\n data = io.BytesIO(data)\n\n if self._kv_fmt:\n return dict(self._decode_wire(data))\n else:\n return tuple(self._decode_wire(data))", "def get_next_decoder(current_decoder, decoder_dict):\n switch = { # Dictionary with next model calls\n 'KeywordDecoder': [(decoder_dict['ColumnDecoder'], 'ColumnDecoder')],\n 'ColumnDecoder': [(decoder_dict['OperatorDecoder'], 'OperatorDecoder')],\n 'TableDecoder': [],\n 'OperatorDecoder': [(decoder_dict['ConstantDecoder'], 'ConstantDecoder')],\n 'AggregatorDecoder': [],\n 'RootDecoder': [(decoder_dict['AggregatorDecoder'], 'AggregatorDecoder'),\n (decoder_dict['ColumnDecoder'], 'ColumnDecoder'),\n (decoder_dict['TableDecoder'], 'TableDecoder'),\n (decoder_dict['KeywordDecoder'], 'KeywordDecoder'),\n ],\n 'AndOrDecoder': [(decoder_dict['ColumnDecoder'], 'ColumnDecoder')],\n 'ConstantDecoder': [(decoder_dict['AndOrDecoder'], 'AndOrDecoder')]\n }\n\n return switch.get(current_decoder, \"None\")", "def decode(self, value):\r\n pass", "def decode(fmtstr, data):\n return Wire(fmtstr).decode(data)", "def decode(self, bytes_, errors='strict'):\n decoder = self.IncrementalDecoder(errors=errors)\n return (\n decoder.decode(bytes_, final=True),\n len(bytes_),\n )", "def decode_stream(self):\n io = self.io\n result = None\n\n while True:\n opcode = io.read(1)\n if not opcode:\n break\n else:\n opcode = ord(opcode)\n\n klass = MicroOpDecoder.opcode_to_class.get(opcode)\n yield klass.decode(io)", "def _define_decoder(self):\n self.decoder = nn.Sequential(nn.Linear(self.encoding_shape, 512, bias=False), nn.SELU(),\n nn.BatchNorm1d(512),\n nn.Linear(512, 2560, bias=False), nn.SELU(),\n nn.BatchNorm1d(2560),\n nn.Linear(2560, 5120, bias=False), nn.SELU(),\n nn.BatchNorm1d(5120),\n nn.Linear(5120, 64*64*3, bias=False), nn.ReLU(),\n View((-1, 3, 64, 64)),\n )", "def _decode_tree(self, raw_bytes, type_tree):\n try:\n type_name, subtypes = type_tree\n except ValueError:\n raise DecodeError(\"could not unpack type tree %s\" % str(type_tree))\n if type_name not in self.codecs:\n raise UnknownCodecError(type_name)\n codec = self.codecs[type_name]\n return codec.decode(raw_bytes, serialization=self, subtypes=subtypes)", "def decode(raw_bytes, *, serialization=None, subtypes=tuple()):\n raise NotImplementedError", "def decoder(self, z):\n x1 = self.dec_conv(z)\n return x1", "def decodeName(self, last=-1):\n label = []\n done = False\n while not done:\n (length,) = self.unpack(\"!B\")\n if getBits(length, 6, 2) == 3:\n # Pointer\n self.offset -= 1\n pointer = getBits(self.unpack(\"!H\")[0], 0, 14)\n save = self.offset\n if last == save:\n raise BufferError(\n \"Recursive pointer [offset=%d,pointer=%d,length=%d]\" %\n (self.offset, pointer, len(self.data))\n )\n if pointer < self.offset:\n self.offset = pointer\n else:\n # Pointer can't point forwards\n raise BufferError(\n \"Invalid pointer [offset=%d,pointer=%d,length=%d]\" %\n (self.offset, pointer, len(self.data))\n )\n label.extend(self.decodeName(save).label)\n self.offset = save\n done = True\n else:\n if length > 0:\n l = self.get(length)\n try:\n l.decode()\n except UnicodeDecodeError:\n raise BufferError(\"Invalid label <%s>\" % l)\n 
label.append(l)\n else:\n done = True\n return \".\".join(str(label))", "def decode(self, remove: str = None):\n\n self.create_map()\n\n # remove the trailing padding from the flattened binary tree\n if len(self.bit_string[self.bit_string_index:]) % 8 != 0:\n self.bit_string_index += len(self.bit_string[self.bit_string_index:]) % 8\n\n # extract message and write it to a file\n message = self.encode_message(remove)\n with open('decode.txt', 'w') as output:\n output.write(message)\n\n print('Message Decoded')", "def decode(self, z):\n out = self.fc_decoder(z)\n out = out.view(-1, 512, 2, 2)\n out = self.decoder(out)\n return out", "def decode_raw(data):\n return RawWire().decode(data)", "def next(self, bytes):\n ## the protocol above IP, such as ICMP, UDP, TCP, AH, ESP etc.\n if self.protocol == IPPROTO_UDP:\n return udp.udp(bytes)\n elif self.protocol == IPPROTO_TCP:\n return tcp.tcp(bytes)\n elif self.protocol == IPPROTO_AH:\n return ipsec.ah(bytes)\n elif self.protocol == IPPROTO_ESP:\n return ipsec.esp(bytes)\n elif self.protocol == IPPROTO_ICMP:\n return icmpv4.icmpv4(bytes)\n # Fall through\n return None", "def decode(self):\n s = self.encoded_content\n if self.encoded_content:\n if self.encoding:\n if self.encoding == u'base64':\n s = decode_base64(s)\n else:\n raise Exception(u'unknown data encoding %s' % (self.encoding))\n if self.compression:\n if self.compression == u'gzip':\n s = decompress_gzip(s)\n else:\n raise Exception(u'unknown data compression %s' %(self.compression))\n else:\n raise Exception(u'no encoded content to decode')\n self.decoded_content = []\n for idx in xrange(0, len(s), 4):\n val = ord(str(s[idx])) | (ord(str(s[idx + 1])) << 8) | \\\n (ord(str(s[idx + 2])) << 16) | (ord(str(s[idx + 3])) << 24)\n self.decoded_content.append(val)\n # generate the 2D version\n self._gen_2D()", "def mostlikelydecode(self):\n\n # Add your code here\n most_likely_codeword = Cipher(None) # Replace None with a method\n return most_likely_codeword.decode(None) # Replace None. What does decode take again in the Cipher class? 
", "def decode(self, s):\n o = self._decoder.decode(s)\n return o", "def _decode(self, input_dict):\n pass", "def test_decode():\n decoding = d.decode()\n assert type(decoding) == list\n assert len(decoding) == 7\n assert decoding[0] == '-12;-1\\n\\nESS'\n assert decoding[-1] == '2;-2\\n\\nWSWESNESSS'\n for x in decoding:\n assert \"\\n\" in x", "def _decode1(self, body, data):\r\n if \" \" in body:\r\n evtype,body = body.split(\" \",1)\r\n else:\r\n evtype,body = body,\"\"\r\n evtype = evtype.upper()\r\n if evtype == \"CIRC\":\r\n m = re.match(r\"(\\d+)\\s+(\\S+)(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"CIRC event misformatted.\")\r\n ident,status,path,purpose,reason,remote = m.groups()\r\n ident = int(ident)\r\n if path:\r\n if \"PURPOSE=\" in path:\r\n remote = reason\r\n reason = purpose\r\n purpose=path\r\n path=[]\r\n elif \"REASON=\" in path:\r\n remote = reason\r\n reason = path\r\n purpose = \"\"\r\n path=[]\r\n else:\r\n path_verb = path.strip().split(\",\")\r\n path = []\r\n for p in path_verb:\r\n path.append(p.replace(\"~\", \"=\").split(\"=\")[0])\r\n else:\r\n path = []\r\n\r\n if purpose and \"REASON=\" in purpose:\r\n remote=reason\r\n reason=purpose\r\n purpose=\"\"\r\n\r\n if purpose: purpose = purpose[9:]\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n event = CircuitEvent(evtype, ident, status, path, purpose, reason,\r\n remote, body)\r\n elif evtype == \"STREAM\":\r\n #plog(\"DEBUG\", \"STREAM: \"+body)\r\n m = re.match(r\"(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)?:(\\d+)(\\sREASON=\\S+)?(\\sREMOTE_REASON=\\S+)?(\\sSOURCE=\\S+)?(\\sSOURCE_ADDR=\\S+)?(\\s+PURPOSE=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM event misformatted.\")\r\n ident,status,circ,target_host,target_port,reason,remote,source,source_addr,purpose = m.groups()\r\n ident,circ = map(int, (ident,circ))\r\n if not target_host: # This can happen on SOCKS_PROTOCOL failures\r\n target_host = \"(none)\"\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n if source: source = source[8:]\r\n if source_addr: source_addr = source_addr[13:]\r\n if purpose:\r\n purpose = purpose.lstrip()\r\n purpose = purpose[8:]\r\n event = StreamEvent(evtype, ident, status, circ, target_host,\r\n int(target_port), reason, remote, source, source_addr,\r\n purpose, body)\r\n elif evtype == \"ORCONN\":\r\n m = re.match(r\"(\\S+)\\s+(\\S+)(\\sAGE=\\S+)?(\\sREAD=\\S+)?(\\sWRITTEN=\\S+)?(\\sREASON=\\S+)?(\\sNCIRCS=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"ORCONN event misformatted.\")\r\n target, status, age, read, wrote, reason, ncircs = m.groups()\r\n\r\n #plog(\"DEBUG\", \"ORCONN: \"+body)\r\n if ncircs: ncircs = int(ncircs[8:])\r\n else: ncircs = 0\r\n if reason: reason = reason[8:]\r\n if age: age = int(age[5:])\r\n else: age = 0\r\n if read: read = int(read[6:])\r\n else: read = 0\r\n if wrote: wrote = int(wrote[9:])\r\n else: wrote = 0\r\n event = ORConnEvent(evtype, status, target, age, read, wrote,\r\n reason, ncircs, body)\r\n elif evtype == \"STREAM_BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM_BW event misformatted.\")\r\n event = StreamBwEvent(evtype, body, *m.groups())\r\n elif evtype == \"BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"BANDWIDTH event misformatted.\")\r\n read, written = map(long, m.groups())\r\n event = BWEvent(evtype, read, written, body)\r\n elif evtype in (\"DEBUG\", 
\"INFO\", \"NOTICE\", \"WARN\", \"ERR\"):\r\n event = LogEvent(evtype, body)\r\n elif evtype == \"NEWDESC\":\r\n ids_verb = body.split(\" \")\r\n ids = []\r\n for i in ids_verb:\r\n ids.append(i.replace(\"~\", \"=\").split(\"=\")[0].replace(\"$\",\"\"))\r\n event = NewDescEvent(evtype, ids, body)\r\n elif evtype == \"ADDRMAP\":\r\n # TODO: Also parse errors and GMTExpiry\r\n m = re.match(r'(\\S+)\\s+(\\S+)\\s+(\\\"[^\"]+\\\"|\\w+)', body)\r\n if not m:\r\n raise ProtocolError(\"ADDRMAP event misformatted.\")\r\n fromaddr, toaddr, when = m.groups()\r\n if when.upper() == \"NEVER\": \r\n when = None\r\n else:\r\n when = time.strptime(when[1:-1], \"%Y-%m-%d %H:%M:%S\")\r\n event = AddrMapEvent(evtype, fromaddr, toaddr, when, body)\r\n elif evtype == \"NS\":\r\n event = NetworkStatusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"NEWCONSENSUS\":\r\n event = NewConsensusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"BUILDTIMEOUT_SET\":\r\n m = re.match(\r\n r\"(\\S+)\\sTOTAL_TIMES=(\\d+)\\sTIMEOUT_MS=(\\d+)\\sXM=(\\d+)\\sALPHA=(\\S+)\\sCUTOFF_QUANTILE=(\\S+)\",\r\n body)\r\n set_type, total_times, timeout_ms, xm, alpha, quantile = m.groups()\r\n event = BuildTimeoutSetEvent(evtype, set_type, int(total_times),\r\n int(timeout_ms), int(xm), float(alpha),\r\n float(quantile), body)\r\n elif evtype == \"GUARD\":\r\n m = re.match(r\"(\\S+)\\s(\\S+)\\s(\\S+)\", body)\r\n entry, guard, status = m.groups()\r\n event = GuardEvent(evtype, entry, guard, status, body)\r\n elif evtype == \"TORCTL_TIMER\":\r\n event = TimerEvent(evtype, data)\r\n else:\r\n event = UnknownEvent(evtype, body)\r\n\r\n return event", "def decode(self, output, nwords, params):\n raise NotImplementedError()", "def _decode_back(self):\n with tf.variable_scope('same_question_concat'):\n batch_size = tf.shape(self.start_label)[0]\n concat_passage_encodes = tf.reshape(\n self.fuse_p_encodes,\n [batch_size, -1, self.hidden_size]\n )\n no_dup_question_encodes = tf.reshape(\n self.sep_q_encodes,\n [batch_size, -1, tf.shape(self.sep_q_encodes)[1], self.hidden_size]\n )[0:, 0, 0:, 0:]\n decoder = PointerNetDecoder(self.hidden_size)\n self.start_probs, self.end_probs = decoder.decode(concat_passage_encodes,\n no_dup_question_encodes)\n\n outer = tf.matmul(tf.expand_dims(tf.nn.softmax(self.start_probs), axis=2),\n tf.expand_dims(tf.nn.softmax(self.end_probs), axis=1))\n outer = tf.matrix_band_part(outer, 0, -1)\n self.yp1 = tf.argmax(tf.reduce_max(outer, axis=2), axis=1)\n self.yp2 = tf.argmax(tf.reduce_max(outer, axis=1), axis=1)", "def decode(self) -> None:\n self.msg_type = AISType(self.nmea.ais_id)\n self.content = decode(self.nmea)", "def decode(self, z: Any, *args, **kwargs) -> Any:\n return self.decoder(z)", "def decode(self, byteString):\n decoded = ''\n portion_left = byteString\n while len(portion_left) > 0:\n substr_len = 1\n symbol = None\n while (symbol == None) and (substr_len <= len(portion_left)):\n symbol = self.decode_symbol(portion_left[:substr_len])\n substr_len += 1\n\n if symbol == None:\n print \"decode failed:\"\n print \"decoded: \" + decoded\n print \"left: \" + portion_left\n return None\n\n decoded += symbol\n #print \"decoded: _\" + symbol + \"_\"\n portion_left = portion_left[substr_len-1:]\n\n return decoded", "def decode_proto(self):\n # Create output directory it does not exist\n if not os.path.exists(PROTO_CACHE):\n os.makedirs(PROTO_CACHE)\n\n # Compile proto (TODO: Assumes protoc is in PATH)\n cmd = \"protoc -I {} --python_out={} {}\".format(\n 
os.path.dirname(self.proto_file_path),\n PROTO_CACHE,\n self.proto_file_path)\n subprocess.check_call(cmd, shell=True)\n\n # Append compiled python module to Python's system path\n sys.path.insert(0, PROTO_CACHE)\n globals()[\"ProtoDefinition\"] = __import__(\"u_s_s_r_proto_pb2\")", "def packet_decoder(packet_type,string):\n dct = json.loads(string)\n if packet_type == HS_Version:\n return HS_Version(dct['version'])\n if packet_type == HS_Options:\n return HS_Options(minport=dct['minport'], maxport=dct['maxport'],\n portusage=dct['portusage'], protocol=dct['protocol'],\n timeout=dct['timeout'], payload=dct['payload'],\n key=dct['key'])\n if packet_type == Data:\n return Data(data=dct['data'], terminate=int(dct['terminate']))\n if packet_type == Management:\n return Management(dct['command'],location=dct['location'])\n if packet_type == Switching:\n return Switching(dct['status'])\n if packet_type == Error:\n return Error()", "def decode_message(self, message):\n\n message[\"pl\"] = json.loads(message[\"pl\"])\n if message[\"pl\"][\"~c\"] != \"0\":\n decoded = base64.b64decode(message[\"pl\"][\"pl\"])\n decoded = zlib.decompress(decoded)\n message[\"pl\"][\"pl\"] = json.loads(decoded)\n return message", "def decode(type_info: CLType, as_bytes: typing.List[int]) -> typing.List[int]:\n if isinstance(type_info, CLType_Simple):\n entity = _SIMPLE_TYPE_DECODERS[type_info.typeof](as_bytes)\n elif isinstance(type_info, CLType_ByteArray):\n entity = decode_byte_array(as_bytes)\n elif isinstance(type_info, CLType_Option):\n entity = decode_option(as_bytes, type_info.inner_type)\n else:\n entity = None\n\n return entity", "def get_decoding_op(self):\n return self._dual.get_op('output')", "def decode_network_packet(buf):\n off = 0\n blen = len(buf)\n\n while off < blen:\n ptype, plen = header.unpack_from(buf, off)\n\n if plen > blen - off:\n raise ValueError(\"Packet longer than amount of data in buffer\")\n\n if ptype not in _decoders:\n raise ValueError(\"Message type %i not recognized\" % ptype)\n\n yield ptype, _decoders[ptype](ptype, plen, buf[off:])\n off += plen", "def decode(self, z):\n l1 = self.fc3(z)\n l1 = l1.unsqueeze(0).unsqueeze(0).unsqueeze(0).permute(0,3,1,2)\n h1 = F.relu(self.deconv1(l1))\n h2 = F.relu(self.deconv2(h1))\n h3 = F.relu(self.deconv3(h2))\n return torch.sigmoid(self.deconv4(h3))", "def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 16\n (_x.FL_vel, _x.FR_vel, _x.BL_vel, _x.BR_vel,) = _struct_4i.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill", "def decode(self):\n if self._decoded:\n return self._decoded_data\n # Need to use self._filter_key because, for some reason beyond my\n # grasp, the key changes when the stream data is external\n # Also, since these may be lists, let's make that happen\n filters = ensure_list(self._header.get(self._filter_key, []))\n params = ensure_list(self._header.get(self._params_key, []))\n if not params:\n params = [{} for f in filters]\n composed_filters = chain_funcs((partial(StreamFilter[f].decode, **p)\n for f, p in zip(filters, params)))\n decoded_data = composed_filters(self._data)\n self._decoded = True\n self._decoded_data = decoded_data\n return self._decoded_data", "def decode_message(buf, typedef=None, pos=0, end=None, group=False, depth=0, path=None):\n if end is None:\n end = len(buf)\n\n if typedef is None:\n typedef = {}\n else:\n # Don't want to accidentally modify the original\n typedef = 
copy.deepcopy(typedef)\n\n if path is None:\n path = []\n\n output = {}\n\n while pos < end:\n # Read in a field\n try:\n if six.PY2:\n tag, pos = decoder._DecodeVarint(str(buf), pos)\n else:\n tag, pos = decoder._DecodeVarint(buf, pos)\n except (IndexError, decoder._DecodeError) as exc:\n six.raise_from(DecoderException(\n \"Error decoding length from buffer: %r...\" %\n (binascii.hexlify(buf[pos : pos+8]))), exc)\n\n field_number, wire_type = wire_format.UnpackTag(tag)\n\n # Convert to str\n field_number = str(field_number)\n orig_field_number = field_number\n\n field_path = path[:]\n field_path.append(field_number)\n\n if wire_type not in blackboxprotobuf.lib.types.wire_type_defaults:\n raise DecoderException('%d is not a valid wire type at pos %d.' % (wire_type, pos), field_path)\n\n field_typedef = None\n if field_number in typedef:\n field_typedef = typedef[field_number]\n else:\n field_typedef = {}\n field_typedef['type'] = blackboxprotobuf.lib.types.wire_type_defaults[wire_type]\n\n field_type = field_typedef['type']\n\n # If field_type is None, its either an unsupported wire type, length delim or group\n # length delim we have to try and decode first\n field_out = None\n if field_type is None:\n if wire_type == wire_format.WIRETYPE_LENGTH_DELIMITED:\n out, field_type = decode_guess(buf, pos, depth=depth, path=field_path)\n if field_type == 'message':\n field_out, message_typedef, pos = out\n field_typedef['message_typedef'] = message_typedef\n else:\n field_out, pos = out\n elif wire_type == wire_format.WIRETYPE_END_GROUP:\n # TODO Should probably match the field_number to START_GROUP\n if not group:\n raise DecoderException( \"Found END_GROUP before START_GROUP\", field_path)\n # exit out\n return output, typedef, pos\n else:\n raise DecoderException(\"Could not find default type for wiretype: %d\" % wire_type, field_path)\n else:\n if field_type == 'message':\n #TODO probably big enough to factor out\n message_typedef = None\n # Check for a anonymous type\n if 'message_typedef' in field_typedef:\n message_typedef = field_typedef['message_typedef']\n # Check for type defined by message type name\n elif 'message_type_name' in field_typedef:\n message_typedef = blackboxprotobuf.lib.known_messages[\n field_typedef['message_type_name']]\n\n try:\n field_out, message_typedef, pos = decode_lendelim_message(\n buf, message_typedef, pos, path=field_path)\n # Save type definition\n field_typedef['message_typedef'] = message_typedef\n except DecoderException as exc:\n # If this is the root message just fail\n if pos == 0:\n six.reraise(*sys.exc_info())\n logging.debug(\n (\"Encountered exception when decoding message at %s \"\n \"with known typdef. Trying alt typedefs and then \"\n \"anonymous. Exception: \\n%s\"),\n \"->\".join(map(str, field_path)), str(exc))\n\n if field_out is None and 'alt_typedefs' in field_typedef:\n # check for an alternative type definition\n for alt_field_number, alt_typedef in field_typedef['alt_typedefs'].items():\n try:\n field_out, message_typedef, pos = decode_lendelim_message(\n buf, alt_typedef, pos, path=field_path)\n except DecoderException as exc:\n logging.debug(\n (\"Encountered exception when decoding message at %s with alt_typedef %s. Trying anonymous decoding next. 
Exception:\\n%s\"),\n \"->\".join(map(str, field_path)),\n str(alt_field_number),\n str(exc))\n\n if field_out is not None:\n # Found working typedef\n field_typedef['alt_typedefs'][alt_field_number] = message_typedef\n field_number = field_number + \"-\" + alt_field_number\n break\n\n if field_out is None:\n # Still no typedef, try anonymous, and let the error propogate if it fails\n field_out, message_typedef, pos = \\\n decode_lendelim_message(buf, {}, pos, path=field_path)\n\n if 'alt_typedefs' in field_typedef:\n # get the next higher alt field number\n alt_field_number = str(\n max(map(int, field_typedef['alt_typedefs'].keys()))\n + 1)\n else:\n field_typedef['alt_typedefs'] = {}\n alt_field_number = '1'\n\n field_typedef['alt_typedefs'][alt_field_number] = message_typedef\n field_number = field_number + \"-\" + alt_field_number\n elif field_type == 'group':\n group_typedef = None\n # Check for a anonymous type\n if 'group_typedef' in field_typedef:\n group_typedef = field_typedef['group_typedef']\n field_out, group_typedef, pos = \\\n decode_group(buf, group_typedef, pos, depth=depth, path=field_path)\n # Save type definition\n field_typedef['group_typedef'] = group_typedef\n else:\n # Verify wiretype matches\n if blackboxprotobuf.lib.types.wiretypes[field_type] != wire_type:\n raise DecoderException(\n \"Invalid wiretype for field number %s. %s is not wiretype %s\"\n % (field_number, field_type, wire_type), field_path)\n\n # Simple type, just look up the decoder\n try:\n field_out, pos = blackboxprotobuf.lib.types.decoders[field_type](buf, pos)\n except DecoderException as exc:\n exc.set_path(field_path)\n six.reraise(*sys.exc_info())\n field_typedef['type'] = field_type\n if 'name' not in field_typedef:\n field_typedef['name'] = ''\n\n field_key = field_number\n if '-' not in field_number and 'name' in field_typedef and field_typedef['name'] != '':\n field_key = field_typedef['name']\n # Deal with repeats\n if field_key in output:\n if isinstance(field_out, list):\n if isinstance(output[field_key], list):\n output[field_key] += field_out\n else:\n output[field_key] = field_out.append(output[field_key])\n else:\n if isinstance(output[field_key], list):\n output[field_key].append(field_out)\n else:\n output[field_key] = [output[field_key], field_out]\n else:\n output[field_key] = field_out\n typedef[orig_field_number] = field_typedef\n if pos > end:\n raise DecoderException(\n \"Field sizes are greater than designated length. 
pos: %d end_pos: %d\" % (pos, end))\n # Should never hit here as a group\n if group:\n raise DecoderException(\"Got START_GROUP with no END_GROUP.\")\n return output, typedef, pos", "def decode(self, code):\n genotype = self.calc_genotype(code)\n cfg_result = copy.deepcopy(self.darts_cfg)\n cfg_result.super_network.normal.genotype = genotype[0]\n cfg_result.super_network.reduce.genotype = genotype[1]\n cfg_result.super_network.search = False\n cfg_result.super_network.auxiliary = True\n # TODO: need to remove\n cfg_result.super_network[\"aux_size\"] = 8\n cfg_result.super_network[\"auxiliary_layer\"] = 13\n cfg_result.super_network.network = [\"PreOneStem\", \"normal\", \"normal\", \"normal\", \"normal\",\n \"normal\", \"normal\", \"reduce\", \"normal\", \"normal\", \"normal\", \"normal\",\n \"normal\", \"normal\",\n \"reduce\", \"normal\", \"normal\", \"normal\", \"normal\", \"normal\", \"normal\"]\n return cfg_result", "def decode(self, tgt, encoder_out):\n tgt_mask = get_lookahead_mask(tgt)\n tgt = self.emb(tgt)\n tgt = self.phn_lin(tgt)\n if self.attention_type == \"RelPosMHAXL\":\n # we use fixed positional encodings in the decoder\n tgt = tgt + self.positional_encoding_decoder(tgt)\n encoder_out = encoder_out + self.positional_encoding_decoder(\n encoder_out\n )\n elif self.positional_encoding_type == \"fixed_abs_sine\":\n tgt = tgt + self.positional_encoding(tgt) # add the encodings here\n prediction, self_attns, multihead_attns = self.decoder(\n tgt,\n encoder_out,\n tgt_mask=tgt_mask,\n pos_embs_tgt=None,\n pos_embs_src=None,\n )\n attention = multihead_attns[-1]\n return prediction, attention" ]
[ "0.75996035", "0.6549479", "0.65207684", "0.6393973", "0.6335503", "0.6324604", "0.6315522", "0.62680393", "0.62302494", "0.6203099", "0.61799216", "0.6160706", "0.6160706", "0.6118852", "0.6044695", "0.6001132", "0.5993506", "0.5988984", "0.59845906", "0.59568155", "0.5906239", "0.5903562", "0.5901664", "0.58828473", "0.5867668", "0.5863176", "0.58596295", "0.583821", "0.583821", "0.58307487", "0.5777897", "0.5772923", "0.5771005", "0.57525873", "0.57521784", "0.57443655", "0.5733613", "0.5731798", "0.57191706", "0.5676054", "0.56750673", "0.56701034", "0.56654096", "0.56592834", "0.5657787", "0.5653931", "0.5639652", "0.5619153", "0.561164", "0.5594201", "0.55752283", "0.5574911", "0.5574898", "0.5573578", "0.55349183", "0.5529259", "0.5519681", "0.5514342", "0.55024874", "0.54813963", "0.54730123", "0.54694384", "0.54590875", "0.5458312", "0.54525375", "0.5433127", "0.54254967", "0.5422195", "0.540783", "0.540549", "0.540254", "0.53969693", "0.53893274", "0.53879076", "0.5382651", "0.5371207", "0.536641", "0.5362138", "0.5351243", "0.5343234", "0.53387797", "0.5331377", "0.5323326", "0.5323106", "0.53191733", "0.5317023", "0.5316178", "0.52814764", "0.5279521", "0.52731514", "0.52653605", "0.5258759", "0.5247549", "0.52400076", "0.5239138", "0.52120656", "0.5210966", "0.5205614", "0.5198527", "0.51860315" ]
0.67941993
1
Import next layer extractor.
def _import_next_layer(self, *args, **kwargs): # pylint: disable=signature-differs raise UnsupportedCall(f"'{self.__class__.__name__}' object has no attribute '_import_next_layer'")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _import_next_layer(self, file_, length):\n if self._prot == 'Ethernet':\n from .link import Ethernet as Protocol\n elif self._prot == 'IPv4':\n from .internet import IPv4 as Protocol\n elif self._prot == 'IPv6':\n from .internet import IPv6 as Protocol\n else:\n data = file_.read(*[length]) or None\n return data, None\n next_ = Protocol(file_, length)\n return next_.info, next_.protochain", "def set_next_layer(self, layer):\n self._next_layer = layer", "def extractor(self):\n \n if self._extractor is None:\n if self.extractor_type == '29v2':\n self._extractor = self.build_extractor_29layers_v2(name='extract29v2', block=self._res_block, layers=[1, 2, 3, 4])\n elif self.extractor_type == '29':\n self._extractor = self.build_extractor_29layers(name='extract29', block=self._res_block, layers=[1, 2, 3, 4])\n elif self.extractor_type == '9':\n self._extractor = self.build_extractor_9layers(name='extract9')\n \n if self.extractor_weights is not None:\n self._extractor.load_weights(self.extractor_weights)\n \n return self._extractor", "def handle(self, configuration_options=[{'index': 0}], *args, **kwargs):\n layers = self.import_file(configuration_options=configuration_options)\n\n for layer, config in layers:\n config['handler_results'] = self.run_import_handlers(layer, config)\n\n return layers", "def _imported_functions(self):\n\n i = 0\n while 1:\n thunk = obj.Object('_IMAGE_THUNK_DATA',\n offset = self.obj_parent.DllBase + self.OriginalFirstThunk +\n i * self.obj_vm.profile.get_obj_size('_IMAGE_THUNK_DATA'),\n vm = self.obj_native_vm)\n\n # We've reached the end when the element is zero \n if thunk == None or thunk.AddressOfData == 0:\n break\n\n o = obj.NoneObject(\"Ordinal not accessible?\")\n n = obj.NoneObject(\"Imported by ordinal?\")\n f = obj.NoneObject(\"FirstThunk not accessible\")\n\n # If the highest bit (32 for x86 and 64 for x64) is set, the function is \n # imported by ordinal and the lowest 16-bits contain the ordinal value. \n # Otherwise, the lowest bits (0-31 for x86 and 0-63 for x64) contain an \n # RVA to an _IMAGE_IMPORT_BY_NAME struct. \n if thunk.OrdinalBit == 1:\n o = thunk.Ordinal & 0xFFFF\n else:\n iibn = obj.Object(\"_IMAGE_IMPORT_BY_NAME\",\n offset = self.obj_parent.DllBase +\n thunk.AddressOfData,\n vm = self.obj_native_vm)\n o = iibn.Hint\n n = iibn.Name\n\n # See if the import is bound (i.e. 
resolved)\n first_thunk = obj.Object('_IMAGE_THUNK_DATA',\n offset = self.obj_parent.DllBase + self.FirstThunk +\n i * self.obj_vm.profile.get_obj_size('_IMAGE_THUNK_DATA'),\n vm = self.obj_native_vm)\n if first_thunk:\n f = first_thunk.Function.v()\n\n yield o, f, str(n or '')\n i += 1", "def import_file(self, *args, **kwargs):\n filename = self.file\n self.completed_layers = []\n err = GdalErrorHandler()\n gdal.PushErrorHandler(err.handler)\n gdal.UseExceptions()\n configuration_options = kwargs.get('configuration_options', [{'index': 0}])\n\n # Configuration options should be a list at this point since the importer can process multiple layers in a\n # single import\n if isinstance(configuration_options, dict):\n configuration_options = [configuration_options]\n\n data, inspector = self.open_source_datastore(filename, *args, **kwargs)\n\n datastore_layers = inspector.describe_fields()\n\n if len(datastore_layers) == 0:\n logger.debug('No Dataset found')\n\n layers_info = []\n\n # Add index for any layers configured by name\n for layer_configuration in configuration_options:\n if 'layer_name' in layer_configuration:\n lookup = 'layer_name'\n elif 'index' in layer_configuration:\n lookup = 'index'\n else:\n lookup = None\n logger.debug('could not find lookup')\n continue\n\n for datastore_layer in datastore_layers:\n if datastore_layer.get(lookup) == layer_configuration.get(lookup):\n layer_configuration.update(datastore_layer)\n layers_info.append(layer_configuration)\n\n for layer_options in layers_info:\n if layer_options['raster']:\n \"\"\"\n File is a raster, we need to convert into optimized GeoTiff\n and skip any further testing or loading into target_store\n \"\"\"\n # Increment filename to make sure target doesn't exists\n filedir, filebase = os.path.split(filename)\n outfile = '%s.tif' % os.path.splitext(filebase)[0]\n fileout = increment_filename(os.path.join(RASTER_FILES, outfile))\n raster_import(layer_options['path'], fileout)\n self.completed_layers.append([fileout, layer_options])\n else:\n target_file, _ = self.open_target_datastore(self.target_store)\n target_create_options = []\n\n # Prevent numeric field overflow for shapefiles https://trac.osgeo.org/gdal/ticket/5241\n if target_file.GetDriver().GetName() == 'PostgreSQL':\n target_create_options.append('PRECISION=NO')\n\n layer_options['modified_fields'] = {}\n layer = data.GetLayer(layer_options.get('index'))\n layer_name = layer_options.get('name', layer.GetName().lower())\n layer_type = self.get_layer_type(layer, data)\n srs = layer.GetSpatialRef()\n\n if layer_name.lower() == 'ogrgeojson':\n try:\n layer_name = os.path.splitext(os.path.basename(filename))[0].lower()\n except IndexError:\n pass\n\n layer_name = launder(str(layer_name))\n\n # default the layer to 4326 if a spatial reference is not provided\n if not srs:\n srs = osr.SpatialReference()\n srs.ImportFromEPSG(4326)\n\n # pass the srs authority code to handlers\n if srs.AutoIdentifyEPSG() == 0:\n layer_options['srs'] = '{0}:{1}'.format(srs.GetAuthorityName(None), srs.GetAuthorityCode(None))\n\n n = 0\n while True:\n n += 1\n try:\n target_layer = self.create_target_dataset(target_file, layer_name, srs, layer_type,\n options=target_create_options)\n except RuntimeError as e:\n # logger.exception('exception in creating target dataset')\n # the layer already exists in the target store, increment the name\n if 'Use the layer creation option OVERWRITE=YES to replace it.' 
in e.message:\n layer_name = increment(layer_name)\n\n # try 100 times to increment then break\n if n >= 100:\n break\n\n continue\n else:\n raise e\n break\n\n # adding fields to new layer\n layer_definition = ogr.Feature(layer.GetLayerDefn())\n source_fid = None\n\n wkb_field = 0\n\n for i in range(layer_definition.GetFieldCount()):\n\n field_def = layer_definition.GetFieldDefnRef(i)\n\n if field_def.GetName() == target_layer.GetFIDColumn() and field_def.GetType() != 0:\n field_def.SetType(0)\n\n if field_def.GetName() != 'wkb_geometry':\n target_layer.CreateField(field_def)\n new_name = target_layer.GetLayerDefn().GetFieldDefn(i - wkb_field).GetName()\n old_name = field_def.GetName()\n\n if new_name != old_name:\n layer_options['modified_fields'][old_name] = new_name\n\n if old_name == target_layer.GetFIDColumn() and not layer.GetFIDColumn():\n source_fid = i\n else:\n wkb_field = 1\n\n if wkb_field is not 0:\n layer.SetIgnoredFields(['wkb_geometry'])\n\n for i in range(0, layer.GetFeatureCount()):\n feature = layer.GetFeature(i)\n\n if feature and feature.geometry():\n\n if not layer.GetFIDColumn():\n feature.SetFID(-1)\n\n if feature.geometry().GetGeometryType() != target_layer.GetGeomType() and \\\n target_layer.GetGeomType() in range(4, 7):\n\n conversion_function = ogr.ForceToMultiPolygon\n\n if target_layer.GetGeomType() == 5:\n conversion_function = ogr.ForceToMultiLineString\n\n elif target_layer.GetGeomType() == 4:\n conversion_function = ogr.ForceToMultiPoint\n\n geom = ogr.CreateGeometryFromWkb(feature.geometry().ExportToWkb())\n feature.SetGeometry(conversion_function(geom))\n\n if source_fid is not None:\n feature.SetFID(feature.GetField(source_fid))\n\n try:\n target_layer.CreateFeature(feature)\n\n except:\n for field in range(0, feature.GetFieldCount()):\n if feature.GetFieldType(field) == ogr.OFTString:\n try:\n feature.GetField(field).decode('utf8')\n except UnicodeDecodeError:\n feature.SetField(field, decode(feature.GetField(field)))\n except AttributeError:\n continue\n try:\n target_layer.CreateFeature(feature)\n except err as e:\n logger.error('Create feature failed: {0}'.format(gdal.GetLastErrorMsg()))\n raise e\n\n self.completed_layers.append([target_layer.GetName(), layer_options])\n\n return self.completed_layers", "def add_layer(self, layer):\n assert isinstance(layer, torch.nn.Module)\n setattr(self, 'layer'+str(self._layer_counter), layer)\n self._layer_counter += 1\n # layer indexing : layer 0 is closest to input", "def importer():\n pass", "def zoo_import(name, head=''):\n net = gz.get_model(name, pretrained=True)\n export_block(head + name, net, preprocess=True)", "def import_ops(self):\n if self.is_training:\n self.lr = tf.get_collection_ref(\"lr\")[0]\n self.new_lr = tf.get_collection_ref(\"new_lr\")[0]\n self.lr_update = tf.get_collection_ref(\"lr_update\")[0]\n\n self.cost = tf.get_collection_ref(util.with_prefix(self.name, \"cost\"))[0]\n self.initial_state = util.import_state_tuples(\n self.initial_state, self.initial_state_name, self.name)\n self.final_state = util.import_state_tuples(\n self.final_state, self.final_state_name, self.name)", "def get_feature_extractor():\n net = alexnet(pretrained=False)\n net.load_state_dict(model_zoo.load_url(model_urls['alexnet'], \n model_dir=model_urls['local']))\n\n feature_extractor = nn.Sequential(*list(net.classifier.children())[:-1])\n net.classifier = feature_extractor\n net.eval()\n return net", "def _init_layers(self):\n self._init_predictor()\n if self.use_edge_fusion:\n self._init_edge_module()", 
"def add_layers(self, layers):\n\n existing_layers = self.layers\n assert len(existing_layers) > 0\n for layer in layers:\n assert layer.get_mlp() is None\n layer.set_mlp(self)\n layer.set_input_space(existing_layers[-1].get_output_space())\n existing_layers.append(layer)\n assert layer.layer_name not in self.layer_names\n self.layer_names.add(layer.layer_name)", "def import_forward(self):\n self.import_property('OG')\n self.import_property('IBU')\n self.import_property('ABV')\n self.import_property('SRM')", "def add_layer(self, full_path, delimiter=\"::\"):\n if self.find_layer_from_fullpath(full_path):\n return self.find_layer_from_fullpath(full_path)\n else:\n # Cumulative List Split\n # Using accumulate() + join()\n temp = full_path.split(delimiter)\n res = list(accumulate(temp, lambda x, y: delimiter.join([x, y])))\n parent_layer = Layer()\n for part in res:\n if self.find_layer_from_fullpath(part):\n parent_layer = self.find_layer_from_fullpath(part)\n continue\n else:\n *parent_name, name = part.split(delimiter)\n _layer = Layer() # Create Layer\n _layer.Name = name # Set Layer Name\n if parent_layer:\n _layer.ParentLayerId = parent_layer.Id # Set parent Id\n self._file3dm.Layers.Add(_layer) # Add Layer\n _layer = self._file3dm.Layers.FindName(name, parent_layer.Id)\n\n # set parent layer to this layer (for next iter)\n parent_layer = _layer\n # Sets Layer as class attr\n setattr(UmiLayers, _layer.FullPath, _layer)\n return _layer", "def set_next(self, next_layer):\n self.next_layer = next_layer", "def layer_from_name(layer_name):\n if layer_name in _layer_name_cache:\n return _layer_name_cache[layer_name]\n layer_names = layer_name.split('.')\n layer_module, module_layer_name = layer_names[:-1], layer_names[-1]\n module_name = '.'.join(layer_module)\n module = import_name(module_name)\n try:\n return getattr(module, module_layer_name)\n except AttributeError:\n # the default error is very uninformative:\n # AttributeError: 'module' object has no attribute 'DemoLayer'\n # it doesn't say *which* module\n raise AttributeError('module %r has no attribute %r'\n % (module_name, module_layer_name))", "def _decode_next_layer(self, *args, **kwargs): # pylint: disable=signature-differs\n raise UnsupportedCall(f\"'{self.__class__.__name__}' object has no attribute '_decode_next_layer'\")", "def _decode_next_layer(self, dict_, length=None):\n # make next layer protocol name\n proto = str(self._prot or 'Raw').lower()\n\n # make BytesIO from frame package data\n bytes_ = io.BytesIO(self._file.read(dict_['len']))\n info, protochain = self._import_next_layer(bytes_, length)\n\n # write info and protocol chain into dict\n self._protos = ProtoChain(self._prot, protochain)\n dict_[proto] = info\n dict_['protocols'] = self._protos.chain\n return dict_", "def _analyse_stmt_Import(self, statement: ast.Import, *, next: CFNode) -> CFNode:\n return self._ast_node(statement, next=next, error=self._raise)", "def connect_layers(self):\n if not self.check():\n msg = \"Failed to check neural network.\"\n print(msg)\n logging.error(msg)\n return\n\n # 1. set input layer\n pre_layer = self.input_layer\n for layer in self.hidden_layers:\n layer.set_input_layer(pre_layer)\n pre_layer = layer\n self.output_layer.set_input_layer(pre_layer)\n\n # 2. set output layer\n next_layer = self.output_layer\n for layer in reversed(self.hidden_layers):\n layer.set_next_layer(next_layer)\n next_layer = layer\n self.input_layer.set_next_layer(next_layer)\n\n # 3. 
call layer init\n self.input_layer.init()\n for layer in self.hidden_layers:\n layer.init()\n self.output_layer.init()\n\n return", "def run(layers):", "def forward(self, curr_layer):\n if self.cin == self.cout and self.stride == 1:\n return self.path(curr_layer) + curr_layer\n else:\n return self.path(curr_layer)", "def forward(self, curr_layer):\n if self.cin == self.cout and self.stride == 1:\n return self.path(curr_layer) + curr_layer\n else:\n return self.path(curr_layer)", "def forward(self, curr_layer):\n if self.cin == self.cout and self.stride == 1:\n return self.path(curr_layer) + curr_layer\n else:\n return self.path(curr_layer)", "def infer_layer(f):\n return layer_host", "def import_module(self, location, name):", "async def async_step_import(self, import_data: dict[str, str]) -> FlowResult:\n import_source = import_data.pop(\"import_source\")\n if import_source == \"geography_by_coords\":\n return await self.async_step_geography_by_coords(import_data)\n return await self.async_step_geography_by_name(import_data)", "def _post_install(dir_):\n scapy_locations = get_scapy_locations(get_site_packages())\n for scapy_location in scapy_locations:\n scapy_config = os.path.join(scapy_location, \"config.py\")\n processing_layer_list = False\n for line in fileinput.input(scapy_config, inplace=1, backup=\".bak\"):\n if line.strip().startswith(\"load_layers\"):\n print(line, end=\"\")\n processing_layer_list = True\n else:\n if processing_layer_list and line.strip().endswith(\"]\"):\n # TODO, consider single quote strings, and consider lonely\n # ] characters\n last_quote = line.rfind(\"\\\"\")\n if last_quote > 0 and \"http2\" not in line:\n print(\"%s, \\\"http2\\\" ]\" % line[\n :last_quote + 1], end=\"\")\n processing_layer_list = False\n else:\n print(line)\n processing_layer_list = False\n else:\n print(line, end=\"\")", "def forward(self, input):\n return self.layers(input)", "def build_layers(self):\n raise NotImplementedError", "def add_layer(self, *args):\n\n nm = None\n\n #check to see if we're sending an already formed layer to add - used for data file\n if len(args) == 1 & isinstance(args[0], QgsVectorLayer):\n print('Importing {} as a vector'.format(args[0]))\n self.project.addMapLayer(args[0])\n nm = args[0].name()\n\n elif len(args) > 1:\n print('Importing {} as a vector'.format(args[0]))\n print(args)\n self.project.addMapLayer(QgsVectorLayer(*args))\n nm = args[1]\n\n if nm:\n self.get_layer(nm)\n\n else:\n print()\n print('***Bad map layer for {}***'.format(str(args)))\n print()", "def forward(self, x):\n sources = list()\n new_sources = list()\n\n # apply lds to the initial image\n x_pool = self.lds(x)\n\n # apply vgg up to conv4_3\n for k in range(22):\n x = self.features[k](x)\n conv4_3_bn = self.ibn1(x)\n x_pool1_skip, x_pool1_icn = self.icn1(x_pool)\n s = self.Norm1(conv4_3_bn * x_pool1_icn)\n\n # apply vgg up to fc7\n for k in range(22, 34):\n x = self.features[k](x)\n conv7_bn = self.ibn2(x)\n x_pool2_skip, x_pool2_icn = self.icn2(x_pool1_skip)\n p = self.Norm2(self.dsc1(s) + conv7_bn * x_pool2_icn)\n\n x = self.features[34](x)\n\n # apply extra layers and cache source layer outputs\n for k, v in enumerate(self.extra):\n x = v(x)\n if k == 0:\n x_pool3_skip, x_pool3_icn = self.icn3(x_pool2_skip)\n w = self.Norm3(self.dsc2(p) + x * x_pool3_icn)\n elif k == 2:\n x_pool4_skip, x_pool4_icn = self.icn4(x_pool3_skip)\n q = self.Norm4(self.dsc3(w) + x * x_pool4_icn)\n elif k == 4:\n o = self.Norm5(self.dsc4(q) + x)\n sources.append(o)\n elif k == 7 or k == 9:\n 
sources.append(x)\n else:\n pass\n\n # project the forward features into lower dimension.\n tmp1 = self.proj1(p)\n tmp2 = self.proj2(w)\n tmp3 = self.proj3(q)\n tmp4 = self.proj4(o)\n\n # The conv4_3 level\n proj1 = F.upsample(tmp1, scale_factor=2, mode='bilinear')\n proj2 = F.upsample(tmp2, scale_factor=4, mode='bilinear')\n proj3 = F.upsample(tmp3, scale_factor=8, mode='bilinear')\n proj4 = F.upsample(tmp4, scale_factor=16, mode='bilinear')\n proj = torch.cat([proj1, proj2, proj3, proj4], dim=1)\n\n agent1 = self.agent1(s)\n\n convert1 = self.convert1(proj)\n pred1 = torch.cat([agent1, convert1], dim=1)\n pred1 = self.merge1(pred1)\n new_sources.append(pred1)\n\n # The fc_7 level\n proj2 = F.upsample(tmp2, scale_factor=2, mode='bilinear')\n proj3 = F.upsample(tmp3, scale_factor=4, mode='bilinear')\n proj4 = F.upsample(tmp4, scale_factor=8, mode='bilinear')\n proj = torch.cat([proj2, proj3, proj4], dim=1)\n\n agent2 = self.agent2(p)\n convert2 = self.convert2(proj)\n pred2 = torch.cat([agent2, convert2], dim=1)\n pred2 = self.merge2(pred2)\n new_sources.append(pred2)\n\n # The conv8 level\n proj3 = F.upsample(tmp3, scale_factor=2, mode='bilinear')\n proj4 = F.upsample(tmp4, scale_factor=4, mode='bilinear')\n proj = torch.cat([proj3, proj4], dim=1)\n\n agent3 = self.agent3(w)\n convert3 = self.convert3(proj)\n pred3 = torch.cat([agent3, convert3], dim=1)\n pred3 = self.merge3(pred3)\n new_sources.append(pred3)\n\n # The conv9 level\n proj4 = F.upsample(tmp4, scale_factor=2, mode='bilinear')\n proj = proj4\n\n agent4 = self.agent4(q)\n convert4 = self.convert4(proj)\n pred4 = torch.cat([agent4, convert4], dim=1)\n pred4 = self.merge4(pred4)\n new_sources.append(pred4)\n\n for prediction in sources:\n new_sources.append(prediction)\n\n return new_sources", "def step(self):\n for layer in self.layers:\n layer.step()", "def compile(self):\n for layer in self.layers:\n layer._Dense__load()", "def append_import_thunk_to_next_of_descriptor(self, import_thunk,\n descriptor):\n descriptor_index = self.import_structures.index(descriptor)\n self.import_structures.insert(descriptor_index + 1, import_thunk)", "def initialize_layers(self, layers_config: dict, inputs=None):\n layers_config = layers_config.copy()\n input_lyrs = []\n initiated_layers = OrderedDict()\n wrp_layer = None # indicator for wrapper layers\n first_layer = True\n\n for lyr, lyr_args in layers_config.items():\n\n lyr_config, lyr_inputs, named_outs, call_args = self.deconstruct_lyr_args(lyr, lyr_args)\n\n lyr_name, args, lyr_config, activation = self.check_lyr_config(lyr, lyr_config)\n\n if K.BACKEND == 'pytorch':\n\n if first_layer:\n first_layer = False\n\n if callable(lyr_config):\n lyr_initiated = lyr_config\n else:\n lyr_initiated = TORCH_LAYERS[lyr_name](**lyr_config)\n setattr(self, lyr, lyr_initiated)\n initiated_layers[lyr] = {\"layer\": lyr_initiated, \"named_outs\": named_outs, 'call_args': call_args,\n 'inputs': lyr_inputs}\n\n else:\n # may be user has defined layers without input layer, in this case add Input layer as first layer\n if first_layer:\n if inputs is not None: # This method was called by providing it inputs.\n assert isinstance(inputs, tf.Tensor)\n # since inputs have been defined, all the layers that will be added will be next to first layer\n first_layer = False\n layer_outputs = inputs\n initiated_layers[layer_outputs.name] = {'layer': layer_outputs, 'tf_name': lyr_name}\n\n elif lyr_name != \"Input\":\n if 'input_shape' in lyr_config: # input_shape is given in the first layer so make input layer\n 
initialized_layer = LAYERS[\"Input\"](shape=lyr_config['input_shape'])\n else:\n # for simple dense layer based models, lookback will not be used\n def_shape = (self.num_ins,) if self.lookback == 1 else (self.lookback, self.num_ins)\n initialized_layer = LAYERS[\"Input\"](shape=def_shape)\n\n # first layer is built so next iterations will not be for first layer\n first_layer = False\n # put the first layer in memory to be used for model compilation\n # add th layer which the user had specified as first layer\n initiated_layers[initialized_layer.name] = {'layer': initialized_layer,\n 'tf_name': lyr_name}\n input_lyrs.append(initialized_layer)\n\n # The inputs to the layer have not been specified, so either it is an Input layer\n if lyr_inputs is None:\n # or it uses the previous outputs as inputs\n if lyr_name == \"Input\":\n # it is an Input layer, hence should not be called\n initialized_layer = LAYERS[lyr_name](*args, **lyr_config)\n initiated_layers[lyr_config['name']] = {'layer': initialized_layer,\n 'tf_name': lyr_name}\n input_lyrs.append(initialized_layer)\n else:\n # it is executable and uses previous outputs as inputs\n if lyr_name in ACTIVATION_LAYERS:\n layer_outputs = ACTIVATION_LAYERS[lyr_name](name=lyr_config['name'])\n initiated_layers[lyr_config['name']] = {'layer': layer_outputs,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n elif lyr_name in ['TimeDistributed', 'Bidirectional']:\n wrp_layer = LAYERS[lyr_name]\n # because wrapper layer name is property\n initiated_layers[lyr_config['name']] = {'layer': wrp_layer,\n 'tf_name': lyr_name}\n continue\n elif \"LAMBDA\" in lyr_name.upper():\n # lyr_config is serialized lambda layer, which needs to be deserialized\n initialized_layer = tf.keras.layers.deserialize(lyr_config)\n # layers_config['lambda']['config'] still contails lambda, so we need to replace the python\n # object (lambda) with the serialized version (lyr_config) so that it can be saved as json file.\n layers_config[lyr]['config'] = lyr_config\n initiated_layers[lyr_config['name']] = {'layer': initialized_layer,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n else:\n if wrp_layer is not None:\n initialized_layer = wrp_layer(LAYERS[lyr_name](*args, **lyr_config))\n initiated_layers[lyr_config['name']] = {'layer': initialized_layer,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n wrp_layer = None\n else:\n if lyr_name == \"TemporalFusionTransformer\":\n lyr_config['return_attention_components'] = True\n initialized_layer = LAYERS[lyr_name](*args, **lyr_config)\n initiated_layers[lyr_config['name']] = {'layer': initialized_layer,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n\n else: # The inputs to this layer have been specified so they must exist in lyr_cache.\n # it is an executable\n if lyr_name in ACTIVATION_LAYERS:\n\n layer_outputs = ACTIVATION_LAYERS[lyr_name](name=lyr_config['name'])\n initiated_layers[lyr_config['name']] = {'layer': layer_outputs,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n elif lyr_name in ['TimeDistributed', 'Bidirectional']:\n wrp_layer = LAYERS[lyr_name]\n # because wrapper layer name is property\n initiated_layers[lyr_config['name']] = {'layer': wrp_layer,\n 'tf_name': lyr_name}\n continue\n elif \"LAMBDA\" in lyr_name.upper():\n initialized_layer = 
tf.keras.layers.deserialize(lyr_config)\n layers_config[lyr]['config'] = lyr_config\n initiated_layers[lyr_config['name']] = {'layer': initialized_layer,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n else:\n if wrp_layer is not None:\n initialized_layer = wrp_layer(LAYERS[lyr_name](*args, **lyr_config))\n initiated_layers[lyr_config['name']] = {'layer': initialized_layer,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n wrp_layer = None\n else:\n layer_initialized = LAYERS[lyr_name](*args, **lyr_config)\n initiated_layers[lyr_config['name']] = {'layer': layer_initialized,\n 'named_outs': named_outs,\n 'call_args': call_args,\n 'inputs': lyr_inputs,\n 'tf_name': lyr_name}\n\n if activation is not None: # put the string back to dictionary to be saved in config file\n lyr_config['activation'] = activation\n\n first_layer = False\n\n self.jsonize_lyr_config(lyr_config)\n\n # inputs = [] todo, indentify input layers\n # for k,v in lyr_cache.items():\n # since the model is not build yet and we have access to only output tensors of each list, this is probably\n # # the only way to know that how many `Input` layers were encountered during the run of this method. Each\n # tensor (except TimeDistributed) has .op.inputs attribute, which is empty if a tensor represents output of Input layer.\n # if int(''.join(tf.__version__.split('.')[0:2]).ljust(3, '0')) < 240:\n # if k.upper() != \"TIMEDISTRIBUTED\" and hasattr(v, 'op'):\n # if hasattr(v.op, 'inputs'):\n # _ins = v.op.inputs\n # if len(_ins) == 0:\n # inputs.append(v)\n # else: # not sure if this is the proper way of checking if a layer receives an input or not!\n # if hasattr(v, '_keras_mask'):\n # inputs.append(v)\n\n setattr(self, 'initiated_layers', initiated_layers)\n setattr(self, 'input_lyrs', input_lyrs)\n\n\n # todo,\n # # for case when {Input -> Dense, Input_1}, this method wrongly makes Input_1 as output so in such case use\n # # {Input_1, Input -> Dense }, thus it makes Dense as output and first 2 as inputs, so throwing warning\n # if int(''.join(tf.__version__.split('.')[0:2]).ljust(3, '0')) < 240:\n # if len(layer_outputs.op.inputs) < 1:\n # print(\"Warning: the output is of Input tensor class type\")\n # else:\n # if 'op' not in dir(layer_outputs): # layer_outputs does not have `op`, which means it has no incoming node\n # print(\"Warning: the output is of Input tensor class type\")\n\n # outs = None\n #if BACKEND == 'tensorflow':\n # outs = self.call(input_lyrs)\n # setattr(self, 'output_lyrs', outs)\n # if BACKEND == 'tensorflow':\n # ## Reinitial\n # super(Model, self).__init__(\n # inputs=input_lyrs,\n # outputs=outs)\n #MODEL.__init__(self, inputs=inputs, outputs=outs)\n\n return input_lyrs # , outs", "def get_layer(self, i):\n return self.layers[i]", "def plus(self, layer):\n\n input1 = self.node(layer)\n if not input1:\n return\n LOGGER.debug('Plus layer to last:%s', layer)\n if not self.last_node:\n self.last_node = nuke.nodes.Constant()\n\n if layer not in self.layers():\n input1 = nuke.nodes.Shuffle(inputs=[input1], out=layer)\n self.last_node = nuke.nodes.Merge2(\n inputs=[self.last_node, input1], operation='plus',\n also_merge=layer if layer not in self.layers() else 'none',\n label=utf8(self.l10n(layer)),\n output='rgb')", "def init_layers(self):\n\n # get caching layers activated\n caching_layers = G3WCachingLayer.objects.all()\n for caching_layer in caching_layers:\n self.add_layer(str(caching_layer), 
caching_layer)", "def _get_resnet_fc_layer(self):\n\t\tlayer_iterator = ww.WeightWatcher().make_layer_iterator(self.model)\n\t\tnum_layers = 0\n\t\tfor ww_layer in layer_iterator:\n\t\t\tnum_layers += 1\t\n\t\tfc_layer = ww_layer\n\t\t\n\t\treturn fc_layer", "def add_dll_to_import_descriptor(self, first_thunk_rva, dll_name_rva,\n iat_rva):\n empty_import_descriptor = \\\n self.pe_manager.gen_new_empty_import_descriptor()\n setattr(empty_import_descriptor, \"Characteristics\", 0)\n setattr(empty_import_descriptor, \"FirstThunk\", iat_rva)\n setattr(empty_import_descriptor, \"ForwarderChain\", 0)\n setattr(empty_import_descriptor, \"Name\", dll_name_rva)\n setattr(empty_import_descriptor, \"OriginalFirstThunk\", first_thunk_rva)\n setattr(empty_import_descriptor, \"TimeDateStamp\", 0)\n\n # TODO : inject dll_name and get its rva for set name\n\n last_descriptor = self.import_structures[-1]\n if last_descriptor.name != 'IMAGE_IMPORT_DESCRIPTOR':\n print(\"something wrong\")\n exit\n\n last_descriptor_offset = self.get_last_import_descriptor_offset()\n last_descriptor = self.get_last_import_descriptor()\n last_descriptor_index = self.import_structures.index(last_descriptor)\n\n empty_import_descriptor.set_file_offset(last_descriptor_offset)\n last_descriptor.set_file_offset(last_descriptor_offset\n + empty_import_descriptor.sizeof())\n self.import_structures.insert(last_descriptor_index,\n empty_import_descriptor)\n # print(\"OFFSET : {:x}\".format(last_descriptor_offset))\n self.count_of_additional_dll += 1\n return empty_import_descriptor", "def test_loading_a_simple_layer(self, sdoc):\n tdoc = ezdxf.new()\n loader = xref.Loader(sdoc, tdoc)\n loader.load_layers([\"first\"])\n loader.execute()\n assert document_has_no_errors(tdoc) is True\n\n layer = tdoc.layers.get(\"first\")\n\n assert layer is not sdoc.layers.get(\"first\"), \"expected a copy\"\n assert layer.dxf.name == \"FIRST\", \"expected the original layer name\"\n assert layer.doc is tdoc, \"bound to wrong document\"\n assert layer.dxf.handle in tdoc.entitydb, \"entity not in database\"\n assert layer.dxf.owner == tdoc.layers.head.dxf.handle, \"invalid owner handle\"\n assert layer.dxf.material_handle == tdoc.materials[\"global\"].dxf.handle", "def request_layers(url):\n layer_names = get_layers(url)\n for l in layer_names:\n print(\"Checking '%s'...\" % l)\n get_image(url, l, check_blank=True)", "def process(self, input_=None) -> Iterator[cat.Catalog]:\n logger.debug(f'import entering process with href {self._import.href}')\n fetcher = cache.FetcherFactory.get_fetcher(self._trestle_root, self._import.href)\n\n model: Union[cat.Catalog, prof.Profile]\n model, model_type = fetcher.get_oscal()\n\n if model_type == 'catalog':\n logger.debug(f'DIRECT YIELD in import of catalog {model.metadata.title}')\n yield model\n else:\n if model_type != 'profile':\n raise TrestleError(f'Improper model type {model_type} as profile import.')\n profile: prof.Profile = model\n\n pipelines: List[Pipeline] = []\n logger.debug(\n f'import pipelines for sub_imports of profile {self._import.href} with title {model.metadata.title}'\n )\n for sub_import in profile.imports:\n import_filter = ProfileResolver.Import(self._trestle_root, sub_import)\n prune_filter = ProfileResolver.Prune(sub_import)\n pipeline = Pipeline([import_filter, prune_filter])\n pipelines.append(pipeline)\n logger.debug(\n f'sub_import add pipeline for sub href {sub_import.href} of main href {self._import.href}'\n )\n merge_filter = ProfileResolver.Merge(profile)\n modify_filter = 
ProfileResolver.Modify(profile)\n final_pipeline = Pipeline([merge_filter, modify_filter])\n yield next(final_pipeline.process(pipelines))", "def append_import_thunk_to_descriptor(self, descriptor, thunk):\n # TODO : now, this method only support 1 import thunk. must need enhance.\n descriptor_index = self.import_structures.index(descriptor)\n self.import_structures.insert(descriptor_index + 1, thunk)\n rva = self.PE.get_rva_from_offset(thunk.get_file_offset())\n print(\"RVA : {:x}\".format(rva))\n descriptor.Characteristics = rva\n descriptor.FirstThunk = rva\n descriptor.ForwarderChain = rva\n descriptor.Name = 0\n descriptor.OriginalFirstThunk = rva\n descriptor.TimeDateStamp = 0\n\n self.count_of_additional_fn += 1", "def _import_elmo():\n\n elmo = hub.Module('https://storage.googleapis.com/az-nlp/elmo_ru-news_wmt11-16_1.5M_steps.tar.gz',\n trainable=False) # news\n # elmo = hub.Module('https://storage.googleapis.com/az-nlp/elmo_ru-twitter_2013-01_2018-04_600k_steps.tar.gz',\n # trainable=False) # twitter\n print('❤️ ❤️ ❤️ DONE (re)importing Tensorflow hub.Module ')\n print('Tensorflow version is', tf.__version__)\n\n return elmo", "def move_imports_offset_to_new_section(self):\n self.print_imports_offset()\n (entry_rva, size) = self.pe_manager.get_import_descriptor_address_range()\n section = self.pe_manager.get_section_belong_rva(entry_rva)\n data = self.pe_manager.get_section_raw_data(section)\n # append free space that to use be import descriptor.\n import_free_space = 0x3000\n data = data + bytearray(import_free_space)\n new_section = self.pe_manager.create_new_data_section(data, \".newdata\")\n self._origin_import_section = section\n self._new_import_section = new_section\n\n rva_gap_size = new_section.VirtualAddress - section.VirtualAddress\n offset_gap_size = new_section.PointerToRawData \\\n - section.PointerToRawData\n\n origin_iat_rva = 0\n origin_iat_size = 0\n for entry in self.PE.OPTIONAL_HEADER.DATA_DIRECTORY:\n if entry.name == 'IMAGE_DIRECTORY_ENTRY_IMPORT':\n entry.VirtualAddress += (rva_gap_size\n + self._IMPORT_DESCRIPTOR_TABLE_RVA_)\n elif entry.name == 'IMAGE_DIRECTORY_ENTRY_IAT':\n origin_iat_rva = entry.VirtualAddress\n origin_iat_size = entry.Size\n entry.VirtualAddress += rva_gap_size\n\n for entry in self.import_structures:\n entry_rva = self.PE.get_rva_from_offset(entry.get_file_offset())\n if entry.name == 'IMAGE_IMPORT_DESCRIPTOR':\n entry.set_file_offset(\n self.PE.get_offset_from_rva(entry_rva + rva_gap_size\n + self._IMPORT_DESCRIPTOR_TABLE_RVA_)\n )\n if entry.OriginalFirstThunk > 0:\n entry.OriginalFirstThunk += (rva_gap_size\n + self._IMPORT_LOOKUP_TABLE_RVA_)\n if entry.Characteristics > 0:\n entry.Characteristics += (rva_gap_size\n + self._IMPORT_LOOKUP_TABLE_RVA_)\n if entry.FirstThunk > 0:\n # FirstThunk point to _IMPORT_ADDRESS_TABLE_\n entry.FirstThunk += (rva_gap_size + self._IMPORT_ADDRESS_TABLE_RVA_)\n if entry.Name > 0:\n entry.Name += rva_gap_size\n elif entry.name == 'IMAGE_THUNK_DATA':\n entry_rva = self.PE.get_rva_from_offset(entry.get_file_offset())\n if (origin_iat_rva\n <= entry_rva\n <= origin_iat_rva + origin_iat_size):\n # this entry is located at import address table\n entry.set_file_offset(\n self.PE.get_offset_from_rva(\n entry_rva + rva_gap_size\n + self._IMPORT_ADDRESS_TABLE_RVA_)\n )\n else:\n # this entry is located at import lookup table\n entry.set_file_offset(\n self.PE.get_offset_from_rva(\n entry_rva + rva_gap_size\n + self._IMPORT_LOOKUP_TABLE_RVA_)\n )\n\n if entry.Ordinal & 0x80000000:\n # This is Ordinal import\n 
pass\n else:\n # IMPORT_THUNK_DATA is not moving.\n if entry.Ordinal > 0:\n entry.Ordinal += rva_gap_size + self._IMPORT_ADDRESS_TABLE_RVA_\n if entry.AddressOfData > 0:\n entry.AddressOfData += rva_gap_size + self._IMPORT_ADDRESS_TABLE_RVA_\n if entry.ForwarderString > 0:\n entry.ForwarderString += rva_gap_size + self._IMPORT_ADDRESS_TABLE_RVA_\n if entry.Function > 0:\n entry.Function += rva_gap_size + self._IMPORT_ADDRESS_TABLE_RVA_\n\n for entry in self.import_structures:\n if entry.name == 'IMAGE_IMPORT_DESCRIPTOR':\n if entry.OriginalFirstThunk > 0:\n pass\n if entry.FirstThunk > 0:\n pass\n elif entry.name == 'IMAGE_THUNK_DATA':\n if entry.Ordinal & 0x80000000:\n # This is Ordinal import\n pass\n\n self.adjust_references_of_iat(origin_iat_rva,\n origin_iat_rva + origin_iat_size,\n rva_gap_size)", "def _init_extractors(self):\n @self.extractors_wrapper(\"networkx\")\n def get_nx_extractor(graph):\n \"\"\"\n :param graph: networkx.Graph\n :returns: projx.nx_extractor\n \"\"\"\n return nx_xtrct.nx_extractor(\n self.extractor_json[self.extractor_name], graph\n )\n\n @self.extractors_wrapper(\"neo4j\")\n def get_neo4j_extractor(graph):\n \"\"\"\n :returns: projx.nx_extractor\n \"\"\"\n return neo4j_xtrct.neo4j_extractor(\n self.extractor_json[self.extractor_name], graph\n )\n\n @self.extractors_wrapper(\"edgelist\")\n def get_edgelist_extractor(graph):\n \"\"\"\n :returns: projx.nx_extractor\n \"\"\"\n return edgelist_xtrct.edgelist_extractor(\n self.extractor_json[self.extractor_name], graph\n )", "def _get_layers(self) :\n \n return self._layers", "def ezimport(conn: BlitzGateway, target: str,\n project: Optional[Union[str, int]] = None,\n dataset: Optional[Union[str, int]] = None,\n screen: Optional[Union[str, int]] = None,\n ln_s: Optional[bool] = False, ann: Optional[dict] = None,\n ns: Optional[str] = None) -> Union[List[int], None]:\n\n imp_ctl = Importer(conn, target, project, dataset, screen,\n ln_s, ann, ns)\n imp_ctl.ezimport()\n if imp_ctl.screen:\n imp_ctl.get_plate_ids()\n imp_ctl.organize_plates()\n imp_ctl.annotate_plates()\n return imp_ctl.plate_ids\n\n else:\n imp_ctl.get_my_image_ids()\n imp_ctl.organize_images()\n imp_ctl.annotate_images()\n return imp_ctl.image_ids", "def add_layer(self, layer):\n idx = len(self.dict_topo)\n idx += 1\n self.dict_topo[idx] = layer", "def skip_layer(self, count=1):\n self._layer_counter += count", "def add_layer(self, layer):\n self.__layers.append(layer)", "def feed_to_layer(self, data_X, end_layer=0):\n if len(self.layers) <= end_layer < 0:\n return None\n\n a, r = self.particle_input.feed_forward(data_X)\n for l, layer in enumerate(self.layers):\n a, r = layer.feed_forward(a, r)\n if l == end_layer:\n return a\n\n return None", "def forward(self, x):\n return self.layers(x)", "def next(self):\n\n from . 
import containers\n\n # Exit this task if we have eaten all the file groups\n if len(self.maps) == 0:\n raise pipeline.PipelineStopIteration\n\n group = self.maps.pop(0)\n\n map_stack = None\n\n # Iterate over all the files in the group, load them into a Map\n # container and add them all together\n for mfile in group[\"files\"]:\n\n self.log.debug(\"Loading file %s\", mfile)\n\n current_map = containers.Map.from_file(mfile, distributed=True)\n current_map.redistribute(\"freq\")\n\n # Start the stack if needed\n if map_stack is None:\n map_stack = current_map\n\n # Otherwise, check that the new map has consistent frequencies,\n # nside and pol and stack up.\n else:\n\n if (current_map.freq != map_stack.freq).all():\n raise RuntimeError(\"Maps do not have consistent frequencies.\")\n\n if (current_map.index_map[\"pol\"] != map_stack.index_map[\"pol\"]).all():\n raise RuntimeError(\"Maps do not have the same polarisations.\")\n\n if (\n current_map.index_map[\"pixel\"] != map_stack.index_map[\"pixel\"]\n ).all():\n raise RuntimeError(\"Maps do not have the same pixelisation.\")\n\n map_stack.map[:] += current_map.map[:]\n\n # Assign a tag to the stack of maps\n map_stack.attrs[\"tag\"] = group[\"tag\"]\n\n return map_stack", "def handle_layers(context, model, toplayer, layerids, materials, update, import_hidden=False):\n #setup main container to hold all layer collections\n layer_col_id=\"Layers\"\n if not layer_col_id in context.blend_data.collections:\n layer_col = context.blend_data.collections.new(name=layer_col_id)\n try:\n toplayer.children.link(layer_col)\n except Exception:\n pass\n else:\n #If \"Layers\" collection is in place, we assume the plugin had imported 3dm before\n layer_col = context.blend_data.collections[layer_col_id]\n\n # build lookup table for LayerTable index\n # from GUID, create collection for each\n # layer\n for lid, l in enumerate(model.Layers):\n if not l.Visible and not import_hidden:\n continue\n lcol = utils.get_iddata(context.blend_data.collections, l.Id, l.Name, None)\n layerids[str(l.Id)] = (lid, lcol)\n utils.tag_data(layerids[str(l.Id)][1], l.Id, l.Name)\n '''\n matname = l.Name + \"+\" + str(l.Id)\n if matname not in materials:\n laymat = utils.get_iddata(context.blend_data.materials, l.Id, l.Name, None)\n if update:\n\t laymat.use_nodes = True\n\t r, g, b, _ = l.Color\n\t principled = PrincipledBSDFWrapper(laymat, is_readonly=False)\n\t principled.base_color = (r/255.0, g/255.0, b/255.0)\n materials[matname] = laymat\n '''\n # second pass so we can link layers to each other\n for l in model.Layers:\n # link up layers to their parent layers\n if str(l.ParentLayerId) in layerids:\n parentlayer = layerids[str(l.ParentLayerId)][1]\n try:\n parentlayer.children.link(layerids[str(l.Id)][1])\n except Exception:\n pass\n # or to the top collection if no parent layer was found\n else:\n try:\n layer_col.children.link(layerids[str(l.Id)][1])\n except Exception:\n pass", "def process_layer(layer_data):\n layer_name = layer_data['name']\n\n # Instantiate layer.\n from tensorflow.python.keras._impl.keras.layers import deserialize as deserialize_layer # pylint: disable=g-import-not-at-top\n\n layer = deserialize_layer(layer_data, custom_objects=custom_objects)\n created_layers[layer_name] = layer\n\n # Gather layer inputs.\n inbound_nodes_data = layer_data['inbound_nodes']\n for node_data in inbound_nodes_data:\n # We don't process nodes (i.e. 
make layer calls)\n # on the fly because the inbound node may not yet exist,\n # in case of layer shared at different topological depths\n # (e.g. a model such as A(B(A(B(x)))))\n add_unprocessed_node(layer, node_data)", "def from_layer(layer, **kwargs):\r\n from .io import from_layer\r\n return from_layer(layer=layer, **kwargs)", "def import_dataset(self):\n\n if not self.layers_loaded and not self.data_type == \"table\":\n self.set_project_srid()\n\n if self.service == \"WFS\":\n uri = (\n \"pagingEnabled='true' \"\n \"preferCoordinatesForWfsT11='false' \"\n \"restrictToRequestBBOX='1' \"\n \"typename='{0}:{4}-{5}' \"\n \"url='https://{0}/services;key={1}/{2}/{4}-{5}' \"\n \"version='{3}'\"\n ).format(\n self.domain,\n self.api_key_instance.get_api_key(self.domain),\n self.service.lower(),\n self.service_versions[self.service.lower()],\n self.data_type,\n self.object_id,\n )\n\n layer = QgsVectorLayer(uri, self.layer_title, self.service.upper())\n\n elif self.service == \"WMTS\":\n if self.domain == \"basemaps.linz.govt.nz\":\n if self.selected_crs == \"EPSG:2193\":\n tms = \"NZTM2000Quad\"\n elif self.selected_crs == \"EPSG:3857\":\n tms = \"WebMercatorQuad\"\n else:\n self.iface.messageBar().pushMessage(\n \"Error\",\n \"\"\"The LINZ Basemaps WMTS has returned an unexpected coordinate system.\"\"\",\n level=Qgis.Critical,\n )\n return\n uri = (\n \"contextualWMSLegend=0\"\n \"&crs={1}\" # e.g. EPSG:2193\n \"&dpiMode=7&featureCount=10\"\n \"&format=image/webp\"\n \"&layers={2}\"\n \"&styles=default\"\n \"&tileMatrixSet={4}\" # e.g. NZTM2000Quad\n \"&url=https://{0}/v1/tiles/aerial/WMTSCapabilities.xml?api={3}\"\n ).format(\n self.domain,\n self.selected_crs,\n self.object_id,\n self.api_key_instance.get_api_key(self.domain),\n tms,\n )\n else:\n uri = (\n \"SmoothPixmapTransform=1\"\n \"&contextualWMSLegend=0\"\n \"&crs={1}&dpiMode=7&format=image/png\"\n \"&layers={2}-{3}&styles=style%3Dauto&tileMatrixSet={1}\"\n \"&url=https://{0}/services;\"\n \"key={4}/{5}/{6}/{2}/{3}/\"\n \"WMTSCapabilities.xml\"\n ).format(\n self.domain,\n self.selected_crs,\n self.data_type,\n self.object_id,\n self.api_key_instance.get_api_key(self.domain),\n self.service.lower(),\n self.service_versions[self.service.lower()],\n )\n layer = QgsRasterLayer(uri, self.layer_title, \"wms\")\n else:\n pass # ERROR not supported\n\n QgsProject.instance().addMapLayer(layer)\n self.layers_loaded = True\n self.dlg.close()", "def consume_layer(self, reports):\n layer_list = []\n layer_count = 1\n for report in reports:\n layer = create_image_layer(report)\n layer.layer_index = layer_count\n layer_list.append(layer)\n layer_count += 1\n return layer_list", "def install_iterator(self, extra_info=None):\n self.pre_extract()\n\n with ZipFile(self.path) as zp:\n self.z = zp\n\n arcnames = self.z.namelist()\n is_custom_egg = eggmeta.is_custom_egg(self.path)\n\n use_legacy_egg_info_format = has_legacy_egg_info_format(arcnames,\n is_custom_egg)\n\n for arcname in arcnames:\n if use_legacy_egg_info_format:\n n = self._extract_egg_with_legacy_egg_info(arcname,\n is_custom_egg)\n else:\n n = self._extract(arcname, is_custom_egg)\n yield n\n\n self.post_extract(extra_info)", "def load_next_image( self ):\n # Did we finish an epoch\n if self._cur == len(self.indexlist):\n self._epoch += 1\n l = np.random.seed( self._epoch ) #randomize, aslo reproducible\n l = np.random.permutation( len(self.indexlist)/2 )\n l2 = np.vstack( ( 2*l, 2*l + 1 )).T\n self.indexlist = l2.reshape(len(self.indexlist),)\n self._cur = 0\n \n # Index list\n index 
= self.indexlist[self._cur]\n \n #load an image\n image_file_name = self.images[index]\n \n im = np.asarray( mpimg.imread( image_file_name ))\n \n #Determine the new fliplr and rot90 status, used it in the stego \n if ( self.trainMode ):\n if ( self._cur % 2 == 0 ):\n self._flp = np.random.choice(2)*2 - 1\n self._rot = np.random.randint(4)\n im = im[:,::self._flp]\n im = np.rot90(im, self._rot)\n\n #load the ground truth\n label = self.labels[index]\n\n self._cur += 1\n \n return im, label", "def add_layer(self, layer_key_name, caching_layer):\n self.config.layers[layer_key_name] = _parseConfigLayer(self.build_layer_dict(caching_layer, layer_key_name),\n self.config, dirpath='.')", "def copyToInstanceImport():\n print >> import_out, INTRO_TO_INSTANCE\n instance_ipath, product_ipath = getImportedPathes()\n\n # Compose temp dir back_[date] dir path in Instance import directory\n temp_dir_id = \"back_%s\" % strftime(\"%Y%m%d%H%M%S\", gmtime())\n temp_dir_path = osp.join(instance_ipath, temp_dir_id)\n\n # Get *.zexp files from Skin Product's import dir and Plone's instance import dir files\n product_ilist = [i for i in os.listdir(product_ipath) \\\n if osp.isfile(osp.join(product_ipath,i)) and i.endswith('.zexp')]\n\n instance_ilist = [i for i in os.listdir(instance_ipath) \\\n if osp.isfile(osp.join(instance_ipath,i)) and i.endswith('.zexp')]\n\n # Check for presence samenamed files in Instance and Product import directories.\n same_instance_files = [f_name for f_name in instance_ilist if f_name in product_ilist]\n if same_instance_files:\n moveToTemp(same_instance_files, instance_ipath, temp_dir_path)\n\n # Copy all *zexp files from Product's import dir to Instance's import dir\n [copyFile(product_ipath, instance_ipath, f_name) for f_name in product_ilist]\n print >> import_out, SUMMARY_TO_INSTANCE\n\n return [instance_ipath, product_ipath, temp_dir_path, product_ilist]", "def file_import(self):\r\n\r\n try:\r\n self.process_file_import()\r\n except InputError as ex:\r\n print(ex)\r\n self.file_import()", "def import_all():\n import theory", "def add_layer(self, in_dim, out_dim, activation: Module or None, i=None):\n i = i or len(self.modules)\n self.modules.insert(i, Linear(in_dim=in_dim, out_dim=out_dim, activation=activation))", "def _imports_into_edges(self, filepath):\n edgelist = []\n for imp in self.get_imports(self.sourcepath / filepath):\n internal_file = False\n for _intern in self.internal:\n if imp in _intern:\n imp = _intern\n internal_file = True\n break\n if internal_file is False:\n imp = imp.split(\".\")[0]\n edge_dictionary = {}\n else:\n edge_dictionary = {\"width\": 4}\n edgelist.append((imp, self.name(filepath), edge_dictionary))\n return edgelist", "def importOptimizer():\n module_path = os.path.join(path, \"optimization\")\n module_path = os.path.join(module_path, \"optimizer.py\")\n optimizer_class = importClass(\"Optimizer\", \"optimizer\", module_path)\n return optimizer_class", "def _init_edge_module(self):\n self.edge_fuse_cls = EdgeFusionModule(self.num_classes, 256)\n for i in range(len(self.edge_fusion_inds)):\n reg_inds, out_inds = self.edge_fusion_inds[i]\n out_channels = self.group_reg_dims[reg_inds][out_inds]\n fusion_layer = EdgeFusionModule(out_channels, 256)\n layer_name = f'edge_fuse_reg_{reg_inds}_{out_inds}'\n self.add_module(layer_name, fusion_layer)", "def CreateLayer(self,layername):\n\t\treturn self.acad.ActiveDocument.Layers.Add(layername)", "def add_layer(self, func, *args, **kwargs):\n scope_name = self.name + '_layer' + str(self.layer_count)\n 
with tf.variable_scope(scope_name, reuse=self.reuse):\n self.last_layer = func(self.last_layer, *args, **kwargs)\n self.layer_seq += [self.last_layer]\n pass\n self.layer_count += 1\n return self.last_layer", "def importAovs(self):\n\t\tLayersInfo = pickle.load( open( self.aovsPath.path, \"rb\") )\n\t\tmc.refresh( su = 1 )\n\t\tfor ao in LayersInfo.keys():\n\t\t\taov.create( ao, LayersInfo[ao]['name'], LayersInfo[ao]['type'], LayersInfo[ao]['enabled'] )\n\t\tmc.refresh( su = 0 )", "def make_feature_layers(self, config):\n raise NotImplementedError", "def feature_extractor(mode, features, labels, config, params):\n\n del mode, labels, config\n\n # build base of feature extractor\n with tf.variable_scope('base'):\n # if num_classes=None no logits layer is created\n # if global_pool=False model is used for dense output\n fe, end_points = resnet_v1.resnet_v1_50(\n features,\n num_classes=None,\n is_training=params.batch_norm_accumulate_statistics,\n global_pool=False,\n output_stride=params.stride_feature_extractor)\n\n # build extension to feature extractor, which decreases feature dimensions\n # and increase field of view of feature extractor in a memory and \n # computational efficient way\n # TODO: add to end_points the outputs of next layers\n with tf.variable_scope('extension'):\n # WARNING: this scope assumes that slim.conv2d uses slim.batch_norm\n # for the batch normalization, which holds at least up to TF v1.4\n if params.feature_dims_decreased > 0:\n fe = slim.conv2d(fe,\n num_outputs=params.feature_dims_decreased,\n kernel_size=1,\n scope='decrease_fdims')\n if params.fov_expansion_kernel_rate > 0 and params.fov_expansion_kernel_size > 0:\n fe = slim.conv2d(fe,\n num_outputs=fe.shape[-1],\n kernel_size=params.fov_expansion_kernel_size,\n rate=params.fov_expansion_kernel_rate,\n scope='increase_fov')\n\n return fe, end_points", "def setup_layers(self):\n if self.args.model == \"exact\":\n self.layer = PPNPLayer\n else:\n self.layer = APPNPLayer\n self.setup_layer_structure()", "def ogrAppendFeatures(gpxFile, sourceLayer, destinationLayer):\r\n\r\n ##print \"Starting transaction for: {0}\".format(destinationLayer.GetName())\r\n print \" Importing {0}: {1} features\".format(sourceLayer.GetName(), sourceLayer.GetFeatureCount())\r\n fName = os.path.basename(gpxFile)\r\n destinationLayer.StartTransaction()\r\n for x in xrange(sourceLayer.GetFeatureCount()):\r\n sourceFeature = sourceLayer.GetNextFeature()\r\n ##print \"inserting record\"\r\n sourceFeature.SetFID(-1)\r\n sourceFeature.SetField(\"src\", fName)\r\n destinationLayer.CreateFeature(sourceFeature)\r\n\r\n #Commit the new features to the database\r\n ##print \" Committing transaction for: {0}\".format(destinationLayer.GetName())\r\n destinationLayer.CommitTransaction()", "def addLayer(self, layer):\n self.layers.append(layer)", "def _load( self, i ):\n if ir.config.verbosity_level >= 2: print(\"[observation] Lazy loading raster\")\n self._raster_data[i] = raster_cube( self._raster_files, line=self._line_info['description'][i], keep_null=self._keep_null )", "def next_file(self):\n raise NotImplementedError()", "def get_layer(self, l):\n\n if l == 0:\n return self.input_layer\n elif 0 < l < self.num_layers() - 1:\n return self.hidden_layers[l - 1]\n elif l == self.num_layers() - 1:\n return self.output_layer\n else:\n return None", "def enaml_importer():\n print(imports, dir(imports))\n old = imports.get_importers()\n\n yield imports\n\n imports._imports__importers = old", "def _init_layers(self) -> None:\n self.self_attn = 
MultiheadAttention(**self.self_attn_cfg)\n self.cross_attn = MultiheadAttention(**self.cross_attn_cfg)\n self.embed_dims = self.self_attn.embed_dims\n self.ffn = FFN(**self.ffn_cfg)\n norms_list = [\n build_norm_layer(self.norm_cfg, self.embed_dims)[1]\n for _ in range(3)\n ]\n self.norms = ModuleList(norms_list)", "def imports(self):\n\n try:\n data_dir = self.import_dir()\n except ValueError, why:\n raise StopIteration(why)\n\n i = 0\n\n desc_size = self.obj_vm.profile.get_obj_size('_IMAGE_IMPORT_DESCRIPTOR')\n\n while 1:\n desc = obj.Object('_IMAGE_IMPORT_DESCRIPTOR',\n vm = self.obj_native_vm,\n offset = self.DllBase + data_dir.VirtualAddress + (i * desc_size),\n parent = self)\n\n # Stop if the IID is paged or all zeros\n if desc == None or desc.is_list_end():\n break\n\n # Stop if the IID contains invalid fields \n if not desc.valid(self._nt_header()):\n break\n\n dll_name = desc.dll_name()\n\n for o, f, n in desc._imported_functions():\n yield dll_name, o, f, n\n\n i += 1", "def load_next_image(self):\n # Did we finish an epoch?\n if self._cur == len(self.indexlist):\n self._cur = 0\n shuffle(self.indexlist)\n\n # Load an image\n index = self.indexlist[self._cur] # Get the image index\n # Load and prepare ground truth\n multilabel = np.zeros(20).astype(np.int32)\n anns = load_pascal_annotation(index, self.pascal_root)\n for label in anns['gt_classes']:\n # in the multilabel problem we don't care how MANY instances\n # there are of each class. Only if they are present.\n # The \"-1\" is b/c we are not interested in the background\n # class.\n multilabel[label - 1] = 1\n\n self._cur += 1\n return index, multilabel", "def _get_resnet_fc_layer(self):\n\t\tlayer_iterator = ww.WeightWatcher().make_layer_iterator(self.model)\n\t\tfc_layer= None\n\t\tfor ww_layer in layer_iterator:\n\t\t\tprint(ww_layer.name)\n\t\t\tif ww_layer.name=='fc':\n\t\t\t\tfc_layer = ww_layer\n\t\t\n\t\treturn fc_layer", "def _init_layers(self) -> None:\n self.self_attn = MultiheadAttention(**self.self_attn_cfg)\n self.embed_dims = self.self_attn.embed_dims\n self.ffn = FFN(**self.ffn_cfg)\n norms_list = [\n build_norm_layer(self.norm_cfg, self.embed_dims)[1]\n for _ in range(2)\n ]\n self.norms = ModuleList(norms_list)", "def get_feature_extractor(device): \n vgg_temp = models.vgg19(pretrained=True).features\n model = FeatureExtractor()\n\n conv_counter = 1\n relu_counter = 1\n block_counter = 1\n\n for i, layer in enumerate(list(vgg_temp)):\n if isinstance(layer, nn.Conv2d):\n name = 'conv_' + str(block_counter) + '_' + str(conv_counter)\n conv_counter += 1\n model.add_module(name, layer)\n\n if isinstance(layer, nn.ReLU):\n name = 'relu_' + str(block_counter) + '_' + str(relu_counter)\n relu_counter += 1\n model.add_module(name, layer)\n\n if isinstance(layer, nn.MaxPool2d):\n # TODO: try to use nn.AvgPool2d((2,2))\n name = 'pool_' + str(block_counter)\n relu_counter = conv_counter = 1\n block_counter += + 1\n model.add_module(name, layer)\n\n model.to(device)\n return model", "def import_and_add(self, import_str):\n # loaded_classes.clear()\n\n try:\n import_module(import_str)\n except ImportError as e:\n traceback.print_exc()\n logger.warning(\"Tried to import `%s` and failed, ignoring\", import_str)\n logger.warning(\"Error: %s\", e)\n # else:\n # for k in loaded_classes:\n # if k.__module__.startswith(\"dataclay\"):\n # # dataClay contrib classes should not be registered here\n # continue\n # else:\n # self.add_class(k)", "def reload(self):\n\t\toldlayers = self.layers\n\t\tself.layers = []\n\t\tfor cp, 
filename, fp in oldlayers:\n\t\t\tcp = cp # pylint\n\t\t\tif fp is None:\n\t\t\t\tself.read(filename)\n\t\t\telse:\n\t\t\t\tself.readfp(fp, filename)", "def generate_legacy_layers(self, images_map, content_retriever):\n pass", "def generate_legacy_layers(self, images_map, content_retriever):\n pass", "def T(layer):\n return graph.get_tensor_by_name(\"import/%s:0\" % layer)", "def _importer(name, root_package=False, relative_globals=None, level=0):\n return __import__(name, locals=None, # locals has no use\n globals=relative_globals,\n fromlist=[] if root_package else [None],\n level=level)", "def _init_extractor_from_source(self, source_name):\n try:\n source = [s for s in self.sources if s['id'] == source_name][0]\n except IndexError:\n source = None\n\n if source is None:\n return\n\n extractor_klass = load_object(source['extractor'])\n return extractor_klass(source)", "def addExportLayerToCoreml(builder):\n outputNames = [output.name for output in builder.spec.description.output]\n\n for i, outputName in enumerate(outputNames):\n # formulas: https://github.com/ultralytics/yolov5/issues/471\n builder.add_activation(\n name=f\"sigmoid_{outputName}\",\n non_linearity=\"SIGMOID\",\n input_name=outputName,\n output_name=f\"{outputName}_sigmoid\",\n )\n\n ### Coordinates calculation ###\n # input (1, 3, nC, nC, 85), output (1, 3, nC, nC, 2) -> nC = 640 / strides[i]\n builder.add_slice(\n name=f\"slice_coordinates_xy_{outputName}\",\n input_name=f\"{outputName}_sigmoid\",\n output_name=f\"{outputName}_sliced_coordinates_xy\",\n axis=\"width\",\n start_index=0,\n end_index=2,\n )\n # x,y * 2\n builder.add_elementwise(\n name=f\"multiply_xy_by_two_{outputName}\",\n input_names=[f\"{outputName}_sliced_coordinates_xy\"],\n output_name=f\"{outputName}_multiplied_xy_by_two\",\n mode=\"MULTIPLY\",\n alpha=2,\n )\n # x,y * 2 - 0.5\n builder.add_elementwise(\n name=f\"subtract_0_5_from_xy_{outputName}\",\n input_names=[f\"{outputName}_multiplied_xy_by_two\"],\n output_name=f\"{outputName}_subtracted_0_5_from_xy\",\n mode=\"ADD\",\n alpha=-0.5,\n )\n grid = make_grid(featureMapDimensions[i], featureMapDimensions[i]).numpy()\n # x,y * 2 - 0.5 + grid[i]\n builder.add_bias(\n name=f\"add_grid_from_xy_{outputName}\",\n input_name=f\"{outputName}_subtracted_0_5_from_xy\",\n output_name=f\"{outputName}_added_grid_xy\",\n b=grid,\n shape_bias=grid.shape,\n )\n # (x,y * 2 - 0.5 + grid[i]) * stride[i]\n builder.add_elementwise(\n name=f\"multiply_xy_by_stride_{outputName}\",\n input_names=[f\"{outputName}_added_grid_xy\"],\n output_name=f\"{outputName}_calculated_xy\",\n mode=\"MULTIPLY\",\n alpha=strides[i],\n )\n\n # input (1, 3, nC, nC, 85), output (1, 3, nC, nC, 2)\n builder.add_slice(\n name=f\"slice_coordinates_wh_{outputName}\",\n input_name=f\"{outputName}_sigmoid\",\n output_name=f\"{outputName}_sliced_coordinates_wh\",\n axis=\"width\",\n start_index=2,\n end_index=4,\n )\n # w,h * 2\n builder.add_elementwise(\n name=f\"multiply_wh_by_two_{outputName}\",\n input_names=[f\"{outputName}_sliced_coordinates_wh\"],\n output_name=f\"{outputName}_multiplied_wh_by_two\",\n mode=\"MULTIPLY\",\n alpha=2,\n )\n # (w,h * 2) ** 2\n builder.add_unary(\n name=f\"power_wh_{outputName}\",\n input_name=f\"{outputName}_multiplied_wh_by_two\",\n output_name=f\"{outputName}_power_wh\",\n mode=\"power\",\n alpha=2,\n )\n # (w,h * 2) ** 2 * anchor_grid[i]\n anchor = (\n anchorGrid[i]\n .expand(-1, featureMapDimensions[i], featureMapDimensions[i], -1)\n .numpy()\n )\n builder.add_load_constant_nd(\n 
name=f\"anchors_{outputName}\",\n output_name=f\"{outputName}_anchors\",\n constant_value=anchor,\n shape=anchor.shape,\n )\n builder.add_elementwise(\n name=f\"multiply_wh_with_achors_{outputName}\",\n input_names=[f\"{outputName}_power_wh\", f\"{outputName}_anchors\"],\n output_name=f\"{outputName}_calculated_wh\",\n mode=\"MULTIPLY\",\n )\n\n builder.add_concat_nd(\n name=f\"concat_coordinates_{outputName}\",\n input_names=[f\"{outputName}_calculated_xy\", f\"{outputName}_calculated_wh\"],\n output_name=f\"{outputName}_raw_coordinates\",\n axis=-1,\n )\n builder.add_scale(\n name=f\"normalize_coordinates_{outputName}\",\n input_name=f\"{outputName}_raw_coordinates\",\n output_name=f\"{outputName}_raw_normalized_coordinates\",\n W=torch.tensor([1 / 640]).numpy(),\n b=0,\n has_bias=False,\n )\n\n ### Confidence calculation ###\n builder.add_slice(\n name=f\"slice_object_confidence_{outputName}\",\n input_name=f\"{outputName}_sigmoid\",\n output_name=f\"{outputName}_object_confidence\",\n axis=\"width\",\n start_index=4,\n end_index=5,\n )\n builder.add_slice(\n name=f\"slice_label_confidence_{outputName}\",\n input_name=f\"{outputName}_sigmoid\",\n output_name=f\"{outputName}_label_confidence\",\n axis=\"width\",\n start_index=5,\n end_index=0,\n )\n # confidence = object_confidence * label_confidence\n builder.add_multiply_broadcastable(\n name=f\"multiply_object_label_confidence_{outputName}\",\n input_names=[\n f\"{outputName}_label_confidence\",\n f\"{outputName}_object_confidence\",\n ],\n output_name=f\"{outputName}_raw_confidence\",\n )\n\n # input: (1, 3, nC, nC, 85), output: (3 * nc^2, 85)\n builder.add_flatten_to_2d(\n name=f\"flatten_confidence_{outputName}\",\n input_name=f\"{outputName}_raw_confidence\",\n output_name=f\"{outputName}_flatten_raw_confidence\",\n axis=-1,\n )\n builder.add_flatten_to_2d(\n name=f\"flatten_coordinates_{outputName}\",\n input_name=f\"{outputName}_raw_normalized_coordinates\",\n output_name=f\"{outputName}_flatten_raw_coordinates\",\n axis=-1,\n )\n\n builder.add_concat_nd(\n name=\"concat_confidence\",\n input_names=[\n f\"{outputName}_flatten_raw_confidence\" for outputName in outputNames\n ],\n output_name=\"raw_confidence\",\n axis=-2,\n )\n builder.add_concat_nd(\n name=\"concat_coordinates\",\n input_names=[\n f\"{outputName}_flatten_raw_coordinates\" for outputName in outputNames\n ],\n output_name=\"raw_coordinates\",\n axis=-2,\n )\n\n builder.set_output(\n output_names=[\"raw_confidence\", \"raw_coordinates\"],\n output_dims=[(25200, numberOfClassLabels), (25200, 4)],\n )", "def _layer_forward(self, z_prev, layer, use_relu=True):\n\n self.__dict__['z_prev_'+layer] = z_prev\n b = self.__getattribute__('b_'+layer)\n w = self.__getattribute__('w_'+layer)\n\n dim_out = w.shape[0]\n\n # simplification due to np broadcasting\n a = [email protected] + b\n\n z = relu(a) if use_relu else a\n\n return (a, z)", "def __init__(self):\n super(GatherLastLayer, self).__init__()" ]
[ "0.6111195", "0.54837346", "0.54835594", "0.5450349", "0.5450116", "0.54399455", "0.5409996", "0.5303996", "0.52164537", "0.52136886", "0.516096", "0.51069194", "0.5101633", "0.5099417", "0.5078807", "0.5042868", "0.4991108", "0.49878004", "0.4973067", "0.49313796", "0.49146825", "0.48918876", "0.48812124", "0.48812124", "0.48812124", "0.4869186", "0.4861221", "0.4856358", "0.48529756", "0.4848147", "0.4847011", "0.4841717", "0.4813863", "0.4810084", "0.48070508", "0.48046207", "0.48015773", "0.47992504", "0.4791084", "0.47837472", "0.47802195", "0.47737056", "0.47687405", "0.4764924", "0.47637418", "0.47573522", "0.4756528", "0.47516134", "0.47358266", "0.47271678", "0.47216052", "0.46993446", "0.469707", "0.4696555", "0.4692394", "0.46888366", "0.4687512", "0.4687452", "0.46838874", "0.46735784", "0.46620145", "0.46578643", "0.46573222", "0.46491086", "0.46446255", "0.46387565", "0.46251404", "0.46160468", "0.46130636", "0.46103394", "0.46080846", "0.46071956", "0.4606229", "0.46043196", "0.46041977", "0.46039373", "0.46026006", "0.45963994", "0.45900688", "0.45879322", "0.45854655", "0.45836893", "0.45677865", "0.45676476", "0.45632368", "0.45627594", "0.45599222", "0.45594588", "0.45571905", "0.4554427", "0.45531872", "0.4544577", "0.45423084", "0.45423084", "0.45382935", "0.4532764", "0.45313278", "0.4529167", "0.4527103", "0.45266992" ]
0.66877997
0
Class to organize and execute QA for a DESI production
def __init__(self, specprod_dir=None, **kwargs): if specprod_dir is None: specprod_dir = specprod_root() self.specprod_dir = specprod_dir # Init QA_MultiExp.__init__(self, specprod_dir=specprod_dir, **kwargs) # Load up exposures for the full production nights = get_nights(specprod_dir=self.specprod_dir) for night in nights: self.mexp_dict[night] = {} for exposure in get_exposures(night, specprod_dir = self.specprod_dir): # Object only?? frames_dict = get_files(filetype = str('frame'), night = night, expid = exposure, specprod_dir = self.specprod_dir) self.mexp_dict[night][exposure] = frames_dict # Output file names self.qaexp_outroot = self.qaprod_dir+'/'+self.prod_name+'_qa' # Nights list self.qa_nights = []
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def qa():\n env.config_file = 'config_production.py'\n env.hosts = ['[email protected]:34165']\n env.host_type = 'qa'\n env.user = 'ombu'\n env.host_webserver_user = 'www-data'\n env.host_site_path = '/mnt/main/qa/qa2/public'", "def main():\n\n # Run all the requirements for part A\n ##############################\n # Question 3\n # runs naive A*\n question_3()\n\n ##############################\n # Question 5\n # runs online A*\n question_5()\n\n ##############################\n # Question 7\n # runs online A* on fine grid\n question_7()\n\n\n # Run all the requirements for part B\n ##############################\n # Question 9\n question_9()\n\n ##############################\n # Question 10\n question_10()\n\n ##############################\n # Question 1\n question_11()", "def qa(ctx):\n header(qa.__doc__)\n with ctx.cd(ROOT):\n info(\"Ensure PyPI can render README and CHANGELOG\")\n info(\"Building dist package\")\n dist = ctx.run(\"python setup.py sdist\", pty=True, warn=False, hide=True)\n if dist.failed:\n error(\"Unable to build sdist package\")\n exit(\"Quality check failed\", dist.return_code)\n readme_results = ctx.run(\"twine check dist/*\", pty=True, warn=True, hide=True)\n if readme_results.failed:\n print(readme_results.stdout)\n error(\"README and/or CHANGELOG is not renderable by PyPI\")\n else:\n success(\"README and CHANGELOG are renderable by PyPI\")\n if readme_results.failed:\n exit(\"Quality check failed\", readme_results.return_code)\n success(\"Quality check OK\")", "def qa_test():\r\n # Reads Code and Runs Code Metrics\r\n with open(\"BrainDataVisualiser.py\",\"r\") as file:\r\n code = file.read()\r\n with open(\"QA_LOGS.txt\",\"a\") as file:\r\n # Timestamp and append metric results to log\r\n file.write(datetime.date.today().strftime(\"%b-%d-%Y\")+\"\\n\\t\")\r\n file.write(\"General Analysis\\n\\t\\t\")\r\n file.write(str(analyze(code))+\"\\n\\t\")\r\n file.write(\"Cyclomatic Complexity\\n\")\r\n for i in cc_visit(code):\r\n file.write(\"\\t\\t\"+cc_rank(i.complexity)+\" \"+str(i)+\"\\n\")", "def qa():\n env.hosts = ['[email protected]']\n env.directory = '/var/www/swordpushweb'", "def run_faqc(self, **kwargs):\n if self.qc is True:\n build([FaQC.SummarizeQC(fastq_dic=self.fastq_dic,\n num_cpus=self.num_cpus,\n workdir=self.workdir)],\n local_scheduler=self.local_scheduler,\n workers=1)\n qc_dic = {}\n for samp, path in self.fastq_dic.items():\n trim_dir = os.path.join(self.workdir, \"processes\", \"qc\", samp)\n qc_dic[samp] = trim_dir + \"/\" + samp + \".1.trimmed.fastq\" + \":\" + \\\n trim_dir + \"/\" + samp + \".2.trimmed.fastq\" \n return qc_dic\n\n else:\n return self.fastq_dic", "def export_QA(qa: QA):\n # TODO: implement\n\n log.info(\"assess_quality.export_QA: not yet implemented\")", "def test_manufacturing_scrap(self):\n\n # Update demo products\n (self.product_4 | self.product_2).write({\n 'tracking': 'lot',\n })\n\n # Update Bill Of Material to remove product with phantom bom.\n self.bom_3.bom_line_ids.filtered(lambda x: x.product_id == self.product_5).unlink()\n\n # Create Inventory Adjustment For Stick and Stone Tools with lot.\n lot_product_4 = self.env['stock.production.lot'].create({\n 'name': '0000000000001',\n 'product_id': self.product_4.id,\n 'company_id': self.env.company.id,\n })\n lot_product_2 = self.env['stock.production.lot'].create({\n 'name': '0000000000002',\n 'product_id': self.product_2.id,\n 'company_id': self.env.company.id,\n })\n\n stock_inv_product_4 = self.env['stock.inventory'].create({\n 'name': 'Stock Inventory for 
Stick',\n 'product_ids': [(4, self.product_4.id)],\n 'line_ids': [\n (0, 0, {'product_id': self.product_4.id, 'product_uom_id': self.product_4.uom_id.id, 'product_qty': 8, 'prod_lot_id': lot_product_4.id, 'location_id': self.stock_location_14.id}),\n ]})\n\n stock_inv_product_2 = self.env['stock.inventory'].create({\n 'name': 'Stock Inventory for Stone Tools',\n 'product_ids': [(4, self.product_2.id)],\n 'line_ids': [\n (0, 0, {'product_id': self.product_2.id, 'product_uom_id': self.product_2.uom_id.id, 'product_qty': 12, 'prod_lot_id': lot_product_2.id, 'location_id': self.stock_location_14.id})\n ]})\n (stock_inv_product_4 | stock_inv_product_2)._action_start()\n stock_inv_product_2.action_validate()\n stock_inv_product_4.action_validate()\n\n #Create Manufacturing order.\n production_form = Form(self.env['mrp.production'])\n production_form.product_id = self.product_6\n production_form.bom_id = self.bom_3\n production_form.product_qty = 12\n production_form.product_uom_id = self.product_6.uom_id\n production_3 = production_form.save()\n production_3.action_confirm()\n production_3.action_assign()\n\n # Check Manufacturing order's availability.\n self.assertEqual(production_3.reservation_state, 'assigned', \"Production order's availability should be Available.\")\n\n location_id = production_3.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')) and production_3.location_src_id.id or production_3.location_dest_id.id,\n\n # Scrap Product Wood without lot to check assert raise ?.\n scrap_id = self.env['stock.scrap'].with_context(active_model='mrp.production', active_id=production_3.id).create({'product_id': self.product_2.id, 'scrap_qty': 1.0, 'product_uom_id': self.product_2.uom_id.id, 'location_id': location_id, 'production_id': production_3.id})\n with self.assertRaises(UserError):\n scrap_id.do_scrap()\n\n # Scrap Product Wood with lot.\n self.env['stock.scrap'].with_context(active_model='mrp.production', active_id=production_3.id).create({'product_id': self.product_2.id, 'scrap_qty': 1.0, 'product_uom_id': self.product_2.uom_id.id, 'location_id': location_id, 'lot_id': lot_product_2.id, 'production_id': production_3.id})\n\n #Check scrap move is created for this production order.\n #TODO: should check with scrap objects link in between", "def test_create_quizes(self):\n driver = self.driver\n wait = self.wait\n\n create_quizz_name(driver, wait, quiz_name)\n\n create_textual_question(driver, wait, textual_question_1)\n create_textual_question(driver, wait, textual_question_2)\n create_textual_question(driver, wait, textual_question_3)\n\n create_single_choice_question(driver, wait, single_choice_1, single_choice_1_opt_1,single_choice_1_opt_1, single_choice_1_opt_2)\n create_single_choice_question(driver, wait, single_choice_2, single_choice_2_opt_1,single_choice_2_opt_1, single_choice_2_opt_2)\n create_single_choice_question(driver, wait, single_choice_3, single_choice_3_opt_1,single_choice_3_opt_1, single_choice_3_opt_2)\n\n create_multiple_choice_question(driver, wait, multiple_choice_1, multiple_choice_1_opt_1, multiple_choice_1_opt_1, multiple_choice_1_opt_2)\n create_multiple_choice_question(driver, wait, multiple_choice_2, multiple_choice_2_opt_2, multiple_choice_2_opt_1, multiple_choice_2_opt_2)\n create_multiple_choice_question(driver, wait, multiple_choice_3, multiple_choice_3_opt_2, multiple_choice_3_opt_1, multiple_choice_3_opt_2)\n\n save_quiz(driver, wait)\n self.quiz_name = quiz_name\n self.email_teacher = email_teacher\n self.password_teacher = 
password_teacher\n\n find_quizz(driver, wait, quiz_name)\n driver.get_screenshot_as_file('{} created.png'.format(quiz_name))", "def run():\n print(\"\\n************************************** PARAMERTERS **************************************\\n\")\n print(f'TARGET_GROUP: {PARAM.TARGET_GROUP}\\n')\n print(f'ACQ_FILE: {PARAM.ACQ_FILE}\\n')\n print(f'FINAL_DATA_DIR: {PARAM.FINAL_DATA_DIR}\\n')\n print(f'FAULTY_EMPLOYEES_DIR: {PARAM.FAULTY_EMPLOYEES_DIR}\\n')\n print(f'NONE_MATCHED_DIR: {PARAM.NONE_MATCHED_DIR}\\n')\n print('*****************************************************************************************\\n')\n\n jti = JobTransitionInspector(PARAM.ACQ_FILE)\n jti.exec()", "def beginExecution(self):\n self.setup = self.am_getOption(\"Setup\", self.setup)\n self.enabled = self.am_getOption(\"EnableFlag\", self.enabled)\n self.restartAgents = self.am_getOption(\"RestartAgents\", self.restartAgents)\n self.restartExecutors = self.am_getOption(\"RestartExecutors\", self.restartExecutors)\n self.restartServices = self.am_getOption(\"RestartServices\", self.restartServices)\n self.diracLocation = os.environ.get(\"DIRAC\", self.diracLocation)\n self.addressTo = self.am_getOption('MailTo', self.addressTo)\n self.addressFrom = self.am_getOption('MailFrom', self.addressFrom)\n self.controlComponents = self.am_getOption('ControlComponents', self.controlComponents)\n self.commitURLs = self.am_getOption('CommitURLs', self.commitURLs)\n\n self.csAPI = CSAPI()\n\n res = self.getRunningInstances(instanceType='Agents')\n if not res[\"OK\"]:\n return S_ERROR(\"Failure to get running agents\")\n self.agents = res[\"Value\"]\n\n res = self.getRunningInstances(instanceType='Executors')\n if not res[\"OK\"]:\n return S_ERROR(\"Failure to get running executors\")\n self.executors = res[\"Value\"]\n\n res = self.getRunningInstances(instanceType='Services')\n if not res[\"OK\"]:\n return S_ERROR(\"Failure to get running services\")\n self.services = res[\"Value\"]\n\n self.accounting.clear()\n return S_OK()", "def ask_questions(self):\n\n # Ask questions to the user\n self.session['answers'] = inquirer.prompt(\n app_session(self), theme=load_theme_from_dict(self.theme)\n )\n\n # Get confirm status for [newbuild] [makezip] [zipsigner]\n confirm = True\n confirm_list = [\n self.session['answers']['confirm_newbuild'],\n self.session['answers']['confirm_makezip'],\n self.session['answers']['confirm_zipsigner'],\n ]\n\n for response in confirm_list:\n if response is False:\n confirm = False\n\n # Set confirm status for [makeclean] [menuconfig]\n if (self.session['answers']['cleanbuild'] is False\n and self.session['mode'] == 'makeclean') or (\n self.session['answers']['menuconfig'] is False\n and self.session['mode'] == 'menuconfig'):\n confirm = False\n\n # Restart new session?\n if confirm is False:\n del self\n os.execl(sys.executable, sys.executable, * sys.argv)", "def setup_prod():\n setup_general()", "def test_application(self):\n try:\n from qiskit import (\n Aer,\n ) # pylint: disable=unused-import,import-outside-toplevel\n except ImportError as ex: # pylint: disable=broad-except\n self.skipTest(f\"Aer doesn't appear to be installed. 
Error: '{str(ex)}'\")\n return\n\n num_qubits = 3\n\n # parameters for considered random distribution\n s_p = 2.0 # initial spot price\n vol = 0.4 # volatility of 40%\n r = 0.05 # annual interest rate of 4%\n t_m = 40 / 365 # 40 days to maturity\n\n # resulting parameters for log-normal distribution\n mu = (r - 0.5 * vol ** 2) * t_m + np.log(s_p)\n sigma = vol * np.sqrt(t_m)\n mean = np.exp(mu + sigma ** 2 / 2)\n variance = (np.exp(sigma ** 2) - 1) * np.exp(2 * mu + sigma ** 2)\n stddev = np.sqrt(variance)\n\n # lowest and highest value considered for the spot price;\n # in between, an equidistant discretization is considered.\n low = np.maximum(0, mean - 3 * stddev)\n high = mean + 3 * stddev\n bounds = (low, high)\n\n # construct circuit factory for uncertainty model\n uncertainty_model = LogNormalDistribution(\n num_qubits, mu=mu, sigma=sigma ** 2, bounds=bounds\n ).decompose()\n\n # set the strike price (should be within the low and the high value of the uncertainty)\n strike_price = 1.896\n\n # create amplitude function\n european_call_delta = EuropeanCallDeltaObjective(\n num_state_qubits=num_qubits, strike_price=strike_price, bounds=bounds\n )\n\n # create state preparation\n state_preparation = european_call_delta.compose(uncertainty_model, front=True)\n\n problem = EstimationProblem(\n state_preparation=state_preparation,\n objective_qubits=[num_qubits],\n post_processing=european_call_delta.post_processing,\n )\n\n # run amplitude estimation\n q_i = QuantumInstance(\n Aer.get_backend(\"aer_simulator\"), seed_simulator=125, seed_transpiler=80\n )\n iae = IterativeAmplitudeEstimation(epsilon_target=0.01, alpha=0.05, quantum_instance=q_i)\n result = iae.estimate(problem)\n self.assertAlmostEqual(result.estimation_processed, 0.8088790606143996)", "def __init__(self,fastqc_dir):\n self._fastqc_dir = os.path.abspath(fastqc_dir)\n self._fastqc_summary = FastqcSummary(\n summary_file=os.path.join(self._fastqc_dir,\n 'summary.txt'))\n self._fastqc_data = FastqcData(\n os.path.join(self._fastqc_dir,\n 'fastqc_data.txt'))\n self._html_report = self._fastqc_dir + '.html'\n self._zip = self._fastqc_dir + '.zip'", "def main():\n logging.info(\"Executing data quality module\")\n\n calculate_quality()", "def setupQi(context):\n logger = logging.getLogger('libertic.event / setuphandler')\n\n # Ordinarily, GenericSetup handlers check for the existence of XML files.\n\n # Here, we are not parsing an XML file, but we use this text file as a\n # flag to check that we actually meant for this import step to be run.\n # The file is found in profiles/default.\n\n if context.readDataFile('libertic.event_qi.txt') is None:\n return\n\n portal = context.getSite()\n portal_quickinstaller = getToolByName(portal, 'portal_quickinstaller')\n portal_setup = getToolByName(portal, 'portal_setup')\n logger = logging.getLogger('libertic.event.Install')", "def make_qa_report(metadata, base_dir, write_tag):\n # Change directory as QA code writes output directly to the running directory\n work_dir = os.getcwd()\n\n filenames = metadata['FITSImageFilename']\n for i, fits_file in enumerate(filenames):\n pb_dir = _productdir(metadata, base_dir, i, '_PB', write_tag)\n pb_filebase = os.path.splitext(fits_file)[0] + '_PB'\n\n log.info('Write QA report output')\n os.chdir(pb_dir)\n pb_fits = os.path.join(pb_dir, pb_filebase + FITS_EXT)\n command = '/home/kat/valid/Radio_continuum_validation -I {} --telescope MeerKAT -F'\\\n ' /home/kat/valid/filter_config_MeerKAT.txt -r'.format(pb_fits)\n sysarg = shlex.split(command)\n with 
log_qa(log):\n rcv.main(sysarg[0], sysarg[1:])\n os.chdir(work_dir)", "def setUp(self):\n print('\\n\\nSetting up test pre-conditions.')\n\n # Set of valid inputs\n self.api_key = 'this_is_a_valid_api_key'\n self.base_url = 'https://oti.slashnext.cloud/api'\n\n # Set of valid expected outputs\n self.name = 'slashnext-api-quota'\n self.description = 'This action queries the SlashNext cloud database and retrieves the details of API quota.'\n self.parameters = []\n\n self.version = f'\\nv1.1.0'\n self.version += f'\\nDeveloped by SlashNext, Inc. ([email protected])\\n'\n\n self.usage = f'\\n{self.description}\\n\\n'\n self.usage += f'Usage: {self.name} -a [api_key] -b [base_url]'\n for param in self.parameters:\n self.usage += f' -{param.get(\"parameter\")[0]} [{param.get(\"parameter\")}]'\n\n self.usage += f'\\n'\n self.usage += f' -a --api_key Please provide a valid API Key or contact [email protected]\\n'\n self.usage += f' -b --base_url Please provide a valid Base URL or contact [email protected]\\n'\n for param in self.parameters:\n param_name = param.get(\"parameter\")\n param_desc = param.get(\"description\")\n self.usage += ' -{0} --{1:16} {2}\\n'.format(param_name[0], param_name, param_desc)\n\n self.usage += f' -V --version Version of SlashNext phishing IR commands.\\n'\n self.usage += f' -H --help Prints this help/usage.\\n'\n self.usage += f'\\nDeveloped by SlashNext, Inc. ([email protected])\\n'\n\n self.api_url = 'https://oti.slashnext.cloud/api/oti/v1/quota/status'\n self.api_data = {\n 'authkey': self.api_key\n }\n self.quota_response = {\n 'errorMsg': 'Success',\n 'errorNo': 0,\n 'quotaDetails': {\n 'consumedAPIDetail': {\n 'customerApiQuota': 0,\n 'downloadHTML': 0,\n 'downloadScreenshot': 0,\n 'downloadText': 0,\n 'hostReputation': 0,\n 'hostUrls': 0,\n 'scanReportWithScanId': 0,\n 'scanSyncReportWithScanId': 0,\n 'urlReputation': 0,\n 'urlScan': 0,\n 'urlScanSync': 0\n },\n 'consumedPointsDetail': {\n 'customerApiQuota': 0,\n 'downloadHTML': 0,\n 'downloadScreenshot': 0,\n 'downloadText': 0,\n 'hostReputation': 0,\n 'hostUrls': 0,\n 'scanReportWithScanId': 0,\n 'scanSyncReportWithScanId': 0,\n 'urlReputation': 0,\n 'urlScan': 0,\n 'urlScanSync': 0\n },\n 'pointsConsumptionRate': {\n 'customerApiQuota': 0,\n 'downloadHTML': 0,\n 'downloadScreenshot': 0,\n 'downloadText': 0,\n 'hostReputation': 1,\n 'hostUrls': 1,\n 'urlReputation': 1,\n 'urlScan': 3,\n 'urlScanSync': 3,\n 'urlScanSyncWithScanId': 0,\n 'urlScanWithScanId': 0\n },\n 'expiryDate': '2020-12-19',\n 'isExpired': False,\n 'licensedQuota': 'Unlimited',\n 'remainingQuota': 'Unlimited',\n 'note': 'Your annual API quota will be reset to zero, once either the limit is reached or upon quota '\n 'expiration date indicated above.'\n }\n }\n\n self.api_quota_command = SlashNextCommandApiQuota()", "def run(self):\n\n print('Quality script: ' + self.script)\n print('Report file: ' + self.report)\n print('Base dir: ' + self.baseDir)\n\n cont = raw_input('Are these values correct? 
' + \\\n 'Press \"A\" to abbort or any other key to proceed ')\n if cont == 'A':\n sys.exit(0)\n\n for packageDir in self.packages.keys():\n localPath = os.path.join(self.baseDir, packageDir)\n # execute the quality script which produces a codeQuality.txt file\n command = self.script + ' ' + localPath\n result = getstatusoutput(command)\n for entry in result:\n print(str(entry))\n # parse the code quality file for the rating:\n reportFile = open(self.report, 'r')\n repNl = reportFile.readline()\n while repNl:\n if repNl.find('Your code has been rated at') == 0:\n relRating = repNl.split(' ')[6]\n absRating = float(relRating.split('/')[0])\n if absRating < self.threshold:\n fileRating = (str(absRating), packageDir)\n authors = self.packages[packageDir]\n if authors not in self.lowQuality:\n self.lowQuality[self.packages[packageDir]] = []\n # add the low rating\n self.lowQuality[authors].append(fileRating)\n break\n repNl = reportFile.readline()\n reportFile.close()", "def test_2_scrnaseq(install_test_files, data_dir):\n with make_workdir() as workdir:\n cl = [\"bcbio_nextgen.py\",\n get_post_process_yaml(data_dir, workdir),\n os.path.join(data_dir, os.pardir, \"Harvard-inDrop\"),\n os.path.join(data_dir, \"run_info-scrnaseq.yaml\")]\n subprocess.check_call(cl)", "def main():\n daq_device = None\n ai_device = None\n status = ScanStatus.IDLE\n\n range_index = 0\n trigger_type_index = 0\n interface_type = InterfaceType.ANY\n low_channel = 0\n high_channel = 0\n samples_per_channel = 100000\n rate = 48000\n # scan_options = ScanOption.CONTINUOUS | ScanOption.EXTTRIGGER\n scan_options = ScanOption.RETRIGGER | ScanOption.EXTTRIGGER | ScanOption.DEFAULTIO\n flags = AInScanFlag.DEFAULT\n\n try:\n # Get descriptors for all of the available DAQ devices.\n devices = get_daq_device_inventory(interface_type)\n number_of_devices = len(devices)\n if number_of_devices == 0:\n raise RuntimeError('Error: No DAQ devices found')\n\n print('Found', number_of_devices, 'DAQ device(s):')\n for i in range(number_of_devices):\n print(' [', i, '] ', devices[i].product_name, ' (',\n devices[i].unique_id, ')', sep='')\n\n # descriptor_index = input('\\nPlease select a DAQ device, enter a number'\n # + ' between 0 and '\n # + str(number_of_devices - 1) + ': ')\n # descriptor_index = int(descriptor_index)\n descriptor_index = 0\n if descriptor_index not in range(number_of_devices):\n raise RuntimeError('Error: Invalid descriptor index')\n\n # Create the DAQ device from the descriptor at the specified index.\n daq_device = DaqDevice(devices[descriptor_index])\n\n # Get the AiDevice object and verify that it is valid.\n ai_device = daq_device.get_ai_device()\n if ai_device is None:\n raise RuntimeError('Error: The DAQ device does not support analog '\n 'input')\n\n # Verify the specified device supports hardware pacing for analog input.\n ai_info = ai_device.get_info()\n if not ai_info.has_pacer():\n raise RuntimeError('\\nError: The specified DAQ device does not '\n 'support hardware paced analog input')\n \n # Establish a connection to the DAQ device.\n descriptor = daq_device.get_descriptor()\n print('\\nConnecting to', descriptor.dev_string, '- please wait...')\n # For Ethernet devices using a connection_code other than the default\n # value of zero, change the line below to enter the desired code.\n daq_device.connect(connection_code=0)\n\n # The default input mode is SINGLE_ENDED.\n input_mode = AiInputMode.DIFFERENTIAL\n # If SINGLE_ENDED input mode is not supported, set to DIFFERENTIAL.\n # if 
ai_info.get_num_chans_by_mode(AiInputMode.SINGLE_ENDED) <= 0:\n # input_mode = AiInputMode.DIFFERENTIAL\n\n # Get the number of channels and validate the high channel number.\n number_of_channels = ai_info.get_num_chans_by_mode(input_mode)\n if high_channel >= number_of_channels:\n high_channel = number_of_channels - 1\n channel_count = high_channel - low_channel + 1\n\n # Get a list of supported ranges and validate the range index.\n ranges = ai_info.get_ranges(input_mode)\n if range_index >= len(ranges):\n range_index = len(ranges) - 1\n\n # Get a list of trigger types.\n trigger_types = ai_info.get_trigger_types()\n if not trigger_types:\n raise RuntimeError('Error: The device does not support an external '\n 'trigger')\n\n # Set the trigger.\n #\n # This example uses the default values for setting the trigger so there\n # is no need to call this function. If you want to change the trigger\n # type (or any other trigger parameter), uncomment this function call\n # and change the trigger type (or any other parameter)\n #ai_device.set_trigger(trigger_types[trigger_type_index], 0, 0, 0, 0)\n ai_device.set_trigger(trigger_types[trigger_type_index], 0, 0, 0, 1)\n \n data = create_float_buffer(channel_count, samples_per_channel)\n\n print('\\n', descriptor.dev_string, ' ready', sep='')\n print(' Function demonstrated: ai_device.set_trigger()')\n print(' Channels: ', low_channel, '-', high_channel)\n print(' Input mode: ', input_mode.name)\n print(' Range: ', ranges[range_index].name)\n print(' Samples per channel: ', samples_per_channel)\n print(' Rate: ', rate, 'Hz')\n print(' Scan options:', display_scan_options(scan_options))\n print(' Trigger type:', trigger_types[trigger_type_index].name)\n try:\n input('\\nHit ENTER to continue\\n')\n except (NameError, SyntaxError):\n pass\n\n system('clear')\n\n ai_device.a_in_scan(low_channel, high_channel, input_mode,\n ranges[range_index], samples_per_channel, rate,\n scan_options, flags, data)\n\n print('Please enter CTRL + C to quit waiting for trigger\\n')\n print('Waiting for trigger ...\\n')\n\n try:\n n = 1;\n while True:\n try:\n status, transfer_status = ai_device.get_scan_status()\n\n index = transfer_status.current_index\n if index >= 0:\n system('clear')\n print('Please enter CTRL + C to terminate the process')\n print('\\nActive DAQ device: ', descriptor.dev_string,\n ' (', descriptor.unique_id, ')\\n', sep='')\n\n print('currentTotalCount = ',\n transfer_status.current_total_count)\n print('currentScanCount = ',\n transfer_status.current_scan_count)\n print('currentIndex = ', index, '\\n')\n\n for i in range(channel_count):\n print('chan =',\n i + low_channel, ': ',\n '{:.6f}'.format(data[index + i]))\n print(status)\n print(n);\n n = n+1;\n sleep(0.1)\n except (ValueError, NameError, SyntaxError):\n break\n except KeyboardInterrupt:\n pass\n\n except RuntimeError as error:\n print('\\n', error)\n\n finally:\n if daq_device:\n # Stop the acquisition if it is still running.\n if status == ScanStatus.RUNNING:\n ai_device.scan_stop()\n if daq_device.is_connected():\n daq_device.disconnect()\n daq_device.release()", "def qc_qubit(args):\n clarity_epp.qc.qubit.set_qc_flag(lims, args.process_id)", "def setUp(self):\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = \"trivia_test\"\n self.QUESTIONS_PER_PAGE = 10\n #self.database_path = \"postgres://{}/{}\".format('localhost:5432', self.database_name)\n self.database_path = \"postgres://{}/{}\".format('postgres:postgres@localhost:5432', self.database_name)\n 
setup_db(self.app, self.database_path)\n\n self.new_question_1 = {'answer': '1', 'category': 1, 'difficulty': 1, 'question': 'new question 1'}\n self.new_question_2 = {'answer': '', 'category': '', 'difficulty': '', 'question':'' }\n\n self.search_term_1 = {'searchTerm': 'actor'}\n self.search_term_2 = {'searchterm': 'actor'} # wrong requst parameter\n self.search_term_3 = {'searchTerm': 'World Cup'} \n self.search_term_4 = {'searchTerm': 'world Cup'} # lower case which doesn't exist in any question\n\n # when category is 'click' which menas ALL category\n self.quizzes_1 = {\n 'previous_questions': [], \n 'quiz_category': {'id': 0, 'type': 'click'}\n }\n\n # when specify category\n self.quizzes_2 = {\n 'previous_questions': [18, 19],\n 'quiz_category': {'id': '1', 'type': 'Art'}\n }\n\n # wrong data for request as category doesn't exist\n self.quizzes_3 = {\n 'previous_questions': [18, 19],\n 'quiz_category': {'id': '1', 'type': 'Full Stack Web Development'}\n }\n\n # previous_questions contains all questions which means we run out of questions\n self.quizzes_4 = {\n 'previous_questions': [2,4,5,6,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23],\n 'quiz_category': {'id': '1', 'type': 'Art'}\n }\n\n # binds the app to the current context\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n # create all tables\n self.db.create_all()", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--process_queue\", action='store_true',\n dest=\"process_queue\",\n help=\"Process also schedd queue (Running/Idle/Pending jobs)\")\n parser.add_argument(\"--feed_es\", action='store_true',\n dest=\"feed_es\",\n help=\"Feed to Elasticsearch\")\n parser.add_argument(\"--feed_es_for_queues\", action='store_true',\n dest=\"feed_es_for_queues\",\n help=\"Feed queue data also to Elasticsearch\")\n parser.add_argument(\"--feed_amq\", action='store_true',\n dest=\"feed_amq\",\n help=\"Feed to CERN AMQ\")\n\n parser.add_argument(\"--schedd_filter\", default='',\n type=str, dest=\"schedd_filter\",\n help=(\"Comma separated list of schedd names to process \"\n \"[default is to process all]\"))\n parser.add_argument(\"--skip_history\", action='store_true',\n dest=\"skip_history\",\n help=\"Skip processing the history. (Only do queues.)\")\n parser.add_argument(\"--read_only\", action='store_true',\n dest=\"read_only\",\n help=\"Only read the info, don't submit it.\")\n parser.add_argument(\"--dry_run\", action='store_true',\n dest=\"dry_run\",\n help=(\"Don't even read info, just pretend to. 
(Still \"\n \"query the collector for the schedd's though.)\"))\n parser.add_argument(\"--keep_full_queue_data\", action='store_true',\n dest=\"keep_full_queue_data\",\n help=\"Drop all but some fields for running jobs.\")\n parser.add_argument(\"--amq_bunch_size\", default=5000,\n type=int, dest=\"amq_bunch_size\",\n help=(\"Send docs to AMQ in bunches of this number \"\n \"[default: %(default)d]\"))\n parser.add_argument(\"--es_bunch_size\", default=250,\n type=int, dest=\"es_bunch_size\",\n help=(\"Send docs to ES in bunches of this number \"\n \"[default: %(default)d]\"))\n parser.add_argument(\"--query_queue_batch_size\", default=50,\n type=int, dest=\"query_queue_batch_size\",\n help=(\"Send docs to listener in batches of this number \"\n \"[default: %(default)d]\"))\n parser.add_argument(\"--upload_pool_size\", default=8,\n type=int, dest=\"upload_pool_size\",\n help=(\"Number of parallel processes for uploading \"\n \"[default: %(default)d]\"))\n parser.add_argument(\"--query_pool_size\", default=8,\n type=int, dest=\"query_pool_size\",\n help=(\"Number of parallel processes for querying \"\n \"[default: %(default)d]\"))\n\n parser.add_argument(\"--es_hostname\", default='es-cms.cern.ch',\n type=str, dest=\"es_hostname\",\n help=\"Hostname of the elasticsearch instance to be used \"\n \"[default: %(default)s]\")\n parser.add_argument(\"--es_port\", default=9203,\n type=int, dest=\"es_port\",\n help=\"Port of the elasticsearch instance to be used \"\n \"[default: %(default)d]\")\n parser.add_argument(\"--es_index_template\", default='cms',\n type=str, dest=\"es_index_template\",\n help=(\"Trunk of index pattern. \"\n \"Needs to start with 'cms' \"\n \"[default: %(default)s]\"))\n parser.add_argument(\"--log_dir\", default='log/',\n type=str, dest=\"log_dir\",\n help=\"Directory for logging information [default: %(default)s]\")\n parser.add_argument(\"--log_level\", default='WARNING',\n type=str, dest=\"log_level\",\n help=\"Log level (CRITICAL/ERROR/WARNING/INFO/DEBUG) \"\n \"[default: %(default)s]\")\n parser.add_argument(\"--email_alerts\", default=[], action='append',\n dest=\"email_alerts\",\n help=\"Email addresses for alerts [default: none]\")\n\n args = parser.parse_args()\n set_up_logging(args)\n\n # --dry_run implies read_only\n args.read_only = args.read_only or args.dry_run\n\n main_driver(args)", "def __init__(self, examdb, number_of_questions, intended_learning_outcome_used, course_code,\n course_version, exam_date, allow_same_tags=False, existing_questions=None):\n try:\n assert (isinstance(number_of_questions, int))\n self.numQuest = number_of_questions\n self.ILOUsed = list(intended_learning_outcome_used)\n\n assert (isinstance(course_code, str))\n self.course_code = course_code\n\n assert (isinstance(course_version, float))\n self.course_version = course_version\n\n assert (isinstance(exam_date, date))\n self.exam_date = exam_date\n\n assert (isinstance(allow_same_tags, bool))\n self.allow_same_tags = allow_same_tags\n\n except AssertionError as err:\n print(\"Generate Questions By Goal init: \" + str(err))\n return\n\n self.ExamDB = examdb\n self._exam_id = {\n 'exam_id': '',\n 'question_ids': [],\n 'declaration_id': [],\n 'bibliography_id': []\n }\n\n self._objects = {'Declarations': [],\n 'Questions': [],\n }\n self._days = 365 # Number of days that a question is \"quarantined\".\n\n if existing_questions:\n for _qid in existing_questions:\n self._exam_id['question_ids'].append(_qid)\n self._add_question_to_exam(_qid)\n self.numQuest -= 
len(existing_questions)\n\n if self.numQuest > 0: # If there are more questions to add, run generator algorithm\n self._gen_questions_by_goals()", "def run_Experiment(DP = None, QL = None):\n\n # Path information\n output_path, exp_num = create_new_dir() #dirs Exp/1, Exp/2, ...\n DP_path = join(output_path,'DP') #dirs Exp/1/DP\n QL_path = join(output_path,'QL') #dirs Exp/1/QL\n print(\"************ Exp \", exp_num, \"************ \\n\")\n\n # Exp_summary_data\n method = get_method_str(DP, QL)\n exp_summary = [str(exp_num), method]\n\n\n # Run DP\n if DP != None:\n print(\"In Runner: Executing DP !!\")\n\n prob_file = DP[0]\n createFolder(DP_path)\n # output_params = [V_so, mean, variance, bad_count]\n output_params = run_DP(setup_grid_params, prob_file, output_file, DP_path, threshold = threshold)\n\n \"\"\"CHANGE ARGUMENT if return order of setup_grid() is changed\"\"\"\n input_params = setup_grid_params[9].copy()\n input_params.append(prob_file)\n\n exp_summary = append_params_to_summary(exp_summary, input_params, output_params)\n append_summary_to_summaryFile('Experiments/Exp_summary.csv', exp_summary)\n print(\"In Runner: Executing DP Finished!!\")\n\n # Run QL\n if QL != None:\n print(\"In Runner: Executing QL !!\")\n\n QL_params = QL\n createFolder(QL_path)\n output_parameters_all_cases = run_QL(setup_grid_params, QL_params, QL_path, exp_num)\n # run_QL(setup_grid_params, QL_params, QL_path)\n\n print(\"In Runner: Executing QL Finished !!\")", "def test_fastqc():\n fastqc.FastQC(\"fastqc\")", "def startDQM(run, startLumi, daq, dqmRunKey, ecalIn, esIn, logFile):\n\n logFile.write('Processing run', run)\n\n if dqmRunKey == 'cosmic_run':\n workflowBase = 'Cosmics'\n elif dqmRunKey == 'pp_run':\n workflowBase = 'Protons'\n elif dqmRunKey == 'hi_run':\n workflowBase = 'HeavyIons'\n else:\n workflowBase = 'All'\n\n procs = {}\n\n if daq == 'central':\n# commonOptions = 'runNumber={run} runInputDir={inputDir} workflow=/{dataset}/{period}/CentralDAQ'.format(run = run, inputDir = '/tmp/onlineDQM', dataset = workflowBase, period = config.period)\n\n# if ecalIn:\n# ecalOptions = 'environment=PrivLive outputPath={outputPath} verbosity={verbosity}'.format(outputPath = config.tmpoutdir, verbosity = VERBOSITY)\n#\n# log = open(config.logdir + '/ecal_dqm_sourceclient-privlive_cfg.log', 'a')\n# log.write('\\n\\n\\n')\n# command = 'source $HOME/DQM/cmssw.sh; exec cmsRun {conf} {common} {ecal} {spec}'.format(conf = config.workdir + '/ecalConfigBuilder.py', common = commonOptions, ecal = ecalOptions, spec = 'cfgType=Physics')\n# proc = subprocess.Popen(command, shell = True, stdout = log, stderr = subprocess.STDOUT)\n# logFile.write(command)\n# procs['Physics'] = (proc, log)\n \n# log = open(config.logdir + '/ecalcalib_dqm_sourceclient-privlive_cfg.log', 'a')\n# log.write('\\n\\n\\n')\n# command = 'source $HOME/DQM/cmssw.sh; exec cmsRun {conf} {common} {ecal} {spec}'.format(conf = config.workdir + '/ecalConfigBuilder.py', common = commonOptions, ecal = ecalOptions, spec = 'cfgType=Calibration')\n# proc = subprocess.Popen(command, shell = True, stdout = log, stderr = subprocess.STDOUT)\n# logFile.write(command)\n# procs['Calibration'] = (proc, log)\n\n# if esIn:\n# log = open(config.logdir + '/es_dqm_sourceclient-privlive_cfg.log', 'a')\n# log.write('\\n\\n\\n')\n# command = 'source $HOME/DQM/cmssw.sh; exec cmsRun {conf} {common}'.format(conf = config.workdir + '/es_dqm_sourceclient-privlive_cfg.py', common = commonOptions)\n# proc = subprocess.Popen(command, shell = True, stdout = log, stderr = 
subprocess.STDOUT)\n# logFile.write(command)\n# procs['ES'] = (proc, log)\n\n elif daq == 'minidaq':\n if not os.path.isdir('/dqmminidaq/run%d' % run):\n logFile.write('DQM stream was not produced')\n return {}\n\n commonOptions = 'runNumber={run} runInputDir={inputDir} workflow=/{dataset}/{period}/MiniDAQ'.format(run = run, inputDir = '/dqmminidaq', dataset = workflowBase, period = config.period)\n\n if ecalIn:\n \n ecalOptions = 'environment=PrivLive outputPath={outputPath} verbosity={verbosity}'.format(outputPath = config.tmpoutdir, verbosity = VERBOSITY)\n \n log = open(config.logdir + '/ecalcalib_dqm_sourceclient-privlive_cfg.log', 'a')\n log.write('\\n\\n\\n')\n command = 'source $HOME/DQM/cmssw.sh; exec cmsRun {conf} {common} {ecal} {spec}'.format(conf = config.workdir + '/ecalConfigBuilder.py', common = commonOptions, ecal = ecalOptions, spec = 'cfgType=CalibrationStandalone')\n proc = subprocess.Popen(command, shell = True, stdout = log, stderr = subprocess.STDOUT)\n logFile.write(command)\n procs['Calibration'] = (proc, log)\n\n if esIn:\n log = open(config.logdir + '/es_dqm_sourceclient-privlive_cfg.log', 'a')\n log.write('\\n\\n\\n')\n command = 'source $HOME/DQM/cmssw.sh; exec cmsRun {conf} {common}'.format(conf = config.workdir + '/es_dqm_sourceclient-privlive_cfg.py', common = commonOptions)\n proc = subprocess.Popen(command, shell = True, stdout = log, stderr = subprocess.STDOUT)\n logFile.write(command)\n procs['ES'] = (proc, log)\n\n logFile.write('Running configurations:', sorted(procs.keys()))\n\n return procs", "def setUp(self):\n TCBase.setUp(self)\n\n # ---\n\n resp = self.request(\n self.client.post,\n '/admin/survey',\n {\n 'title': 'title',\n 'description': 'description',\n 'start_date': '2018-01-01',\n 'end_date': '2018-03-01',\n 'target': ujson.dumps([1, 3])\n },\n self.admin_access_token\n )\n\n survey_id = self.get_response_data(resp)['id']\n\n self.json_request(\n self.client.post,\n '/admin/survey/question',\n {\n 'survey_id': survey_id,\n 'questions': [\n {\n 'title': 'title',\n 'is_objective': False\n },\n {\n 'title': 'title',\n 'is_objective': False\n }\n ]\n },\n self.admin_access_token\n )", "def setUp(self):\n\n app.testing = True\n self.app = app.test_client()\n\n self.valid_question = {\n \"title\" : \"tests\",\n \"question\": \"How do I refactor tests with database?\"\n }\n\n self.invalid_question = {\n \"title\" : \"\",\n \"question\": \"How do I refactor tests with database?\"\n }\n\n self.valid_question2 = {\n \"title\" : \"heroku\",\n \"question\": \"How do I refactor tests?\"\n }", "def setUp(self):\r\n super(TestAnswerDistributions, self).setUp()\r\n\r\n self.homework = self.add_graded_section_to_course('homework')\r\n self.add_dropdown_to_section(self.homework.location, 'p1', 1)\r\n self.add_dropdown_to_section(self.homework.location, 'p2', 1)\r\n self.add_dropdown_to_section(self.homework.location, 'p3', 1)\r\n self.refresh_course()", "def main():\n print(\"This system will complete a sales, item and advisor review.\")\n print(\"\")\n password_request() # must put in MAGIC to proceed\n data = get_sales_data()\n\n menu(data)", "def setUp(self):\n\n pass\n # screenip2 = screenip_model.screenip(0, pd_obj_inputs, pd_obj_exp_out)\n # setup the test as needed\n # e.g. 
pandas to open screenip qaqc csv\n # Read qaqc csv and create pandas DataFrames for inputs and expected outputs", "def populate_agdd_qc(urma_start, urma_end, acis_start, acis_end, prism_start, prism_end):\r\n logging.info(' ')\r\n logging.info('-----------------beginning climate quality check population-----------------')\r\n\r\n stations = get_stations()\r\n\r\n sources = get_sources()\r\n acis_source_id = None\r\n urma_source_id = None\r\n prism_source_id = None\r\n for source in sources:\r\n if source['name'] == 'ACIS':\r\n acis_source_id = source['id']\r\n elif source['name'] == 'URMA':\r\n urma_source_id = source['id']\r\n elif source['name'] == 'PRISM':\r\n prism_source_id = source['id']\r\n\r\n logging.info(' ')\r\n logging.info('-----------------populating urma qc agdds-----------------')\r\n populate_agdds(urma_start, urma_end, 'URMA', urma_source_id, stations)\r\n\r\n logging.info(' ')\r\n logging.info('-----------------populating acis qc agdds-----------------')\r\n populate_agdds(acis_start, acis_end, 'ACIS', acis_source_id, stations)\r\n\r\n logging.info(' ')\r\n logging.info('-----------------populating prism qc agdds-----------------')\r\n populate_agdds(prism_start, prism_end, 'PRISM', prism_source_id, stations)", "def _child_set_up(self):\n # Set up acquisition optimisation\n self._set_up_acq_opt()\n self.method_name = 'GP-' + str(self.options.acq)", "def administer(self):\n \n # create a dictionary that will count True and False answers\n score = {True: 0, False: 0}\n\n # iterate through each question in the list of questions\n # keep track of user's score. The question and answer are stored as\n # a list, so convert back into Question class first to use\n # ask_and_evaluate\n\n # for test questions in order:\n\n # for i in range(len(self.questions)):\n # question = Question(self.questions[i][0], self.questions[i][1])\n # score_question = question.ask_and_evaluate()\n # score[score_question] = score.get(score_question, 0) + 1\n\n\n # for random order test questions:\n list_of_questions = self.questions\n\n from random import choice\n \n for i in range(len(list_of_questions)):\n # choose a question randomly:\n question_choice = choice(list_of_questions)\n # delete that from the list of questions so it's not chosen again\n list_of_questions.remove(question_choice)\n # create a Question object from the question and answer\n question = Question(question_choice[0], question_choice[1])\n # ask and evaluate the question\n score_question = question.ask_and_evaluate()\n # record the score\n score[score_question] = score.get(score_question, 0) + 1\n\n\n # print the total number of correct and incorrect responses\n print \"Total correct: {}. Total incorrect: {}\".format(score[True], \n score[False])\n\n # return the number of incorrect and correct responses as a dictionary\n return score", "def setup(self, *args):\n\n responses = [\n ('Yes.', 'eq'),\n ('No.', 'eq'),\n ('Nope.', 'eq'),\n ('Maybe.', 'eq'),\n ('Possibly.', 'eq'),\n ('It could be.', 'eq'),\n (\"No. No, I don't think so.\", 'eq/2'),\n ('Without a doubt.', 'eq/2'),\n ('I think... Yes.', 'eq/2'),\n ('Heck yes!', 'eq/2'),\n ('Maybe. Possibly. 
It could be.', 'eq/2'),\n ('Ask again later.', 'eq/3'),\n (\"I don't know.\", 'eq/3'),\n (\"I'm sorry, I was thinking of bananas\", 'eq/100'),\n ]\n\n responses += [(x, 'eq/10') for x in obliques]\n self.advices = [(x, 1) for x in obliques]\n total_prob = 0\n real_resp = []\n evens = []\n for resp, prob in responses:\n if isinstance(prob, str):\n if prob.startswith('eq'):\n sp = prob.split('/')\n if len(sp) == 1:\n evens.append((resp, 1))\n else:\n div = int(sp[1])\n evens.append((resp, 1.0 / div))\n\n else:\n real_resp.append((resp, prob))\n total_prob += prob\n\n # Share is the probability of a \"eq\" probability. Share/2 would be the\n # probability of a \"eq/2\" probability.\n share = (1 - total_prob) / sum(div for _, div in evens)\n for resp, divisor in evens:\n real_resp.append((resp, share * divisor))\n\n self.responses = real_resp\n self.is_question = re.compile('.*\\?(\\?|!)*$')", "def main(ctx, qa_dir, no_editor, report_dir, vcs, debug, main_branch):\n __main_imp__(ctx, qa_dir, no_editor, report_dir, vcs, debug, main_branch)", "def main(self):\n self.jamf_url = self.env.get(\"JSS_URL\")\n self.jamf_user = self.env.get(\"API_USERNAME\")\n self.jamf_password = self.env.get(\"API_PASSWORD\")\n self.script_path = self.env.get(\"script_path\")\n self.script_name = self.env.get(\"script_name\")\n self.script_category = self.env.get(\"script_category\")\n self.script_priority = self.env.get(\"script_priority\")\n self.osrequirements = self.env.get(\"osrequirements\")\n self.script_info = self.env.get(\"script_info\")\n self.script_notes = self.env.get(\"script_notes\")\n self.script_parameter4 = self.env.get(\"script_parameter4\")\n self.script_parameter5 = self.env.get(\"script_parameter5\")\n self.script_parameter6 = self.env.get(\"script_parameter6\")\n self.script_parameter7 = self.env.get(\"script_parameter7\")\n self.script_parameter8 = self.env.get(\"script_parameter8\")\n self.script_parameter9 = self.env.get(\"script_parameter9\")\n self.script_parameter10 = self.env.get(\"script_parameter10\")\n self.script_parameter11 = self.env.get(\"script_parameter11\")\n self.replace = self.env.get(\"replace_script\")\n self.sleep = self.env.get(\"sleep\")\n # handle setting replace in overrides\n if not self.replace or self.replace == \"False\":\n self.replace = False\n\n # clear any pre-existing summary result\n if \"jamfscriptuploader_summary_result\" in self.env:\n del self.env[\"jamfscriptuploader_summary_result\"]\n script_uploaded = False\n\n # obtain the relevant credentials\n token = self.handle_uapi_auth(self.jamf_url, self.jamf_user, self.jamf_password)\n\n # get the id for a category if supplied\n if self.script_category:\n self.output(\"Checking categories for {}\".format(self.script_category))\n\n # check for existing category - requires obj_name\n obj_type = \"category\"\n obj_name = self.script_category\n category_id = self.get_uapi_obj_id_from_name(\n self.jamf_url,\n obj_type,\n obj_name,\n token,\n )\n\n if not category_id:\n self.output(\"WARNING: Category not found!\")\n category_id = \"-1\"\n else:\n self.output(\n \"Category {} found: ID={}\".format(self.script_category, category_id)\n )\n else:\n self.script_category = \"\"\n category_id = \"-1\"\n\n # handle files with a relative path\n if not self.script_path.startswith(\"/\"):\n found_template = self.get_path_to_file(self.script_path)\n if found_template:\n self.script_path = found_template\n else:\n raise ProcessorError(f\"ERROR: Script file {self.script_path} not found\")\n\n # now start the process of 
uploading the object\n if not self.script_name:\n self.script_name = os.path.basename(self.script_path)\n\n # check for existing script\n self.output(\n \"Checking for existing '{}' on {}\".format(self.script_name, self.jamf_url)\n )\n self.output(\n \"Full path: {}\".format(self.script_path),\n verbose_level=2,\n )\n obj_type = \"script\"\n obj_name = self.script_name\n obj_id = self.get_uapi_obj_id_from_name(\n self.jamf_url,\n obj_type,\n obj_name,\n token,\n )\n\n if obj_id:\n self.output(\n \"Script '{}' already exists: ID {}\".format(self.script_name, obj_id)\n )\n if self.replace:\n self.output(\n \"Replacing existing script as 'replace_script' is set to {}\".format(\n self.replace\n ),\n verbose_level=1,\n )\n else:\n self.output(\n \"Not replacing existing script. Use replace_script='True' to enforce.\",\n verbose_level=1,\n )\n return\n\n # post the script\n self.upload_script(\n self.jamf_url,\n self.script_name,\n self.script_path,\n category_id,\n self.script_category,\n self.script_info,\n self.script_notes,\n self.script_priority,\n self.script_parameter4,\n self.script_parameter5,\n self.script_parameter6,\n self.script_parameter7,\n self.script_parameter8,\n self.script_parameter9,\n self.script_parameter10,\n self.script_parameter11,\n self.osrequirements,\n token,\n obj_id,\n )\n script_uploaded = True\n\n # output the summary\n self.env[\"script_name\"] = self.script_name\n self.env[\"script_uploaded\"] = script_uploaded\n if script_uploaded:\n self.env[\"jamfscriptuploader_summary_result\"] = {\n \"summary_text\": \"The following scripts were created or updated in Jamf Pro:\",\n \"report_fields\": [\n \"script\",\n \"path\",\n \"category\",\n \"priority\",\n \"os_req\",\n \"info\",\n \"notes\",\n \"P4\",\n \"P5\",\n \"P6\",\n \"P7\",\n \"P8\",\n \"P9\",\n \"P10\",\n \"P11\",\n ],\n \"data\": {\n \"script\": self.script_name,\n \"path\": self.script_path,\n \"category\": self.script_category,\n \"priority\": str(self.script_priority),\n \"info\": self.script_info,\n \"os_req\": self.osrequirements,\n \"notes\": self.script_notes,\n \"P4\": self.script_parameter4,\n \"P5\": self.script_parameter5,\n \"P6\": self.script_parameter6,\n \"P7\": self.script_parameter7,\n \"P8\": self.script_parameter8,\n \"P9\": self.script_parameter9,\n \"P10\": self.script_parameter10,\n \"P11\": self.script_parameter11,\n },\n }", "def __init__(self):\n\n # Primary configuration of the module is via the container environment.\n # We need to recognise that some or all of these may not be defined.\n # All run-time config that's required is given a __CFG prefix to\n # simplify checking whether all that's required has been defined.\n #\n # The SQUONK2_SLUG is limited to 10 characters, when combined with\n # \"Fragalysis {SLUG} \", this leaves (80-22) 58 characters for the\n # use with the target-access-string and session project strings\n # to form Squonk2 Unit and Project names.\n self.__CFG_SQUONK2_ASAPI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_ASAPI_URL')\n self.__CFG_SQUONK2_DMAPI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_DMAPI_URL')\n self.__CFG_SQUONK2_UI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_UI_URL')\n self.__CFG_SQUONK2_ORG_UUID: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_UUID')\n self.__CFG_SQUONK2_UNIT_BILLING_DAY: Optional[str] =\\\n os.environ.get('SQUONK2_UNIT_BILLING_DAY')\n self.__CFG_SQUONK2_PRODUCT_FLAVOUR: Optional[str] =\\\n os.environ.get('SQUONK2_PRODUCT_FLAVOUR')\n self.__CFG_SQUONK2_SLUG: Optional[str] =\\\n os.environ.get('SQUONK2_SLUG', 
'')[:_MAX_SLUG_LENGTH]\n self.__CFG_SQUONK2_ORG_OWNER: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_OWNER')\n self.__CFG_SQUONK2_ORG_OWNER_PASSWORD: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_OWNER_PASSWORD')\n self.__CFG_OIDC_AS_CLIENT_ID: Optional[str] = \\\n os.environ.get('OIDC_AS_CLIENT_ID')\n self.__CFG_OIDC_DM_CLIENT_ID: Optional[str] = \\\n os.environ.get('OIDC_DM_CLIENT_ID')\n self.__CFG_OIDC_KEYCLOAK_REALM: Optional[str] = \\\n os.environ.get('OIDC_KEYCLOAK_REALM')\n\n # Optional config (no '__CFG_' prefix)\n self.__DUMMY_TARGET_TITLE: Optional[str] =\\\n os.environ.get('DUMMY_TARGET_TITLE')\n self.__DUMMY_USER: Optional[str] =\\\n os.environ.get('DUMMY_USER')\n self.__DUMMY_TAS: Optional[str] =\\\n os.environ.get('DUMMY_TAS')\n self.__SQUONK2_VERIFY_CERTIFICATES: Optional[str] = \\\n os.environ.get('SQUONK2_VERIFY_CERTIFICATES')\n\n # The integer billing day, valid if greater than zero\n self.__unit_billing_day: int = 0\n # True if configured...\n self.__configuration_checked: bool = False\n self.__configured: bool = False\n # Ignore cert errors? (no)\n self.__verify_certificates: bool = True\n\n # The record ID of the Squonk2Org for this deployment.\n # Set on successful 'pre-flight-check'\n self.__org_record: Optional[Squonk2Org] = None\n\n self.__org_owner_as_token: str = ''\n self.__org_owner_dm_token: str = ''\n self.__keycloak_hostname: str = ''\n self.__keycloak_realm: str = ''\n\n # The Safe QuerySet from the security module.\n # Used when we are given a tas (target access string).\n # It allows us to check that a user is permitted to use the access ID\n # and relies on ISPyB credentials present in the environment.\n self.__ispyb_safe_query_set: ISpyBSafeQuerySet = ISpyBSafeQuerySet()", "def main():\n\n # add custom commandline args (if we dont have any we can pass None instead of parser to do_main_commandline_startup()).\n import argparse\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-t\", \"--querytests\", help=\"run some test queries\", action=\"store_true\", default=False)\n\n # Create a site manager and ask it to instantiate a site of the class we specify, and handle some generic commandline options\n # it returns parsed commandline args so we can look for any custom ones\n (args, sitemanager) = MewloSiteManager.do_main_commandline_startup(MewloSite_Test1, parser)\n\n # on successful creation, we can parse and do some stuff\n if (sitemanager != None):\n # sitemanager was created and early commandline processing done\n # now we have some custom commandline arg proessing we might want to do\n if (sitemanager.is_readytoserve()):\n # this stuff only is entertained if sitemanager says all green lights\n if (args.querytests):\n # simulate some simple simulated query requests\n print \"Running query tests.\"\n print sitemanager.test_submit_path('/')\n print sitemanager.test_submit_path('/help/about')\n print sitemanager.test_submit_path('/page/mystery')\n print sitemanager.test_submit_path('/test/hello/name/jesse/age/44')\n\n # now any late generic commandline stuff (including serving the website)\n sitemanager.do_main_commandline_late()", "def test_QSe_Run(self):\n fit_group, result = BayesQuasi(Program='QSe',\n SampleWorkspace=self._sample_ws,\n ResolutionWorkspace=self._res_ws,\n MinRange=-0.547607,\n MaxRange=0.543216,\n SampleBins=1,\n ResolutionBins=1,\n Elastic=False,\n Background='Sloping',\n FixedWidth=False,\n UseResNorm=False,\n WidthFile='',\n Loop=True,\n Save=False,\n Plot='None')\n self._validate_QSe_shape(result, fit_group)\n 
self._validate_QSe_value(result, fit_group)", "def runDemo(self):\n if self._db.getName() is not 'demo':\n print('only works for a demo database file')\n return\n keepGoing = input(\"Continue?\")\n\n print('Lets add a new Projector ABC, that is a spare')\n self.newProjector('ABC','spare','on site','na','2016-01-01','mfgDate','long','cs','cs','cs','cs','cs','cs','cs','cs') \n keepGoing = input(\"Continue?\")\n \n print('Lets add a new Projector ABD, that is in use in position 1')\n self.newProjector('ABD','in use','on site','1','2016-01-01','mfgDate','long','cs','cs','cs','cs','cs','cs','cs','cs')\n keepGoing = input(\"Continue?\")\n \n print('Since Projector ABD is installed, we need to add a new bulb')\n self.newBulb('unknown','0','in use','ABD','2016-01-01')\n keepGoing = input(\"Continue?\")\n\n print('Lets also create a new bulb as a spare')\n self.newBulb('b123','0','spare','na','2016-01-01')\n keepGoing = input(\"Continue?\")\n\n print('Now lets install projector ABC')\n self.installProjector('ABC','2','2016-01-02','palak','installed projector')\n keepGoing = input(\"Continue?\")\n\n print(\"Remember to put bulb inside the projector\")\n self.installBulb('2','ABC','2016-01-02','palak','installed b123 into ABC')\n keepGoing = input(\"Continue?\")\n\n print('Unfortunately Projector ABD breaks')\n self.uninstallProjector('ABD','broken','on site','2016-01-02','palak','aww man :(')\n keepGoing = input(\"Continue?\")\n \n print('Now we need to ship ABD do Taiwan to get it fixed')\n self.shipProjector('ABD','broken','2016-01-03','Taiwan','shipped')\n keepGoing = input(\"Continue?\")\n\n print('Projector ABD returns from Taiwan fixed!')\n self.recievedProjector('ABD','spare','2016-01-05','palak','it works!')\n keepGoing = input(\"Continue?\")\n\n print('okay, thats enough about ABD, lets say the bulb breaks in ABC')\n self.uninstallBulb('2','ABC','broken','2016-01-06','palak','shattered')\n keepGoing = input(\"Continue?\")\n\n print('The bulb that was in ABC, not gets reLamped')\n self.reLampBulb('2','b123','spare','na','2016-01-08')\n keepGoing = input(\"Continue?\")\n\n print('Time to install the bulb back into ABC')\n self.installBulb('3','ABC','2016-01-10','palak','it works now!')\n keepGoing = input(\"Continue?\")", "def __call__(self, path):\n\n # Iterates through a directory of raw sources and builds staging databases\n databases = self.process(path)\n\n # Output database file\n qafile = os.path.join(path, \"questions.db\")\n\n # Build consolidated SQLite questions database\n db2qa = DB2QA()\n db2qa(databases, qafile)", "def qa_feed(self, feed_class=AcquisitionFeed):\n def factory(library, facets):\n return JackpotWorkList(library, facets)\n\n return self._qa_feed(\n feed_factory=feed_class.groups,\n feed_title=\"QA test feed\",\n controller_name=\"qa_feed\",\n facet_class=JackpotFacets,\n worklist_factory=factory\n )", "def create_qa_bulk(self, product, job_id):\n\n qa = yaml.load(open(product, 'r'))\n name = os.path.basename(product)\n\n for item in ('PANAME', 'METRICS', 'PARAMS'):\n if item not in qa:\n logger.warning('{} not found.'.format(item))\n return None\n\n paname = qa['PANAME']\n metrics = self.jsonify(qa['METRICS'])\n params = self.jsonify(qa['PARAMS'])\n\n return QA(\n name=name,\n description='',\n paname=paname,\n metrics=metrics,\n params=params,\n job_id=job_id\n )", "def main():\n\n if not os.path.isdir('./results'):\n # results directory is needed\n os.mkdir('./results')\n\n # Run bess daemon\n print('start bess daemon')\n ret = bessctl_do('daemon start')\n if 
ret.returncode != 0:\n print('failed to start bess daemon')\n return 1\n\n #sleep(2)\n\n cnt_prt_q = [(2,2), (4,2), (8, 2), (2, 8), (4, 8), (8, 8), (16, 8)]\n cnt_prt_q = [(2,128),]\n # cnt_prt_q = [0]\n # Warning: SINGLE_PMD_MULTIPLE_Q is not supported any more.\n # (it needs EXCESS variable to be defined)\n exp_types = ['MULTIPLE_PMD_MULTIPLE_Q',] # 'SINGLE_PMD_MULTIPLE_Q']\n agents = ['BKDRFT', 'BESS']\n agents = ['BKDRFT',]\n for _type in exp_types:\n for agent in agents:\n results = []\n for cnt_ports, cnt_queues in cnt_prt_q:\n res = run_exp(_type, agent, cnt_ports, cnt_queues)\n results.append(res)\n generate_report_file(results,\n './results/{}_{}_results.txt'.format(_type, agent))", "def __init__(self, job_ini, event_info, oq_version, dir_info=None, no_distribute=False):\n\n self.vtag = int(oq_version.split('.')[1])\n self.dir_info = dir_info\n\n from openquake.baselib import config, performance, general, zeromq, hdf5, parallel\n from openquake.hazardlib import const, calc, gsim\n from openquake import commonlib\n from openquake.commonlib import readinput, logictree, logs\n if self.vtag >= 12:\n from openquake.commonlib import datastore\n else:\n from openquake.baselib import datastore\n from openquake.calculators import base\n from openquake.server import dbserver\n from openquake.commands import dbserver as cdbs\n\n user_name = getpass.getuser()\n\n if no_distribute:\n os.environ['OQ_DISTRIBUTE'] = 'no'\n\n # check if the datadir exists\n datadir = datastore.get_datadir()\n if not os.path.exists(datadir):\n os.makedirs(datadir)\n\n #dbserver.ensure_on()\n if dbserver.get_status() == 'not-running':\n if config.dbserver.multi_user:\n sys.exit('Please start the DbServer: '\n 'see the documentation for details')\n # otherwise start the DbServer automatically; NB: I tried to use\n # multiprocessing.Process(target=run_server).start() and apparently\n # it works, but then run-demos.sh hangs after the end of the first\n # calculation, but only if the DbServer is started by oq engine (!?)\n # Here is a trick to activate OpenQuake's dbserver\n # We first cd to the openquake directory and invoke subprocess to open/hold on dbserver\n # Then, we cd back to the original working directory \n owd = os.getcwd()\n os.chdir(os.path.dirname(os.path.realpath(__file__)))\n self.prc = subprocess.Popen([sys.executable, '-m', 'openquake.commands', 'dbserver', 'start'])\n os.chdir(owd)\n\n # wait for the dbserver to start\n waiting_seconds = 30\n while dbserver.get_status() == 'not-running':\n if waiting_seconds == 0:\n sys.exit('The DbServer cannot be started after 30 seconds. 
'\n 'Please check the configuration')\n time.sleep(1)\n waiting_seconds -= 1\n else:\n self.prc = False\n\n # check if we are talking to the right server\n err = dbserver.check_foreign()\n if err:\n sys.exit(err)\n\n # Copy the event_info\n self.event_info = event_info\n\n # Create a job\n #self.job = logs.init(\"job\", job_ini, logging.INFO, None, None, None)\n if self.vtag >= 11:\n dic = readinput.get_params(job_ini)\n else:\n dic = readinput.get_params([job_ini])\n #dic['hazard_calculation_id'] = self.job.calc_id\n\n if self.vtag >= 12:\n # Create the job log\n self.log = logs.init('job', dic, logging.INFO, None, None, None)\n # Get openquake parameters\n self.oqparam = self.log.get_oqparam()\n self.calculator = base.calculators(self.oqparam, self.log.calc_id)\n else:\n # Create the job log\n self.calc_id = logs.init('job', logging.INFO)\n # Get openquake parameters\n self.oqparam = readinput.get_oqparam(dic)\n self.calculator = base.calculators(self.oqparam, self.calc_id)\n\n # Create the calculator\n self.calculator.from_engine = True\n\n print('FetchOpenQuake: OpenQuake Hazard Calculator initiated.')", "def test_dq_rules(self,DQ):\r\n pass", "async def app_questions(self, ctx: commands.Context):\n app_questions = await self.config.guild(ctx.guild).app_questions.get_raw()\n question_1 = app_questions[\"name\"]\n question_2 = app_questions[\"timezone\"]\n question_3 = app_questions[\"age\"]\n question_4 = app_questions[\"days\"]\n question_5 = app_questions[\"hours\"]\n question_6 = app_questions[\"experience\"]\n question_7 = app_questions[\"reasonforinterest\"]\n question_8 = app_questions[\"question8\"]\n question_9 = app_questions[\"question9\"]\n question_10 = app_questions[\"question10\"]\n question_11 = app_questions[\"question11\"]\n question_12 = app_questions[\"question12\"]\n question_13 = app_questions[\"finalcomments\"]\n\n await ctx.send(\n \"There are 13 questions in this application feature, with a few preloaded already for you.\\nHere is the current configuration:\"\n )\n e = discord.Embed(colour=await ctx.embed_colour())\n e.add_field(\n name=\"Question 1\", value=f\"{question_1}\" if question_1 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 2\", value=f\"{question_2}\" if question_2 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 3\", value=f\"{question_3}\" if question_3 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 4\", value=f\"{question_4}\" if question_4 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 5\", value=f\"{question_5}\" if question_5 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 6\", value=f\"{question_6}\" if question_6 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 7\", value=f\"{question_7}\" if question_7 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 8\", value=f\"{question_8}\" if question_8 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 9\", value=f\"{question_9}\" if question_9 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 10\", value=f\"{question_10}\" if question_10 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 11\", value=f\"{question_11}\" if question_11 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 12\", value=f\"{question_12}\" if question_12 else \"Not Set\", inline=False\n )\n e.add_field(\n name=\"Question 13\", value=f\"{question_13}\" if question_13 else \"Not Set\", inline=False\n )\n await 
ctx.send(embed=e)", "def test_ai_assessment(self):\r\n\r\n # Navigate to the AI-assessment problem and submit an essay\r\n self.course_nav.go_to_sequential('AI-Assessed')\r\n self.submit_essay('ai', 'Censorship in the Libraries')\r\n\r\n # Refresh the page to get the updated feedback\r\n # then verify that we get the feedback sent by our stub XQueue implementation\r\n self.assertEqual(self.get_asynch_feedback('ai'), ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # First score is the self-assessment score, which we haven't answered, so it's 0/2\r\n # Second score is the AI-assessment score, which we have answered, so it's 1/2\r\n # Third score is peer-assessment, which we haven't answered, so it's 0/2\r\n self.assertEqual(scores, [(0, 2), (1, 2), (0, 2)])", "def fastqc():\n\n mkdir(FASTQC_DIR)\n\n printp(\"\"\"\\n#\\n# run FastQC on initial data\\n#\"\"\")\n printp(\"\"\"\\n# drmr:label fastqc\"\"\")\n printp(\"\"\"\\n# drmr:job time_limit=2h working_directory={}\"\"\".format(FASTQC_DIR))\n\n for sample, info in DATA.items():\n for x in ['treatment', 'control']:\n fastq = get_fastq(get_srr(sample)) if x == 'treatment' else get_fastq(get_input_control_srr(sample))\n symlink(fastq, FASTQC_DIR)\n printp(\"\"\"fastqc {}\"\"\".format(os.path.basename(fastq)), timed=True, ioniced=True)\n\n printp(\"\"\"\\n# drmr:wait\"\"\")", "def test_aqua_function_for_multiple_ddos(aquarius_instance):\n assert aquarius_instance.list_assets()\n assert aquarius_instance.list_assets_ddo()", "def setUp(self):\n self.driver.get('http://taqc-opencart.epizy.com/')", "def test_execute_deployment(self):\n pass", "def prepare(self,avahiInstallOnly=False,ignoreUpgradeError=False): \n content=\"\"\"<?xml version=\\\"1.0\\\" standalone=\\'no\\'?>\n<!--*-nxml-*-->\n<!DOCTYPE service-group SYSTEM \"avahi-service.dtd\">\n<!-- $Id$ -->\n<service-group>\n<name replace-wildcards=\"yes\">daascluster %h</name>\n\n<service>\n<type>_daascluster._tcp</type>\n<port>9999</port>\n</service>\n\n<service>\n<type>_ssh._tcp</type>\n<port>22</port>\n</service>\n\n</service-group>\n\"\"\"\n content=content.replace(\"daascluster\",self.cluster.domainname.replace(\".\",\"__\"))\n tmpfile=q.system.fs.joinPaths(q.dirs.tmpDir,\"avahi\")\n q.system.fs.writeFile(tmpfile,content) \n \n q.transaction.start(\"Try to configure nodes for cluster usage (will use SSH to do so).\") \n q.transaction.start(\"Ping machine %s\" %self.ipaddr)\n if not q.system.net.pingMachine(self.ipaddr,5):\n q.console.echo(\"ERROR: Could not ping to machine %s, please check machine is reacheable.\"%self.ipaddr)\n q.transaction.stop()\n else:\n q.transaction.stop() #ping\n ##q.transaction.start(\"Open SSH connection to %s\" %self.ipaddr)\n ##sshclient=q.clients.ssh.createClient(ipaddr,\"root\",rootpasswd,60) \n if avahiInstallOnly==False:\n q.transaction.start(\"Upgrade ubuntu on %s to newest packages, this can take a long time (apt-get update & upgrade).\" %self.ipaddr) \n self.execute(\"apt-get update\",False)\n #returncode,stdout,stderr=self.execute(\"apt-get upgrade -y\",False)\n #if returncode>0:\n #if not ignoreUpgradeError or q.qshellconfig.interactive==False or not q.console.askYesNo(\"Could not upgrade system, do you want to ignore and continue?\"):\n #raise \"Could not upgrade system (apt-get upgrade), probably because there was interactivity required.\"\n q.transaction.start(\"Install mc on %s\" %self.ipaddr) \n self.execute(\"apt-get install 
mc -y\")\n q.transaction.stop()\n q.transaction.stop()\n else:\n q.transaction.start(\"Update ubuntu package metadata on %s (apt-get update).\" %self.ipaddr) \n self.execute(\"apt-get update\",False)\n q.transaction.stop() \n \n q.transaction.start(\"Install avahi on %s\" %self.ipaddr) \n self.execute(\"apt-get install avahi-daemon avahi-utils -y\",False)\n self.execute(\"mkdir -p /etc/avahi/services\") \n ftp=self.getSftpConnection()\n q.logger.log(\"put %s to /etc/avahi/services/daascluster.service\" % tmpfile)\n ftp.put(tmpfile,\"/etc/avahi/services/daascluster.service\")\n q.transaction.stop() #reload avahi\n q.transaction.start(\"Reload Avahi Config\")\n self.execute(\"avahi-daemon --reload\")\n q.transaction.stop() #end of avahi\n q.transaction.start(\"Disable ssh name resolution\")\n self.execute(\"echo 'UseDNS no' >> /etc/ssh/sshd_config\",silent=True)\n self.execute(\"/etc/init.d/ssh restart\",silent=True)\n q.transaction.stop() \n \n q.transaction.stop() #end of ssh connection\n \n \n #if q.qshellconfig.interactive:\n #if copyqbase or q.console.askYesNo(\"Do you want to copy qbasedir to remote node over ssh?\"):\n ##self._removeRedundantFiles()\n #if rsync==False:\n #sshclient.copyDirTree(\"/opt/qbase3/\")\n #sshclient.copyDirTree(\"/opt/code/\")\n #else:\n #q.system.process.executeWithoutPipe(\"rsync -avzEp -e ssh /opt/qbase3/ root@%s:/opt/qbase3/ \" %self.ipaddr)\n #q.system.process.executeWithoutPipe(\"rsync -avzEp -e ssh /opt/qbase3/ root@%s:/opt/code/ \" %self.ipaddr)", "def __init__(self,host,testcaseStr,database = 0, raiseOnErrors = 1,mntPoint = None):\n self.host = host\n\t# variable to hold number of test cases\n\tself.testcaseStr = testcaseStr # Unique name for the whole testcase suite\n\tself.testcaseCounter = 0\n\tself.cardsConfig = {} # dictionary to store card config\n\tself.testcases = []\n\t\n\tself.host.clear_dmesg_syslogs()\n\t\n\tself.CardAttribChecked = 0 # variable to check if card attributes were checked\n\n self.hostOsVersion = self.host.cat_etc_issue()\n self.hostKernelVersion = self.host.get_kernel_ver()\n self.hostName = self.host.name\n self.hostCpus = self.host.getCPUs()\n self.hostCpuModel = self.host.getCPUModel()\n # build found from vgc-monitor\n \n # build passed to install\n self.build = \" \"\n self.foundCardbuild = \"\"\n \n self.cardType = \"\"\n self.cardSerial = \"\"\n \n self.isCardSerialSet = 0\n \n self.logFile = \"\"\n \n # if you want to log into the database\n self.database = database\n \n # log file to be used to storep the output\n self.logFile = \"\"\n self.logFileHttpPath = \"\"\n self.initialConfigStr = \"\"\n self.tbr = 0\n self.testLink = testLink()\n\tself.raiseOnErrors = raiseOnErrors\n #self.configure()\n #sys.exit(1)\n self.mntPoint = mntPoint", "def setUp(self):\n # Set logging level\n logging.basicConfig(level=logging.DEBUG)\n\n # Ask for url & login information\n https_url = raw_input(\n \"\\nEnter the http url [default: https://imagen2.cea.fr/database/]: \")\n if not https_url:\n https_url = \"https://imagen2.cea.fr/database/\"\n login = raw_input(\"\\nEnter the login: \")\n password = getpass.getpass(\"Enter the password: \")\n\n # Create dummy rqls\n self.rql = (\"Any C, G Where X is Subject, X code_in_study C, \"\n \"X handedness 'ambidextrous', X gender G\")\n\n # HTTP test\n self.connection = CWInstanceConnection(https_url, login, password,\n realm=\"Imagen\")", "def test_escalate_questions_cron(self, submit_ticket):\n\n questions_to_escalate = [\n # Questions over 24 hours old without an answer.\n question(\n 
created=datetime.now() - timedelta(hours=24, minutes=10),\n save=True),\n question(\n created=datetime.now() - timedelta(hours=24, minutes=50),\n save=True),\n ]\n\n # Question about Firefox OS\n fxos = product(slug='firefox-os', save=True)\n q = question(\n created=datetime.now() - timedelta(hours=24, minutes=10),\n product=fxos,\n save=True)\n questions_to_escalate.append(q)\n\n questions_not_to_escalate = [\n # Questions newer than 24 hours without an answer.\n question(save=True),\n question(created=datetime.now() - timedelta(hours=11), save=True),\n question(created=datetime.now() - timedelta(hours=21), save=True),\n ]\n\n # Question older than 24 hours with a recent answer.\n q = question(\n created=datetime.now() - timedelta(hours=24, minutes=10),\n save=True)\n answer(created=datetime.now() - timedelta(hours=10), question=q,\n save=True)\n answer(created=datetime.now() - timedelta(hours=1), creator=q.creator,\n question=q, save=True)\n questions_not_to_escalate.append(q)\n\n # Question older than 24 hours with a recent answer by the asker.\n q = question(\n created=datetime.now() - timedelta(hours=24, minutes=10),\n save=True)\n answer(\n created=datetime.now() - timedelta(hours=15), creator=q.creator,\n question=q, save=True)\n questions_not_to_escalate.append(q)\n\n # Question older than 24 hours without an answer already escalated.\n q = question(\n created=datetime.now() - timedelta(hours=24, minutes=10),\n save=True)\n q.tags.add(config.ESCALATE_TAG_NAME)\n questions_not_to_escalate.append(q)\n\n # Question with an inactive user.\n q = question(\n created=datetime.now() - timedelta(hours=24, minutes=10),\n save=True)\n q.creator.is_active = False\n q.creator.save()\n questions_not_to_escalate.append(q)\n\n # Question about Thunderbird, which is one of the products we exclude.\n tb = product(slug='thunderbird', save=True)\n q = question(\n created=datetime.now() - timedelta(hours=24, minutes=10),\n product=tb,\n save=True)\n questions_not_to_escalate.append(q)\n\n # Run the cron job and verify only 3 questions were escalated.\n eq_(len(questions_to_escalate), escalate_questions())", "def __init__(self):\r\n self.label = \"ProcessAirQuality\"\r\n self.alias = \"ProcessAirQuality\"\r\n\r\n # List of tool classes associated with this toolbox\r\n self.tools = [AirQuality]", "def test_quick_answer(self):\n pass", "def plan_production() -> str:\r\n monthly_forecasts = app.config[\"monthly_sales_forecasts\"]\r\n # True if the dictionary monthly_forecasts is empty.\r\n if not monthly_forecasts:\r\n return (\"No prediction has been made. Please click first on the \"\r\n + \"button 'First: predict sales' on the tracking screen.\")\r\n batches = app.config[\"batches\"]\r\n tanks = app.config[\"tanks\"]\r\n inventory = app.config[\"inventory\"]\r\n # Holds actual number of beers in inventory and actual number of beers that\r\n # will be finished in the next three months on basis of production stage.\r\n three_month_end_inv = {\"dunkers\": {\"this_month\": 0, \"next_month\": 0,\r\n \"third_month\": 0},\r\n \"pilsner\": {\"this_month\": 0, \"next_month\": 0,\r\n \"third_month\": 0},\r\n \"red_helles\": {\"this_month\": 0, \"next_month\": 0,\r\n \"third_month\": 0}}\r\n # Calculates when product. 
stage will be finished for 3 consecutive months.\r\n for batch in batches.values():\r\n # True if batch is already finished, continue with next batch.\r\n if batch.bottles_put_in_inventory:\r\n continue\r\n # Implies batch isn't assigned to phase, values are set in next if-else\r\n end_phase4 = \"\"\r\n end_phase3_4 = \"\"\r\n end_phase2_4 = \"\"\r\n end_phase1_4 = \"\"\r\n # Calculates end time of each batch assuming that each batch goes\r\n # directly to the next production phase without any delays.\r\n # True if the batch is in production phase 4 (bottling).\r\n if batch.time_end_phase4 != \"\":\r\n # Phase 4 ends when time_end_phase4 is reached.\r\n end_phase4 = batch.time_end_phase4\r\n # Else True if the batch is in production phase 3 (conditioning).\r\n elif batch.time_end_phase3 != \"\":\r\n # If the product is in phase 3, then the product will be finished\r\n # after the duration of phase 3 + phase 4 ends.\r\n # One minute per bottle (1/60) and each bottle contains 0.5 litres.\r\n duration_p4 = (1 / 60) * batch.volume * 2 # In hours.\r\n end_phase3_4 = batch.time_end_phase3 + timedelta(hours=duration_p4)\r\n # Else True if the batch is in production phase 2 (fermentation).\r\n elif batch.time_end_phase2 != \"\":\r\n # P2 product will be finished after duration p2 + p3 + p4 ends.\r\n duration_p3 = 336 # In hours.\r\n duration_p4 = (1 / 60) * batch.volume * 2\r\n end_phase2_4 = (batch.time_end_phase2\r\n + timedelta(hours=duration_p3)\r\n + timedelta(hours=duration_p4))\r\n # Else True if the batch is in production phase 1 (hot brewing).\r\n elif batch.time_end_phase1 != \"\":\r\n # P1 product will be finished after durat. p1 + p2 + p3 + p4 ends.\r\n duration_p2 = 672 # In hours.\r\n duration_p3 = 336\r\n duration_p4 = (1 / 60) * batch.volume * 2\r\n end_phase1_4 = (batch.time_end_phase1\r\n + timedelta(hours=duration_p2)\r\n + timedelta(hours=duration_p3)\r\n + timedelta(hours=duration_p4))\r\n # Gets current month and is incremented for each iteration of for loop\r\n # to represent month number of this_month, next_month, and third_month.\r\n incre_month = datetime.now().month\r\n months = [\"this_month\", \"next_month\", \"third_month\"]\r\n # Calculates end of month inv. 
values for this, next, and third month.\r\n for index, _ in enumerate(months):\r\n # * 2 to get the number of bottles, 1 litre equals 2 bottles.\r\n volume = batch.volume * 2\r\n # True if phase4 has been reached and end month matches inc month.\r\n if end_phase4 != \"\" and end_phase4.month == incre_month:\r\n three_month_end_inv[batch.beer_type][months[index]] += volume\r\n # Elif True if p3 has been reached and end month matches inc month.\r\n elif end_phase3_4 != \"\" and end_phase3_4.month == incre_month:\r\n three_month_end_inv[batch.beer_type][months[index]] += volume\r\n # Elif True if p2 has been reached and end month matches inc month.\r\n elif end_phase2_4 != \"\" and end_phase2_4.month == incre_month:\r\n three_month_end_inv[batch.beer_type][months[index]] += volume\r\n # Elif True if p1 has been reached and end month matches inc month.\r\n elif end_phase1_4 != \"\" and end_phase1_4.month == incre_month:\r\n three_month_end_inv[batch.beer_type][months[index]] += volume\r\n incre_month += 1\r\n # If month number is incremented to 13, it is set to 1 (January).\r\n if incre_month == 13:\r\n incre_month = 1\r\n # Adds actual inventory quantities to calculated end of month quantities.\r\n for beer_type in three_month_end_inv:\r\n inventory_item_quantity = inventory.get_inv_items_quantity(beer_type)\r\n inventory_quantity = inventory_item_quantity[\"num\"]\r\n # Actual inventory quantities are only added to this_month inventory.\r\n three_month_end_inv[beer_type][\"this_month\"] += inventory_quantity\r\n # Holds three months (end of month) forecasted sales values.\r\n three_month_forecast = {\"dunkers\": {\"this_month\": 0, \"next_month\": 0,\r\n \"third_month\": 0},\r\n \"pilsner\": {\"this_month\": 0, \"next_month\": 0,\r\n \"third_month\": 0},\r\n \"red_helles\": {\"this_month\": 0, \"next_month\": 0,\r\n \"third_month\": 0}}\r\n # Builds date-index to access forecast value for this, next, and 3rd month.\r\n # 1. Builds date-index for this month.\r\n # Gets current datetime.\r\n current_datetime = datetime.now()\r\n # Gets current month.\r\n current_month = current_datetime.month\r\n # Gets current year.\r\n current_year = current_datetime.year\r\n # Creates date-index to access forecast value for this month.\r\n dt1st_month = datetime(current_year, current_month, 1) # 1st day of month.\r\n # Gets number of days of the current month.\r\n number_days = monthrange(current_year, current_month)[1]\r\n # 2. Builds date-index for next month.\r\n # Gets next datetime.\r\n next_datetime = current_datetime + timedelta(days=number_days)\r\n # Gets next month.\r\n next_month = next_datetime.month\r\n # Gets year in next month.\r\n next_months_year = next_datetime.year\r\n # Creates date-index to access forecast value for next month.\r\n dt2nd_month = datetime(next_months_year, next_month, 1)\r\n # Gets number of days of the next month.\r\n number_days = monthrange(next_months_year, next_month)[1]\r\n # 3. 
Builds date-index for 3rd month.\r\n # Gets 3rd datetime.\r\n third_datetime = next_datetime + timedelta(days=number_days)\r\n # Gets 3rd month.\r\n third_month = third_datetime.month\r\n # Gets year in 3rd month.\r\n third_months_year = third_datetime.year\r\n # Creates date-index to access forecast value for 3rd month.\r\n dt3rd_month = datetime(third_months_year, third_month, 1)\r\n # Gets and stores forecast values for three months in three_month_forecast.\r\n for beer_type in monthly_forecasts:\r\n forecast_1st = monthly_forecasts[beer_type].predicted_mean[dt1st_month]\r\n forecast_2nd = monthly_forecasts[beer_type].predicted_mean[dt2nd_month]\r\n forecast_3rd = monthly_forecasts[beer_type].predicted_mean[dt3rd_month]\r\n try:\r\n three_month_forecast[beer_type][\"this_month\"] = int(forecast_1st)\r\n three_month_forecast[beer_type][\"next_month\"] = int(forecast_2nd)\r\n three_month_forecast[beer_type][\"third_month\"] = int(forecast_3rd)\r\n except ValueError as error:\r\n app.config[\"logger\"].error(error)\r\n # Holds 3 months differ. between forecast and finished inv. for each beer.\r\n diff_3months_forecast_actual = {\"dunkers\": 0, \"pilsner\": 0,\r\n \"red_helles\": 0}\r\n # Calculates for each beer differ. between forecast and finished inventory.\r\n for beer_type in three_month_end_inv:\r\n # Holds finished inventory quantity for 3 months per beer type.\r\n fin_inv_beer_3months = 0\r\n # Holds forecasted sales quantity for 3 months per beer type.\r\n forecast_beer_3months = 0\r\n # 2nd for loop to iterate over dict in dict to calculate difference.\r\n for month in three_month_end_inv[beer_type]:\r\n fin_inv_beer_3months += three_month_end_inv[beer_type][month]\r\n forecast_beer_3months += three_month_forecast[beer_type][month]\r\n diff_beer_3months = fin_inv_beer_3months - forecast_beer_3months\r\n diff_3months_forecast_actual[beer_type] = diff_beer_3months\r\n # Determines which beer should be produced next;\r\n # beer type with highest negative difference between finished inventory and\r\n # sales forecast is recommended to be produced if equipment is available.\r\n # Gets beer type with highest negative difference.\r\n produce_beer = min(diff_3months_forecast_actual,\r\n key=lambda beer: diff_3months_forecast_actual[beer])\r\n used_tanks = []\r\n # Adds the names of all used tanks to used_tanks' list.\r\n for batch in batches.values():\r\n # True if tank is used.\r\n if batch.phase_current_tank != \"\":\r\n used_tanks.append(batch.phase_current_tank)\r\n all_tanks = tanks.get_tank_names()\r\n available_tanks = [tank for tank in all_tanks if tank not in used_tanks]\r\n capable_tanks = {}\r\n # Checks if tank with right capability is available.\r\n for tank_name in available_tanks:\r\n # Uses tank_name to get value of Tanks' instance var with same name.\r\n tank_value = tanks.get_tank_value(tank_name)\r\n # True if tank with right capability is available.\r\n if \"ferm\" in tank_value[\"capability\"]:\r\n # Puts tank's volume into capable_tanks dictionary.\r\n capable_tanks[tank_name] = tank_value[\"volume\"]\r\n # Selects tank with highest volume if tank with right capab. 
is available.\r\n if capable_tanks:\r\n use_tank = max(capable_tanks, key=lambda beer: capable_tanks[beer])\r\n use_tank_volume = capable_tanks[use_tank]\r\n else:\r\n use_tank = \"'currently no tank with right capability available'\"\r\n use_tank_volume = 0\r\n # Creates recommendation for the user.\r\n recommendation = (\"Based on the three-month forecast and production phases\"\r\n + \", available tanks, capabilities and volumes, it is \"\r\n + \"recommended to produce <b>{0}</b> in tank <b>{1}</b> \"\r\n + \"next.\").format(produce_beer, use_tank)\r\n # Creates reasoning for the user.\r\n # Creates HTML table containing three months end inventory.\r\n html_3months_end_inv_table = update_three_months_table(three_month_end_inv)\r\n # Creates HTML table containing three months forecasted sales.\r\n html_3months_foreca_table = update_three_months_table(three_month_forecast)\r\n reason = (\"Actual number of beers in inventory and actual number of beers \"\r\n + \"that will be finished in the next three months are:<br>\"\r\n + \"\"\"<table>\r\n <tr>\r\n <th></th>\r\n <th>This month</th>\r\n <th>Next month</th>\r\n <th>Third month</th>\r\n </tr>\"\"\" + html_3months_end_inv_table + \"</table><br>\"\r\n + \"Three months forecasted sales (in bottles) are:\"\r\n + \"\"\"<table>\r\n <tr>\r\n <th></th>\r\n <th>This month</th>\r\n <th>Next month</th>\r\n <th>Third month</th>\r\n </tr>\"\"\" + html_3months_foreca_table + \"</table><br>\"\r\n + \"Beer type with highest difference between forecast and \"\r\n + \"finished inventory is recommended to be produced if \"\r\n + \"equipment is available. The difference is (in bottles):<br>\"\r\n + \"\"\"<table>\r\n <tr>\r\n <th>Dunkers</th>\r\n <th>Pilsner</th>\r\n <th>Red Helles</th>\r\n </tr>\r\n <tr>\r\n <td>{0}</td>\r\n <td>{1}</td>\r\n <td>{2}</td>\r\n </tr></table><br>\r\n \"\"\".format(diff_3months_forecast_actual[\"dunkers\"],\r\n diff_3months_forecast_actual[\"pilsner\"],\r\n diff_3months_forecast_actual[\"red_helles\"])\r\n + \"Available tank(s) with right capability is/are:<b>\"\r\n + str(capable_tanks) + \"</b>, where the highest available \"\r\n + \"volume is <b>{}</b> litres. \".format(use_tank_volume)\r\n + (\"Thus, it is recommended to produce <b>{0}</b> in tank \"\r\n + \"<b>{1}</b>.\").format(produce_beer, use_tank))\r\n return (\"\"\"<style>\r\n h1, h2, h3 {\r\n font-family: arial, sans-serif;\r\n }\r\n table {\r\n font-family: arial, sans-serif;\r\n border-collapse: collapse;\r\n width: 100%;\r\n }\r\n td, th {\r\n border: 1px solid #dddddd;\r\n text-align: left;\r\n padding: 8px;\r\n }\r\n tr:nth-child(even) {\r\n background-color: #dddddd;\r\n }\r\n </style>\r\n <h2>Plan production</h2>\"\"\"\r\n + recommendation\r\n + \"<br><br><b>Reasoning:</b></br>\"\r\n + reason\r\n + \"\"\"<form action=\"/\" method=\"POST\">\r\n <input type=\"hidden\">\r\n <br>\r\n <input type=\"submit\" value=\"Go back to tracking screen\">\r\n </form>\"\"\")", "def put_qa(self, num_qa_records, qa_record):\n ierr = exolib.py_expqa(self.exoid, num_qa_records, qa_record.T)\n if ierr:\n raise ExodusIIWriterError(\"Error putting QA record\")", "def with_manual_kb_program(agent):\n\n helping = ['?', 'help']\n stopping = ['quit', 'stop', 'exit']\n actions = ['TurnRight', 'TurnLeft', 'Forward', 'Grab', 'Release', 'Shoot', 'Wait']\n queries = [('qp','Query a single proposition;\\n' \\\n + ' E.g. 
\\'qp B1_1\\' or \\'qp OK1_1_3\\', \\'qp HeadingWest4\\''),\n ('qpl','Query a-temporal location-based proposition at all x,y locations;\\n' \\\n + ' E.g., \\'qpl P\\' runs all queries of P<x>_<y>'),\n ('qplt','Query temporal and location-based propositions at all x,y locations;\\n' \\\n + ' E.g., \\'qplt OK 4\\' runs all queries of the OK<x>_<y>_4'),\n ('q!','Run ALL queries for optionally specified time (default is current time);\\n'\\\n + ' (can be time consuming!)')]\n\n def show_commands():\n print \"Available Commands:\"\n print \" The following are valid Hunt The Wumpus actions:\"\n print \" {0}\".format(', '.join(map(lambda a: '\\'{0}\\''.format(a), actions)))\n print \" Enter {0} to get this command info\" \\\n .format(' or '.join(map(lambda a: '\\'{0}\\''.format(a), helping)))\n print \" Enter {0} to stop playing\" \\\n .format(' or '.join(map(lambda a: '\\'{0}\\''.format(a), stopping)))\n print \" Enter 'env' to display current wumpus environment\"\n print \" Enter 'kbsat' to check if the agent's KB is satisfiable\"\n print \" If the KB is NOT satisfiable, then there's a contradiction that needs fixing.\"\n print \" NOTE: A satisfiable KB does not mean there aren't other problems.\"\n print \" Enter 'save-axioms' to save all of the KB axioms to 'kb-axioms.txt'\"\n print \" This will overwrite any existing 'kb-axioms.txt'\"\n print \" Enter 'save-clauses' to save all of the KB clauses to text file 'kb-clauses.txt'\"\n print \" This will overwrite any existing 'kb-clauses.txt'\"\n print \" Enter 'props' to list all of the proposition bases\"\n print \" Queries:\"\n for query,desc in queries:\n print \" {0} : {1}\".format(query,desc)\n\n def show_propositions():\n print \"Proposition Bases:\"\n print \" Atemporal location-based propositions (include x,y index: P<x>_<y>)\"\n print \" '\" + '\\', \\''.join(proposition_bases_atemporal_location) + '\\''\n print \" Perceptual propositions (include time index: P<t>)\"\n print \" '\" + '\\', \\''.join(proposition_bases_perceptual_fluents) + '\\''\n print \" Location fluent propositions (include x,y and time index: P<x>_<y>_<t>)\"\n print \" '\" + '\\', \\''.join(proposition_bases_location_fluents) + '\\''\n print \" State fluent propositions (include time index: P<t>)\"\n print \" '\" + '\\', \\''.join(proposition_bases_state_fluents[:4]) + '\\','\n print \" '\" + '\\', \\''.join(proposition_bases_state_fluents[4:]) + '\\''\n print \" Action propositions (include time index: P<t>)\"\n print \" '\" + '\\', \\''.join(proposition_bases_actions) + '\\''\n\n def write_list_to_text_file(filename,list):\n outfile = file(filename, 'w')\n for item in list:\n outfile.write('{0}\\n'.format(item))\n outfile.close()\n\n def check_kb_status():\n \"\"\"\n Tests whether the agent KB is satisfiable.\n If not, that means the KB contains a contradiction that needs fixing.\n However, being satisfiable does not mean the KB is correct.\n \"\"\"\n result = minisat(agent.kb.clauses)\n if result:\n print \"Agent KB is satisfiable\"\n else:\n print \"Agent KB is NOT satisfiable!! 
There is contradiction that needs fixing!\"\n\n def simple_query(proposition):\n \"\"\"\n Executes a simple query to the agent KB for specified proposition.\n \"\"\"\n result = agent.kb.ask(expr(proposition))\n if result == None:\n print \"{0}: Unknown!\".format(proposition)\n else:\n print \"{0}: {1}\".format(proposition,result)\n\n def location_based_query(proposition_base):\n \"\"\"\n Executes queries for the specified type of proposition, for\n each x,y location.\n proposition_base := as all of the propositions include in their\n name 1 or more indexes (for time and/or x,y location), the\n proposition_base is the simple string representing the base\n of the proposition witout the indexes, which are added in\n code, below.\n time := the time index of the propositions being queried\n \"\"\"\n display_env = WumpusEnvironment(agent.width, agent.height)\n start_time = clock()\n print \"Running queries for: {0}<x>_<y>\".format(proposition_base)\n for x in range(1,agent.width+1):\n for y in range(1,agent.height+1):\n query = expr('{0}{1}_{2}'.format(proposition_base,x,y))\n result = agent.kb.ask(query)\n if result == None:\n display_env.add_thing(Proposition(query,'?'),(x,y))\n else:\n display_env.add_thing(Proposition(query,result),(x,y))\n end_time = clock()\n print \" >>> time elapsed while making queries:\" \\\n + \" {0}\".format(end_time-start_time)\n print display_env.to_string(agent.time,\n title=\"All {0}<x>_<y> queries\".format(proposition_base))\n\n def location_time_based_query(proposition_base, time):\n \"\"\"\n Executes queries for the specified type of proposition, for\n each x,y location, at the specified time.\n proposition_base := as all of the propositions include in their\n name 1 or more indexes (for time and/or x,y location), the\n proposition_base is the simple string representing the base\n of the proposition witout the indexes, which are added in\n code, below.\n time := the time index of the propositions being queried\n \"\"\"\n display_env = WumpusEnvironment(agent.width, agent.height)\n start_time = clock()\n print \"Running queries for: {0}<x>_<y>_{1}\".format(proposition_base,time)\n for x in range(1,agent.width+1):\n for y in range(1,agent.height+1):\n query = expr('{0}{1}_{2}_{3}'.format(proposition_base,x,y,time))\n result = agent.kb.ask(query)\n if result == None:\n display_env.add_thing(Proposition(query,'?'),(x,y))\n else:\n display_env.add_thing(Proposition(query,result),(x,y))\n end_time = clock()\n print \" >>> time elapsed while making queries:\" \\\n + \" {0}\".format(end_time-start_time)\n print display_env.to_string(agent.time,\n title=\"All {0}<x>_<y>_{1} queries\".format(proposition_base,\n time))\n\n def run_all_queries(time):\n check_kb_status()\n for p in proposition_bases_perceptual_fluents:\n simple_query(p + '{0}'.format(time))\n for p in proposition_bases_atemporal_location:\n location_based_query(p)\n for p in proposition_bases_location_fluents:\n location_time_based_query(p,time)\n for p in proposition_bases_state_fluents:\n simple_query(p + '{0}'.format(time))\n # remove the quotes below and add quotes to the following if-statement\n # in order to query all actions from time 0 to now\n '''\n print \"Querying actions from time 0 to {0}\".format(time)\n for p in propositions_actions:\n for t in range(time+1):\n simple_query(p + '{0}'.format(t))\n '''\n if time-1 > 0:\n print \"Actions from previous time: {0}\".format(time-1)\n for p in proposition_bases_actions:\n simple_query(p + '{0}'.format(time-1))\n \n print \"FINISHED running all 
queries for time {0}\".format(time)\n\n def manual_kb_program(percept):\n\n print \"------------------------------------------------------------------\"\n print \"At time {0}\".format(agent.time)\n # update current location and heading based on current KB knowledge state\n print \" HWA.infer_and_set_belief_location()\"\n agent.infer_and_set_belief_location()\n print \" HWA.infer_and_set_belief_heading()\"\n agent.infer_and_set_belief_heading()\n\n percept_sentence = agent.make_percept_sentence(percept)\n print \" HWA.agent_program(): kb.tell(percept_sentence):\"\n print \" {0}\".format(percept_sentence)\n agent.kb.tell(percept_sentence) # update the agent's KB based on percepts\n\n clauses_before = len(agent.kb.clauses)\n print \" HWA.agent_program(): Prepare to add temporal axioms\"\n print \" Number of clauses in KB before: {0}\".format(clauses_before)\n agent.add_temporal_axioms()\n clauses_after = len(agent.kb.clauses)\n print \" Number of clauses in KB after: {0}\".format(clauses_after)\n print \" Total clauses added to KB: {0}\".format(clauses_after - clauses_before)\n agent.number_of_clauses_over_epochs.append(len(agent.kb.clauses))\n\n action = None\n while not action:\n print \"[{0}] You perceive: {1}\".format(agent.time,\n agent.pretty_percept_vector(percept))\n val = raw_input(\"Enter Action ('?' for list of commands): \")\n val = val.strip()\n if val in helping:\n print\n show_commands()\n print\n elif val in stopping:\n action = 'Stop'\n elif val in actions:\n action = val\n elif val == 'env':\n print\n print \"Current wumpus environment:\"\n print agent.env.to_string()\n elif val == 'props':\n print\n show_propositions()\n print\n elif val == 'kbsat':\n check_kb_status()\n print\n elif val == 'save-axioms':\n write_list_to_text_file('kb-axioms.txt',agent.kb.axioms)\n print \" Saved to 'kb-axioms.txt'\"\n print\n elif val == 'save-clauses':\n write_list_to_text_file('kb-clauses.txt',agent.kb.clauses)\n print \" Saved to 'kb-clauses.txt'\"\n print\n else:\n q = val.split(' ')\n if len(q) == 2 and q[0] == 'qp':\n simple_query(q[1])\n print\n elif len(q) == 2 and q[0] == 'qpl':\n location_based_query(q[1])\n print\n elif len(q) == 3 and q[0] == 'qplt':\n location_time_based_query(q[1],q[2])\n print\n elif q[0] == 'q!':\n if len(q) == 2:\n t = int(q[1])\n run_all_queries(t)\n else:\n run_all_queries(agent.time)\n print\n else:\n print \"'{0}' is an invalid command;\".format(val) \\\n + \" try again (enter '?' for list of commands)\"\n print\n\n # update KB with selected action\n agent.kb.tell(add_time_stamp(action, agent.time))\n\n agent.time += 1\n \n return action\n\n agent.program = manual_kb_program\n return agent", "def run(self):\n\t\tself.endpoint = self.from_reference.coord\t# the endpoint of every job is the thing ordering this job\n\n\t\tif to_do == \"carry\":\t# carry sth. from A to B\n\t\t\tpass\n\t\telif to_do == \"grade\":\t# grade an area for a building\n\t\t\tunit = self.searchUnit(self.endpoint, \"grader\")\n\t\t\t\n\t\telif to_do == \"build\":\t# build a building\n\t\t\tunit = self.searchUnit(self.endpoint, \"builder\")\n\t\t#TODO: if no settler fits to the building to be seized, one settler has to learn the job\n\t\telif to_do == \"learn a job\":\t# learn a job like lumberjack, butcher ... 
also for the soldier training\n\t\t\tpass\n\t\telif to_do == \"seize a building\":\t# civil and also military buildings\n\t\t\tpass", "def main():\n now = time.strftime('%Y%m%d%H%M%S')\n\n # info = get_info(now)\n # info_filename = 'info_' + now + '.csv'\n # info.to_csv(os.path.join('..', '..', 'data', 'raw', info_filename), index=False)\n\n questions = get_questions(now)\n\n # don't talk about all this detail in the talk", "def qualified_item(self):\n # Price check with base item\n self.log.info(\"Price checking Qual 1 via PLU\")\n pos.click(\"Price Check\")\n pos.enter_keypad(\"030\", after='enter')\n if self.selection_list_visible():\n pos.select_list_item(\"Qual 1 ($5.00)\")\n pos.click(\"enter\")\n else:\n tc_fail(\"Selection list didn't appear.\")\n\n # Confirm the right item, at the right price\n self.read_price_check(\"Qual 1\", \"$5.00\")\n # Add the item\n pos.click(\"Sell Item\")\n\n # Price check with qualifier\n self.log.info(\"Price checking Qual 1 via PLU\")\n pos.click(\"Price Check\")\n pos.enter_keypad(\"030\", after='enter')\n if self.selection_list_visible():\n pos.select_list_item(\"Test Type ($10.00)\")\n pos.click(\"enter\")\n else:\n tc_fail(\"Selection list didn't appear.\")\n\n # Confirm the right item, at the right price\n self.read_price_check(\"Qualifier 1\", \"$10.00\")\n # Add the item\n pos.click(\"Sell Item\")\n\n # Confirm we are in a transaction\n if not self.in_transaction():\n self.tc_fail(\"POS did not start a transaction; can not confirm item was added\")\n else:\n self.log.info(\"Confirmed we are in a transaction\")\n \n # Confirm we added the item\n ret1 = self.confirm_line(-2, \"Qual 1\", \"$5.00\")\n if ret1:\n self.log.info(\"Confirmed Qual 1 item added\")\n else:\n self.tc_fail(ret1)\n \n # Confirm we added the linked item\n ret2 = self.confirm_line(-1, \"Qualifier 1\", \"$10.00\")\n if ret2:\n self.log.info(\"Confirmed Qualifier 1 item added\")\n else:\n self.tc_fail(ret2)\n \n # Setup for next test\n self.recover()", "def test_answer_question(self):\n with self.client:\n \n \"\"\"first insert a question\"\"\"\n response = self.add_question(\"1\",\"hello\",\"hello world\",\"java\",\"kenneth\")\n self.assertEqual(response.status_code, 201)\n \"\"\"then get a specific question to answer\"\"\"\n response = self.post_answer(1,\"1\",\"try removing errors\")\n self.assertEqual(response.status_code, 201)", "def main(argv):\n # -- load our run database and make it global --\n global crysDB\n with open(\"crysDB.json\") as f:\n crysDB = json.load(f)\n\n # -- parse args --\n par = argparse.ArgumentParser(description=\"coherent crystal characterization suite\")\n arg = par.add_argument\n arg(\"-c\", \"--crys\", type=str, help=\"set crystal S/N\")\n arg(\"-p\", \"--proc\", type=str, help=\"process a crystal\")\n arg(\"-t\", \"--temp\", type=str, help='start temperature data taking')\n arg(\"-pt\", \"--printtemp\", type=str, help='print current temperature')\n arg(\"-a\", \"--all\", action=\"store_true\", help=\"process all crystals in the DB\")\n arg(\"-o\", \"--over\", action=\"store_true\", help=\"overwrite existing files\")\n arg(\"-z\", \"--zip\", action=\"store_true\", help='run gzip on raw files (on cenpa-rocks)')\n arg(\"-s\", \"--sync\", action=\"store_true\", help='sync DAQ with cenpa-rocks')\n args = vars(par.parse_args())\n\n # -- set parameters --\n crys_sn, overwrite = None, False\n\n if args[\"crys\"]:\n crys_sn = args[\"crys\"]\n\n if args[\"over\"]:\n overwrite = args[\"over\"]\n\n # -- run analysis --\n if args[\"proc\"]:\n sn = 
args[\"proc\"]\n process_crystal(sn, overwrite)\n\n if args[\"all\"]:\n all_sns = [k for k in crysDB if \"SN\" in k]\n for sn in all_sns:\n process_crystal(sn, overwrite)\n\n if args[\"sync\"]:\n sync_data()\n\n if args[\"zip\"]:\n # clean_gzip()\n zip_data(overwrite)\n\n if args[\"temp\"]:\n \"\"\"\n Run number should be the first run number entry (for 600V) in the ELOG.\n \"\"\"\n run_num = args[\"temp\"]\n measure_temp(run_num)\n\n if args[\"printtemp\"]:\n print_temp()", "def main():\n parser = argparse.ArgumentParser(description=\"\"\"Tester for YT Data API and different inputs\"\"\")\n parser.add_argument('-a', '--analytics', help='Performs a basic analytics lookup for the user\\'s channel entered')\n parser.add_argument('-c', '--comments', help='Performs a lookup of comments for the video id entered')\n args = parser.parse_args()\n\n if args.analytics:\n analytics = args.analytics\n analyt(analytics)\n\n if args.comments:\n comments = args.comments\n get_comments(comments)", "def organise_qa_output(metadata, base_dir, write_tag):\n filenames = metadata['FITSImageFilename']\n for i, fits_file in enumerate(filenames):\n kat_target = katpoint.Target(metadata['KatpointTargets'][i])\n\n # Move QA report and create metadata\n pb_filebase = os.path.splitext(fits_file)[0] + '_PB'\n qa_report = pb_filebase + '_continuum_validation_snr5.0_int'\n pb_dir = _productdir(metadata, base_dir, i, '_PB', write_tag)\n\n qa_dir = _productdir(metadata, base_dir, i, '_QA', write_tag)\n os.mkdir(qa_dir)\n os.rename(os.path.join(pb_dir, qa_report), qa_dir)\n make_report_metadata(metadata, qa_dir)\n\n # Move RMS image and create metadata\n rms_dir = _productdir(metadata, base_dir, i, '_RMS', write_tag)\n os.mkdir(rms_dir)\n rms_image = pb_filebase + '_aegean_rms'\n mean_pb_rms = _calc_rms(os.path.join(pb_dir, rms_image + FITS_EXT))\n\n make_image_metadata(metadata, '_PB', pb_dir, i,\n 'Continuum Image PB corrected',\n 'Continuum image PB corrected',\n mean_pb_rms)\n\n os.rename(os.path.join(pb_dir, rms_image + FITS_EXT),\n os.path.join(rms_dir, rms_image + FITS_EXT))\n _add_missing_axes(os.path.join(rms_dir, rms_image + FITS_EXT))\n _caption_pngs(rms_dir, rms_image, kat_target, 'RMS PB Corrected')\n make_image_metadata(metadata, '_PB_aegean_rms', rms_dir, i,\n 'Continuum PB Corrected RMS Image',\n 'Continuum PB Corrected RMS image',\n mean_pb_rms)\n\n # Move MEAN image and create metadata\n bkg_dir = _productdir(metadata, base_dir, i, '_BKG', write_tag)\n os.mkdir(bkg_dir)\n bkg_image = pb_filebase + '_aegean_bkg'\n os.rename(os.path.join(pb_dir, bkg_image + FITS_EXT),\n os.path.join(bkg_dir, bkg_image + FITS_EXT))\n _add_missing_axes(os.path.join(bkg_dir, bkg_image + FITS_EXT))\n _caption_pngs(bkg_dir, bkg_image, kat_target, 'MEAN PB Corrected')\n make_image_metadata(metadata, '_PB_aegean_bkg', bkg_dir, i,\n 'Continuum PB Corrected Mean Image',\n 'Continuum PB Corrected Mean image',\n mean_pb_rms)\n\n # Remove .writing tag\n dir_list = [pb_dir, qa_dir, rms_dir, bkg_dir]\n for product_dir in dir_list:\n os.rename(product_dir, os.path.splitext(product_dir)[0])", "def dump_qa(self):\n #- QA level outputs\n #qa_outfile = {}\n qa_outfig = {}\n for PA in self.palist:\n for QA in self.qalist[PA]:\n #qa_outfile[QA] = self.io_qa(QA)[0]\n qa_outfig[QA] = self.io_qa(QA)[1]\n \n #- make path if needed\n path = os.path.normpath(os.path.dirname(qa_outfig[QA]))\n if not os.path.exists(path):\n os.makedirs(path)\n\n return (qa_outfig)", "def main_driver(args):\n starttime = time.time()\n\n # Get all the schedd ads\n 
schedd_ads = get_schedds(args)\n logging.warning(\"&&& There are %d schedds to query.\", len(schedd_ads))\n\n pool = multiprocessing.Pool(processes=args.query_pool_size)\n\n if not args.skip_history:\n htcondor_es.history.process_histories(schedd_ads=schedd_ads,\n starttime=starttime,\n pool=pool,\n args=args)\n\n # Now that we have the fresh history, process the queues themselves.\n if args.process_queue:\n htcondor_es.queues.process_queues(schedd_ads=schedd_ads,\n starttime=starttime,\n pool=pool,\n args=args)\n\n pool.close()\n pool.join()\n\n logging.warning(\"@@@ Total processing time: %.2f mins\", ((time.time()-starttime)/60.))\n\n return 0", "def test_question_products(self):\n p1 = ProductFactory(slug='b2g')\n p2 = ProductFactory(slug='mobile')\n p3 = ProductFactory(slug='desktop')\n\n QuestionFactory(product=p2)\n QuestionFactory(product=p2)\n QuestionFactory(product=p3)\n\n self.refresh()\n\n product_vals = (\n (p1.slug, 0),\n (p2.slug, 2),\n (p3.slug, 1),\n )\n\n qs = {'a': 1, 'w': 2, 'format': 'json'}\n for products, number in product_vals:\n qs.update({'product': products})\n response = self.client.get(reverse('search.advanced'), qs)\n eq_(number, json.loads(response.content)['total'])", "def __init__(self, question=u\"\", tier=0, answers=[], game_round=0,\n media=(\"\", \"\", \"\"), media_path=\"data\", web_root=\"data\"):\n self.question = question\n self.answers = answers\n self.tier = tier\n self.game_round = game_round\n self.media = media\n self.media_path = media_path\n self.web_root = web_root", "def scn2quicklook(self, unq_id):\n if (self.quicklookPath is None) or (not os.path.exists(self.quicklookPath)):\n raise EODataDownException(\"The quicklook path does not exist or not provided, please create and run again.\")\n\n if not os.path.exists(self.ardProdTmpPath):\n raise EODataDownException(\"The tmp path does not exist, please create and run again.\")\n\n logger.debug(\"Creating Database Engine and Session.\")\n db_engine = sqlalchemy.create_engine(self.db_info_obj.dbConn)\n session_sqlalc = sqlalchemy.orm.sessionmaker(bind=db_engine)\n ses = session_sqlalc()\n logger.debug(\"Perform query to find scene.\")\n query_result = ses.query(EDDSentinel1ASF).filter(EDDSentinel1ASF.PID == unq_id).one_or_none()\n if query_result is not None:\n if not query_result.ARDProduct:\n raise EODataDownException(\"Cannot create a quicklook as an ARD product has not been created.\")\n if query_result.Invalid:\n raise EODataDownException(\"Cannot create a quicklook as image has been assigned as 'invalid'.\")\n\n scn_json = query_result.ExtendedInfo\n if (scn_json is None) or (scn_json == \"\"):\n scn_json = dict()\n\n ard_img_path = query_result.ARDProduct_Path\n eodd_utils = eodatadown.eodatadownutils.EODataDownUtils()\n ard_img_file = eodd_utils.findFile(ard_img_path, '*dB*.tif')\n\n out_quicklook_path = os.path.join(self.quicklookPath,\n \"{}_{}\".format(query_result.Product_File_ID, query_result.PID))\n if not os.path.exists(out_quicklook_path):\n os.mkdir(out_quicklook_path)\n\n tmp_quicklook_path = os.path.join(self.ardProdTmpPath,\n \"quicklook_{}_{}\".format(query_result.Product_File_ID, query_result.PID))\n if not os.path.exists(tmp_quicklook_path):\n os.mkdir(tmp_quicklook_path)\n\n # VV, VH, VV/VH\n bands = '1,2,3'\n\n ard_img_basename = os.path.splitext(os.path.basename(ard_img_file))[0]\n\n quicklook_imgs = list()\n quicklook_imgs.append(os.path.join(out_quicklook_path, \"{}_250px.jpg\".format(ard_img_basename)))\n quicklook_imgs.append(os.path.join(out_quicklook_path, 
\"{}_1000px.jpg\".format(ard_img_basename)))\n\n import rsgislib.tools.visualisation\n rsgislib.tools.visualisation.createQuicklookImgs(ard_img_file, bands, outputImgs=quicklook_imgs,\n output_img_sizes=[250, 1000], scale_axis='auto',\n img_stats_msk=None, img_msk_vals=1,\n stretch_file=self.std_vis_img_stch,\n tmp_dir=tmp_quicklook_path)\n\n if not (\"quicklook\" in scn_json):\n scn_json[\"quicklook\"] = dict()\n\n scn_json[\"quicklook\"][\"quicklookpath\"] = out_quicklook_path\n scn_json[\"quicklook\"][\"quicklookimgs\"] = quicklook_imgs\n query_result.ExtendedInfo = scn_json\n flag_modified(query_result, \"ExtendedInfo\")\n ses.add(query_result)\n ses.commit()\n else:\n raise EODataDownException(\"Could not find input image with PID {}\".format(unq_id))\n ses.close()\n logger.debug(\"Closed the database session.\")", "def run_data_prep_confirmtion(context=''):\n try:\n message_types_operation_permission = ['MT600', 'MT699', 'MT620']\n message_type_archive_task = ['MT600', 'MT699', 'MT620']\n print((\"-\"*100))\n print(\"Running Data Prep for FSwiftCommodityConfirmationOut\")\n print(\"\\nStep-1\")\n print(\"Creating State Charts\")\n FCommodityConfirmationOutSC.create_commodity_conf_out_sc()\n FCommodityConfirmationOutSC.create_free_format_out_sc()\n print(\"State Chart creation completed.\")\n print(\"\\nStep-2\")\n print(\"Creating Archiving tasks\")\n FSwiftMLUtils.create_archive_task(message_type_archive_task, STATES, inorout)\n print(\"Creation of Archiving tasks completed\")\n print(\"\\nStep-3\")\n print(\"Creating operation permissions\")\n FSwiftMLUtils.create_operation_permissions(message_types_operation_permission, inorout)\n print(\"Creation of operation permissions completed\")\n print(\"\\nStep-4\")\n print(\"Creating Additional Infos/Choice list\")\n adm_prepare_status_message()\n print(\"Creation of Additional Infos/Choice completed\")\n print(\"\\n\")\n print(\"FSwiftCommodityConfirmationOut_DataPrep is successfully executed.\")\n print((\"-\"*100))\n except Exception as e:\n print((\"Exception in running FSwiftCommodityConfirmationOut DataPrep :\", str(e)))", "def main(self):\n self.jamf_url = self.env.get(\"JSS_URL\")\n self.jamf_user = self.env.get(\"API_USERNAME\")\n self.jamf_password = self.env.get(\"API_PASSWORD\")\n self.ea_script_path = self.env.get(\"ea_script_path\")\n self.ea_name = self.env.get(\"ea_name\")\n self.replace = self.env.get(\"replace_ea\")\n self.ea_data_type = self.env.get(\"ea_data_type\")\n self.ea_inventory_display = self.env.get(\"ea_inventory_display\")\n self.sleep = self.env.get(\"sleep\")\n # handle setting replace in overrides\n if not self.replace or self.replace == \"False\":\n self.replace = False\n\n # clear any pre-existing summary result\n if \"jamfextensionattributeuploader_summary_result\" in self.env:\n del self.env[\"jamfextensionattributeuploader_summary_result\"]\n ea_uploaded = False\n\n # handle files with a relative path\n if not self.ea_script_path.startswith(\"/\"):\n found_template = self.get_path_to_file(self.ea_script_path)\n if found_template:\n self.ea_script_path = found_template\n else:\n raise ProcessorError(f\"ERROR: EA file {self.ea_script_path} not found\")\n\n # now start the process of uploading the object\n self.output(f\"Checking for existing '{self.ea_name}' on {self.jamf_url}\")\n\n # obtain the relevant credentials\n token, send_creds, _ = self.handle_classic_auth(\n self.jamf_url, self.jamf_user, self.jamf_password\n )\n\n # check for existing - requires obj_name\n obj_type = 
\"extension_attribute\"\n obj_name = self.ea_name\n obj_id = self.get_api_obj_id_from_name(\n self.jamf_url,\n obj_name,\n obj_type,\n enc_creds=send_creds,\n token=token,\n )\n\n if obj_id:\n self.output(\n \"Extension Attribute '{}' already exists: ID {}\".format(\n self.ea_name, obj_id\n )\n )\n if self.replace:\n self.output(\n \"Replacing existing Extension Attribute as 'replace_ea' is set to {}\".format(\n self.replace\n ),\n verbose_level=1,\n )\n else:\n self.output(\n \"Not replacing existing Extension Attribute. Use replace_ea='True' to enforce.\",\n verbose_level=1,\n )\n return\n\n # upload the EA\n self.upload_ea(\n self.jamf_url,\n self.ea_name,\n self.ea_data_type,\n self.ea_inventory_display,\n self.ea_script_path,\n obj_id=obj_id,\n enc_creds=send_creds,\n token=token,\n )\n ea_uploaded = True\n\n # output the summary\n self.env[\"extension_attribute\"] = self.ea_name\n self.env[\"ea_uploaded\"] = ea_uploaded\n if ea_uploaded:\n self.env[\"jamfextensionattributeuploader_summary_result\"] = {\n \"summary_text\": (\n \"The following extension attributes were created or \"\n \"updated in Jamf Pro:\"\n ),\n \"report_fields\": [\"name\", \"path\"],\n \"data\": {\"name\": self.ea_name, \"path\": self.ea_script_path},\n }", "def __init__(self, project, user_paths):\n super(Quartus, self).__init__(project, self.executables, user_paths)\n self.quartus_sh = os.path.join(self.path, 'quartus_sh')", "def details(self):\n print \"ABC - Deployer.details()\"", "def main():\n\t# GET THE DIALOG CONTENT\n\tpolicyFlag = int(sys.argv[3])\n\tif policyFlag == 0:\n\t\texistedIterNum = 15000\n\t\texistedThetaFileName = 'algorithms/theta/cycle_tree/of0w1.0g5v0.0625l0.05'\n\telif policyFlag == 1:\n\t\texistedIterNum = 10000\n\t\texistedThetaFileName = 'algorithms/theta/cycle_tree/of0w1.0g5v0.0625l0.05Retroflex'\n\n\tqLearnOpts = {'gamma': 1.0, \n\t\t\t\t 'alpha': 0.0, \n\t\t\t\t 'epsilon': 0.0}\n\tnumOfTurn = util.returnConvertedIndexListCount('b','cycle_tree')\n\tnumofgauss = 5\n\tvar = 0.0625\n\tlamda = 0.05\n\tunitNum = 101\n\ta = qlearningAgents.FittedQLearningAgent(numOfTurn,numofgauss,var,lamda,unitNum, **qLearnOpts)\t\t\n\ta.openThetaFile(existedThetaFileName,existedIterNum)\n\n\tturnNum = int(sys.argv[1])\n\tuserUnitScore = []\n\tuserUnitScoreVector = sys.argv[2].split(',')\n\tfor i in userUnitScoreVector:\n\t\t\tuserUnitScore.append(float(i)/100.0)\n\n\tstate = State.State(turnNum, userUnitScore)\n\tprint a.getAction(state)", "def __init__(self) -> None:\n self.path_config = '/home/equipment/EQ-scripts/equipment.conf'\n self.configParse()\n self.request_devices = \"\"\"With arm_address as (SELECT av.obj_id device_id,\n av.value_raw house_id\n FROM os_usr.dev_attr_values av\n WHERE av.attr_id = 3),\n swithes as (SELECT device_type_id\n FROM os_eqm.device_types\n WHERE device_class IN\n (\n SELECT device_class_id\n FROM os_eqm.device_classes\n WHERE guid IN\n (\n SELECT obj_guid\n FROM os_lib.objects_in_nav_categories\n WHERE nav_cat_id in\n (\n SELECT nav_cat_id\n FROM nav_categories\n WHERE guid = '75C0F3733B084DBDAC604167D298B2F5'\n )\n )\n ))\n SELECT d.device_id,\n na.net_address,\n dt.name,\n trim(os_usr.ertel_utils.get_prop_str(d.device_id,'MAC_ADRES_USTROJSTVA')) \n mac_sw\n FROM os_usr.geo_addresses ga,\n os_eqm.net_addresses na,\n arm_address arm ,\n device_types dt,\n devices d,\n swithes sw\n WHERE arm.house_id = ga.house_id\n and arm.device_id = d.device_id\n and na.device_id = d.device_id and na.is_management = '1'\n AND dt.device_type_id = d.device_type\n and dt.device_type_id 
in sw.device_type_id\n and ga.unified_house_id = '<house_id>'\n \"\"\"\n self.request_adresses = \"\"\"SELECT av.obj_id device_id, av.value_raw house_id\n FROM os_usr.dev_attr_values av \n WHERE av.attr_id = 2 AND av.VALUE_RAW LIKE '%<house>%'\"\"\"", "def execute(self):\n\n super(BasicInstaller, self).execute()", "def execute(self):\n\n super(BasicInstaller, self).execute()", "def admin(port: int, debug: bool) -> None:\n from quiz_bot.admin import quizbot_app, set_basic_settings\n\n set_basic_settings()\n cloud_maker = CloudMaker(\n wordcloud=WordCloud(background_color=\"white\", width=1280, height=640),\n storage=MessageStorage(MessageCloudSettings()),\n )\n statistics_collector = StatisticsCollector(\n user_storage=UserStorage(), challenge_storage=ChallengeStorage(), participant_storage=ParticipantStorage()\n )\n quizbot_app(cloud_maker=cloud_maker, statistics_collector=statistics_collector).run(\n host='0.0.0.0', port=port, debug=debug\n )", "def main():\n create_all_views()\n outlog_q1 = question_1()\n outlog_q2 = question_2()\n outlog_q3 = question_3()\n\n db.close()\n\n generateLog(outlog_q1, outlog_q2, outlog_q3)", "def __init__(self, options):#dbname, outfile, save_dir, table, idcolumn, txtcolumn, doGraphs):\n\n t = time.localtime()\n\n self.doGraphs = options.doGraphs\n self.allow_uncertainty = options.allow_uncertainty\n self.proc_category = options.category\n\n self.reports = Report.objects.filter(dataset=options.dataset)[:options.number]\n\n #print \"number of reports to process\",len(self.reports)\n #raw_input('continue')\n\n # create context objects for each of the questions we want to be answering\n self.context = {\"disease\":pyConText.pyConText()}\n\n rsltsDB = options.odbname\n\n alerts=Alert.objects.all()\n alerts.delete()\n rslts=Result.objects.all()\n rslts.delete()\n\n # Create the itemData object to store the modifiers for the analysis\n # starts with definitions defined in pyConText and then adds\n # definitions specific for peFinder\n\n #label specifies whether the user wants a domain or linguistic set.\n\n #items returns an array of contextItems (e.g. 
getCategory(), getLiteral() )\n items_modifiers = itemData.instantiateFromSQLite(\"../pyConTextWeb.db\",options.label_modifiers,\"pyConTextKit_lexical\")\n items_targets = itemData.instantiateFromSQLite(\"../pyConTextWeb.db\",options.label_targets,\"pyConTextKit_lexical\")\n\t\t#itemData = itemData.itemData(items)\n \"\"\"\n probableNegations = itemData('PROBABLE_NEGATED_EXISTENCE')\n definiteNegations = itemData('DEFINITE_NEGATED_EXISTENCE')\n pseudoNegations = itemData('PSEUDONEG')\n indications = itemData('INDICATION')\n historicals = itemData('HISTORICAL')\n conjugates = itemData('CONJ')\n probables = itemData('PROBABLE_EXISTENCE')\n definites = itemData('DEFINITE_EXISTENCE')\n future = itemData('FUTURE')\n critItems = itemData('CRIT_ITEMS')\n\n self.modifiers = {\"disease\":itemData('')}\n self.modifiers[\"disease\"].prepend(pseudoNegations)\n self.modifiers[\"disease\"].prepend(definiteNegations)\n self.modifiers[\"disease\"].prepend(probableNegations)\n self.modifiers[\"disease\"].prepend(probables)\n self.modifiers[\"disease\"].prepend(definites)\n self.modifiers[\"disease\"].prepend(indications)\n self.modifiers[\"disease\"].prepend(conjugates)\n self.modifiers[\"disease\"].prepend(future)\n self.modifiers[\"disease\"].prepend(historicals)\n \t\"\"\"\n\n # Quality targets (generated from category parameter set by parser)\n if( options.category.lower() == 'all'):\n targetItems = critItems\n else:\n targetItems = itemData(options.category)\n self.targets = {\"disease\":targetItems}\n self.models = {}", "def setup_cmd_args():\n parser = argparse.ArgumentParser(description=\"This program will query G-POD and COPHUB on the same datasets, in order to obtain the number of data results, compare them compile a report with the differences.\", formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n # parser.add_argument(\"root_dir\", help=\"The root directory containing data to check\")\n # parser.add_argument(\"--workspace\", help=\"Set Workspace manually\")\n parser.add_argument(\"--outputlist\", help=\"Folder to write the output lists with the un-synced products.\", default=\"c:\\\\temp\\\\\")\n parser.add_argument(\"--daysback\", help=\"Report with a given number of days back from today\", default=0)\n parser.add_argument(\"--dataset\", help=\"Set which dataset to query (chose S3A_SR_1_SRA_A_PREOPS or S3B_SR_1_SRA_A_NTC)\")\n parser.add_argument(\"--startdate\", help=\" The Start Date (format: YYYY-MM-DD) \", default=\"2016-06-01\")\n parser.add_argument(\"--enddate\",help=\" The End Date (format: YYYY-MM-DD)\")\n parser.add_argument(\"--cphubuser\",help=\"COPHUB username\", required=True)\n parser.add_argument(\"--cphubpw\",help=\"COPHUB password\", required=True)\n parser.add_argument(\"-email\", type=str, help=\"Email to send the results\", action=\"append\")\n parser.add_argument('-t', action='store_true', help=\"Today as enddate. 
Otherwise the last day of the previous month is considered.\")\n parser.add_argument('-n', action='store_true', help=\"Normal numeric check\")\n parser.add_argument('-m', action='store_true', help=\"Monthly check with product listing.\")\n return parser.parse_args()", "def multiqc():\n \n parser = argparse.ArgumentParser(\n \n description='pyrpipe diagnostic utility\\nGenerate report with multiqc.',\n \n usage='''pyrpipe_diagnostic multiqc [<args>] <logfile>\n \n ''') \n parser.add_argument('-o', help='out directory \\ndefault: <./>',action=\"store\")\n parser.add_argument('-c',help='Dump command options [(a)ll,fa(i)l,(p)ass]\\ndefault: a',default='a',action=\"store\")\n parser.add_argument('-v',help='verbose',action=\"store_true\")\n parser.add_argument('-f',help='Filter by programs. Provide a comma-separated list e.g., prefetch,STAR,bowtie2 \\ndefault None')\n parser.add_argument('-t',help='Temporary directory. \\ndefault ./tmp',action=\"store\")\n parser.add_argument('-r',help='Remove stdout files after processing. \\ndefault ./tmp',action=\"store_true\")\n parser.add_argument('logfile', help='The log file generated by pyrpipe or root directory to search available logs',action=\"store\")\n args = parser.parse_args(sys.argv[2:])\n \n logFile=args.logfile\n \n #parse args\n vFlag=args.v\n if vFlag:\n print(\"Generating MutiQC report\")\n outDir=\"\"\n if args.o is None:\n outDir=os.getcwd()\n else:\n outDir=args.o\n \n \n filters=[]\n if args.f is not None:\n filters= args.f.split(',')\n \n #create temp dir\n tempDir=\"\"\n if args.t is not None:\n tempDir= args.t\n else:\n tempDir=os.path.join(os.getcwd(),\"tmp\")\n #create tmp dir\n if not pu.check_paths_exist(tempDir):\n pu.mkdir(tempDir) \n \n #run multiqc\n #if log file is used\n if pu.check_files_exist(logFile):\n reports.generate_multiqc_from_log(logFile,filters,tempDir,outDir=outDir,coverage=args.c,verbose=args.v,cleanup=args.r)\n else:\n reports.generate_multiqc(logFile,tempDir,outDir=outDir,coverage=args.c,verbose=args.v,cleanup=args.r)", "def test_dex_abilities(app, qtbot):\n for dex in range(1, 26):\n app.dex.setText(str(dex))\n app.init_adj.setText(str(dex_abilities.get_dex_ability(dex, 0)))\n app.missile_adj.setText(str(dex_abilities.get_dex_ability(dex, 1)))\n app.ac_adj.setText(str(dex_abilities.get_dex_ability(dex, 2)))\n\n assert app.Dex_Output_label.text() == str(dex)\n if 7 <= dex <= 14:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_7_14.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_7_14.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_7_14.AC_Adj)\n elif dex == 15:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_15.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_15.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_15.AC_Adj)\n elif dex == 16:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_16.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_16.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_16.AC_Adj)\n elif dex == 17:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_17.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_17.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_17.AC_Adj)\n elif 18 <= dex <= 20:\n assert app.Init_Adj_Output_label.text() == 
str(dex_abilities.dex_18_20.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_18_20.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_18_20.AC_Adj)\n elif 21 <= dex <= 23:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_21_23.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_21_23.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_21_23.AC_Adj)\n elif dex == 24 or dex == 25:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_24_25.Init_Adj)\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_24_25.Missile_Attack)\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_24_25.AC_Adj)\n else:\n assert app.Init_Adj_Output_label.text() == str(dex_abilities.dex_abilities[dex][0])\n assert app.Missile_Bonus_Output_label.text() == str(dex_abilities.dex_abilities[dex][1])\n assert app.AC_Adj_Output_label_55.text() == str(dex_abilities.dex_abilities[dex][2])", "async def set_questions(self, ctx: commands.Context):\n\n def check(m):\n return m.author == ctx.author and m.channel == ctx.channel\n\n await ctx.send(\n \"Let's set up those questions we've not pre-filled:\\nYou will be setting questions 8-12. You can view the preloaded questions by passing `{}appq`. To begin, reply with `admin abuse` *spelled exact*\".format(\n ctx.prefix\n )\n )\n try:\n confirmation = await ctx.bot.wait_for(\"message\", check=check, timeout=20)\n if confirmation.content.lower() != \"admin abuse\":\n return await ctx.send(\"Alright, let's do these later then\")\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took to long to respond, gotta be smarter than the users you're hiring for sure.\"\n )\n\n app_questions = await self.config.guild(ctx.guild).app_questions.get_raw()\n question_8 = app_questions[\"question8\"]\n question_9 = app_questions[\"question9\"]\n question_10 = app_questions[\"question10\"]\n question_11 = app_questions[\"question11\"]\n question_12 = app_questions[\"question12\"]\n await ctx.send(\n \"Alright, let's start with question 8: You have 5min to decide and respond with question you'd like, or respond with cancel to do this later\"\n )\n\n if question_8 is not None:\n await ctx.send(\n f\"Looks like question 8 is currently `{question_8}`:\\n Do you want to change this? Type `no` to skip or the question you wish to change to if you want to change.\"\n )\n try:\n submit_8 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_8.content.lower() != \"no\":\n if len(submit_8.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question8.set(\n submit_8.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n\n if question_8 is None:\n try:\n submit_8 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_8.content.lower() != \"cancel\":\n if len(submit_8.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question8.set(\n submit_8.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. 
Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 9: Please respond with your next app question\")\n\n if question_9 is not None:\n await ctx.send(\n f\"Looks like question 9 is currently `{question_9}`:\\n Do you want to change this? Type `no` to skip or the question you wish to change to if you want to change.\"\n )\n try:\n submit_9 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_9.content.lower() != \"no\":\n if len(submit_9.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question9.set(\n submit_9.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 10: Please respond with your next app question\")\n\n if question_9 is None:\n try:\n submit_9 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_9.content.lower() != \"cancel\":\n if len(submit_9.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question9.set(\n submit_9.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 10: Please respond with your next app question\")\n\n if question_10 is not None:\n await ctx.send(\n f\"Looks like question 10 is currently `{question_10}`:\\n Do you want to change this? Type `no` to skip or the question you wish to change to if you want to change.\"\n )\n try:\n submit_10 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_10.content.lower() != \"no\":\n if len(submit_10.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question10.set(\n submit_10.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 11: Please respond with your next app question\")\n\n if question_10 is None:\n try:\n submit_10 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_10.content.lower() != \"cancel\":\n if len(submit_10.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question10.set(\n submit_10.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 11: Please respond with your next app question\")\n\n if question_11 is not None:\n await ctx.send(\n f\"Looks like question 11 is currently `{question_11}`:\\n Do you want to change this? Type `no` to skip or the question you wish to change to if you want to change.\"\n )\n try:\n submit_11 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_11.content.lower() != \"no\":\n if len(submit_11.content) > 750:\n return await ctx.send(\n \"Talkitive are we? 
Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question11.set(\n submit_11.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 12: Please respond with your next app question\")\n\n if question_11 is None:\n try:\n submit_11 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_11.content.lower() != \"cancel\":\n if len(submit_11.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question11.set(\n submit_11.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n await ctx.send(\"Moving to question 12: Please respond with your next app question\")\n\n if question_12 is not None:\n await ctx.send(\n f\"Looks like question 12 is currently `{question_12}`:\\n Do you want to change this? Type `no` to skip or the question you wish to change to if you want to change.\"\n )\n try:\n submit_12 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_12.content.lower() != \"no\":\n if len(submit_12.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question12.set(\n submit_12.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n\n if question_12 is None:\n try:\n submit_12 = await ctx.bot.wait_for(\"message\", check=check, timeout=300)\n if submit_12.content.lower() != \"cancel\":\n if len(submit_12.content) > 750:\n return await ctx.send(\n \"Talkitive are we? Too many characters to fit in final embed, shorten the question some\"\n )\n await self.config.guild(ctx.guild).app_questions.question12.set(\n submit_12.content\n )\n except asyncio.TimeoutError:\n return await ctx.send(\n \"Took too long bud. Let's be coherent for this and try again.\"\n )\n\n await ctx.send(\n \"That's all the questions and your apps are set *maybe, if you answered, anyway*. 
Check this with `{}appq`\".format(\n ctx.prefix\n )\n )", "def test_basic_info(self):\n\n url = reverse('stock-item-detail', kwargs={'pk': 1})\n\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n\n html = str(response.content)\n\n # Part name\n self.assertIn('Stock Item: M2x4 LPHS', html)\n\n # Quantity\n self.assertIn('<h5>Available Quantity</h5>', html)\n self.assertIn('<h5>4000', html)\n\n # Batch code\n self.assertIn('Batch', html)\n self.assertIn('<td>B123</td>', html)\n\n # Actions to check\n actions = [\n \"id=\\\\\\'stock-count\\\\\\' title=\\\\\\'Count stock\\\\\\'\",\n \"id=\\\\\\'stock-add\\\\\\' title=\\\\\\'Add stock\\\\\\'\",\n \"id=\\\\\\'stock-remove\\\\\\' title=\\\\\\'Remove stock\\\\\\'\",\n \"id=\\\\\\'stock-move\\\\\\' title=\\\\\\'Transfer stock\\\\\\'\",\n \"id=\\\\\\'stock-duplicate\\\\\\'\",\n \"id=\\\\\\'stock-edit\\\\\\'\",\n \"id=\\\\\\'stock-delete\\\\\\'\",\n ]\n\n # Initially we should not have any of the required permissions\n for act in actions:\n self.assertNotIn(act, html)\n\n # Give the user all the permissions\n self.assignRole('stock.add')\n self.assignRole('stock.change')\n self.assignRole('stock.delete')\n\n response = self.client.get(url)\n html = str(response.content)\n\n for act in actions:\n self.assertIn(act, html)", "def run(ctx, test_plan, only):\n\n handler = ValidateCommandHandler(ctx.obj['qa_dir'])\n if handler.validate():\n handler = RunCommandHandler(ctx.obj['qa_dir'],\n vcs_adapter=__vcs_factory__.create_cvs_adapter(ctx.obj['vcs']),\n test_plan=test_plan,\n report_dir=ctx.obj['report_dir'],\n debug=ctx.obj['debug'])\n\n handler.run_test_cases(only=only)\n\n else:\n exit(1)", "def main(self):\n\n self.setRecordAudio(True)\n self.general()\n sleep(1)\n self.introduction()\n sleep(1)\n self.get_name()\n sleep(1)\n self.get_age()\n sleep(1)\n self.get_origin()\n sleep(1)\n self.get_company()\n sleep(1)\n self.get_travel_route()\n sleep(1)\n self.get_entrance()\n sleep(1)\n self.get_documentation()\n sleep(1)\n self.sayAnimated(\n 'OK ' + self.name + '. We would like to know why you came to The Netherlands. Can you please answer the following '\n 'questions with yes, or, no?')\n self.speechLock.acquire()\n self.stopListening()\n self.get_exclusion()\n sleep(1)\n self.get_conflict()\n sleep(1)\n self.get_inhumanity()\n sleep(1)\n self.get_family()\n sleep(1)\n\n # end interview and save answers\n self.wrapup()\n self.store_story()", "def execute(self):", "def execute(self):", "def execute(self):", "def execute(self):", "def oq_run_classical_psha(job_ini, exports='csv', oq_version=default_oq_version, dir_info=None): \n # the run() method has been turned into private since v3.11\n # the get_last_calc_id() and get_datadir() have been moved to commonlib.logs since v3.12\n # the datastore has been moved to commonlib since v3.12\n # Note: the extracting realizations method was kindly shared by Dr. Anne Husley\n vtag = int(oq_version.split('.')[1])\n if vtag <= 10:\n try:\n print('FetchOpenQuake: running Version {}.'.format(oq_version))\n # reloading \n from openquake.commands.run import run\n from openquake.calculators.export.hazard import export_realizations\n\n #run.main([job_ini], exports=exports)\n # invoke/modify deeper openquake commands here to make it compatible with \n # the pylauncher on stampede2 for parallel runs... 
\n from openquake.baselib import datastore, performance, general\n from openquake.server import dbserver\n from openquake.calculators import base\n from openquake.commonlib import readinput, logs\n dbserver.ensure_on()\n global calc_path\n loglevel = 'info'\n params = {}\n reuse_input = False\n concurrent_tasks = None\n pdb = None\n hc_id = None\n for i in range(1000):\n try:\n calc_id = logs.init('nojob', getattr(logging, loglevel.upper()))\n except:\n time.sleep(0.01)\n continue\n else:\n print('FetchOpenQuake: log created.')\n break\n # disable gzip_input\n base.BaseCalculator.gzip_inputs = lambda self: None\n with performance.Monitor('total runtime', measuremem=True) as monitor:\n if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):\n os.environ['OQ_DISTRIBUTE'] = 'processpool'\n oqparam = readinput.get_oqparam(job_ini, hc_id=hc_id)\n if hc_id and hc_id < 0: # interpret negative calculation ids\n calc_ids = datastore.get_calc_ids()\n try:\n hc_id = calc_ids[hc_id]\n except IndexError:\n raise SystemExit(\n 'There are %d old calculations, cannot '\n 'retrieve the %s' % (len(calc_ids), hc_id))\n calc = base.calculators(oqparam, calc_id)\n calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,\n exports=exports, hazard_calculation_id=hc_id,\n rlz_ids=())\n\n calc_id = datastore.get_last_calc_id()\n path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id)\n dstore = datastore.read(path)\n export_realizations('realizations', dstore)\n except:\n print('FetchOpenQuake: Classical PSHA failed.')\n return 1\n elif vtag == 11:\n try:\n print('FetchOpenQuake: running Version {}.'.format(oq_version))\n # reloading \n from openquake.commands import run\n from openquake.calculators.export.hazard import export_realizations\n\n #run.main([job_ini], exports=exports)\n # invoke/modify deeper openquake commands here to make it compatible with \n # the pylauncher on stampede2 for parallel runs... 
\n from openquake.baselib import datastore, performance, general\n from openquake.server import dbserver\n from openquake.calculators import base\n from openquake.commonlib import readinput, logs\n dbserver.ensure_on()\n global calc_path\n loglevel = 'info'\n params = {}\n reuse_input = False\n concurrent_tasks = None\n pdb = False\n for i in range(1000):\n try:\n calc_id = logs.init('nojob', getattr(logging, loglevel.upper()))\n except:\n time.sleep(0.01)\n continue\n else:\n print('FetchOpenQuake: log created.')\n break\n # disable gzip_input\n base.BaseCalculator.gzip_inputs = lambda self: None\n with performance.Monitor('total runtime', measuremem=True) as monitor:\n if os.environ.get('OQ_DISTRIBUTE') not in ('no', 'processpool'):\n os.environ['OQ_DISTRIBUTE'] = 'processpool'\n if 'hazard_calculation_id' in params:\n hc_id = int(params['hazard_calculation_id'])\n else:\n hc_id = None\n if hc_id and hc_id < 0: # interpret negative calculation ids\n calc_ids = datastore.get_calc_ids()\n try:\n params['hazard_calculation_id'] = str(calc_ids[hc_id])\n except IndexError:\n raise SystemExit(\n 'There are %d old calculations, cannot '\n 'retrieve the %s' % (len(calc_ids), hc_id))\n oqparam = readinput.get_oqparam(job_ini, kw=params)\n calc = base.calculators(oqparam, calc_id)\n if reuse_input: # enable caching\n oqparam.cachedir = datastore.get_datadir()\n calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb,exports=exports)\n \n calc_id = datastore.get_last_calc_id()\n path = os.path.join(datastore.get_datadir(), 'calc_%d.hdf5' % calc_id)\n dstore = datastore.read(path)\n export_realizations('realizations', dstore)\n except:\n print('FetchOpenQuake: Classical PSHA failed.')\n return 1 \n else:\n try:\n print('FetchOpenQuake: running Version {}.'.format(oq_version))\n # reloading \n from openquake.commands import run\n from openquake.commonlib import logs, datastore\n from openquake.calculators.export.hazard import export_realizations\n\n #run.main([job_ini], exports=exports)\n # invoke/modify deeper openquake commands here to make it compatible with \n # the pylauncher on stampede2 for parallel runs... 
\n from openquake.baselib import performance, general\n from openquake.server import dbserver\n from openquake.calculators import base\n dbserver.ensure_on()\n global calc_path\n loglevel = 'info'\n params = {}\n reuse_input = False\n concurrent_tasks = None\n pdb = False\n for i in range(1000):\n try:\n log = logs.init(\"job\", job_ini, getattr(logging, loglevel.upper()))\n except:\n time.sleep(0.01)\n continue\n else:\n print('FetchOpenQuake: log created.')\n break\n log.params.update(params)\n base.BaseCalculator.gzip_inputs = lambda self: None\n with log, performance.Monitor('total runtime', measuremem=True) as monitor:\n calc = base.calculators(log.get_oqparam(), log.calc_id)\n if reuse_input: # enable caching\n calc.oqparam.cachedir = datastore.get_datadir()\n calc.run(concurrent_tasks=concurrent_tasks, pdb=pdb, exports=exports)\n\n logging.info('Total time spent: %s s', monitor.duration)\n logging.info('Memory allocated: %s', general.humansize(monitor.mem))\n print('See the output with silx view %s' % calc.datastore.filename)\n\n calc_id = logs.get_last_calc_id()\n path = os.path.join(logs.get_datadir(), 'calc_%d.hdf5' % calc_id)\n dstore = datastore.read(path)\n export_realizations('realizations', dstore)\n except:\n print('FetchOpenQuake: Classical PSHA failed.')\n return 1 \n\n # h5 clear for stampede2 (this is somewhat inelegant...)\n if 'stampede2' in socket.gethostname():\n # h5clear\n if oq_h5clear(path) == 0:\n print('FetchOpenQuake.oq_run_classical_psha: h5clear completed')\n else:\n print('FetchOpenQuake.oq_run_classical_psha: h5clear failed')\n \n # copy the calc file to output directory\n if dir_info:\n dir_output = dir_info['Output']\n try:\n shutil.copy2(path, dir_output)\n print('FetchOpenQuake: calc hdf file saved.')\n except:\n print('FetchOpenQuake: failed to copy calc hdf file.')\n \n return 0" ]
[ "0.64223295", "0.6202825", "0.5899661", "0.57702637", "0.56131524", "0.5524281", "0.5509671", "0.5425418", "0.53912246", "0.53891236", "0.53559035", "0.53552413", "0.53085", "0.5271458", "0.5259535", "0.5251413", "0.524817", "0.5218205", "0.52015793", "0.51848274", "0.51798725", "0.51498103", "0.51473534", "0.51421624", "0.51254344", "0.51157933", "0.5091453", "0.507356", "0.5070835", "0.50596446", "0.50395125", "0.50288635", "0.50206476", "0.4999896", "0.49944872", "0.49892536", "0.49891856", "0.49789354", "0.49756408", "0.4973418", "0.49716285", "0.49683586", "0.4966695", "0.49654314", "0.4951209", "0.49509352", "0.49485776", "0.49482405", "0.49384242", "0.4934809", "0.49290562", "0.49022052", "0.4888975", "0.4874503", "0.48703662", "0.48696497", "0.4869485", "0.4868689", "0.48656934", "0.48610774", "0.48601177", "0.4858574", "0.48583522", "0.48546895", "0.4851634", "0.48498553", "0.4848804", "0.4848179", "0.48400328", "0.4833297", "0.48326465", "0.4821627", "0.48190552", "0.4814284", "0.4813526", "0.48052043", "0.48027778", "0.48027754", "0.47948775", "0.47857824", "0.47848946", "0.4781404", "0.4776101", "0.47742987", "0.47742987", "0.47627246", "0.47603044", "0.47594124", "0.47591624", "0.47519863", "0.47519854", "0.47437674", "0.4736587", "0.4735234", "0.47336367", "0.4730679", "0.4730679", "0.4730679", "0.4730679", "0.47305283" ]
0.5289428
13
Load QA data from night objects on disk
def load_data(self, inroot=None):
    self.data = {}
    # Load
    for night in self.mexp_dict.keys():
        qaNight = QA_Night(night, specprod_dir=self.specprod_dir, qaprod_dir=self.qaprod_dir)
        qaNight.load_data()
        #
        self.data[night] = qaNight.data[night]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_data(self):", "def load_data(self) -> None:", "def load_rentedout():", "def _load_test_data(self):\n self._save_test_data()", "def load_mp_data():\n tree = Artifact.load(os.path.join(PREFIX_DIR, \"rooted-tree.qza\"))\n table = Artifact.load(os.path.join(PREFIX_DIR, \"table.qza\"))\n pcoa = Artifact.load(\n os.path.join(PREFIX_DIR, \"unweighted_unifrac_pcoa_results.qza\")\n )\n md = Metadata.load(os.path.join(PREFIX_DIR, \"sample_metadata.tsv\"))\n # We have to transform the taxonomy QZA to Metadata ourselves\n taxonomy = Artifact.load(os.path.join(PREFIX_DIR, \"taxonomy.qza\"))\n fmd = taxonomy.view(Metadata)\n return tree, table, md, fmd, pcoa", "def load(self):", "def load_data(self):\n if self.debug:\n print(\"Loading data\")", "def load_data(self):\n raise NotImplementedError()", "def loadData(catalog):\n loadArtworks(catalog)\n loadArtists(catalog)", "def load_raw_data(apps, schema_editor):\n from season.import_raw_data import InitialDataProcessor\n matches_path = str(BASE_DIR) + '/season/migrations/matches.csv'\n deliveries_path = str(BASE_DIR) + '/season/migrations/deliveries.csv'\n # Initialization path to read data\n load_data = InitialDataProcessor(matches_path=matches_path, deliveries_path=deliveries_path)\n # transform data frame and save the data step by step\n # only support new season import for the first tym when data structure is ready to use\n load_data.transform_input_save()", "def test_load_data_base(self):\n pltp = get_object_or_404(Loaded_Pltp, name=\"test\")\n pl = pltp.loaded_pl_set.all()\n \n self.assertEqual(pltp.name, \"test\")\n #test json\n \n self.assertEqual(len(pl), 2)\n \n self.assertEqual(pl[0].name, \"test1\")\n #test dirname + json\n self.assertEqual(pl[1].name, \"test2\")\n #test dirname + json", "def loadData(catalog):\n loadArtists(catalog)\n loadArtworks(catalog)", "def loadData(catalog):\n loadArtists(catalog)\n loadArtworks(catalog)", "def load_data_from_files(self):\n # separated method to allow mock easier\n logger.info(\"Loading data...\")\n parent = Path(__file__).parent\n path = parent / \"resources\" / \"scores.txt\"\n self.scorer.load_from_file(path)\n path = parent / \"resources\" / \"american-english-large.txt\"\n self.trie.load_from_file(path)\n path = parent / \"resources\" / \"reels.txt\"\n self.reels = Reel.get_from_file(path)\n logger.info(\"Data loaded!\")", "def load_data(self, read_shelf):\n if read_shelf:\n try:\n # Attempt reading pre-shelved objects first\n self.__read_shelf()\n except Exception as e:\n print(f'Exception while reading the data shelf ({e})')\n # Otherwise, read data from the the json files\n self.__read_json()\n else:\n self.__read_json()", "def test_quest_load_data_fail(testing_quest_page):\n testing_quest_page.save()\n\n # fetch the data\n doc = testing_quest_page.doc_ref.get()\n data = testing_quest_page.storage_model.parse_obj(doc.to_dict())\n\n # mess with the data\n data.serialized_data = json.dumps({\"this\": \"nonesense\"})\n testing_quest_page.doc_ref.set(data.dict())\n\n # try to load with the bad version\n with pytest.raises(QuestLoadError):\n testing_quest_page.load()\n\n # cleanup\n testing_quest_page.delete()", "def load(self):\n pass", "def load(self):\n pass", "def load(self):\n pass", "def load(self):\n pass", "def load_xqa_wrapper(path: str, part: str):\n if part == \"all\":\n parts = [\"dev\", \"test\"]\n data = {}\n if os.path.exists(join(path, \"train_doc.json\")):\n parts.append(\"train\")\n for part in parts:\n question_data = join(path, f\"{part}_doc.json\")\n gold_data = 
join(path, f\"{parts}.txt\")\n data = {**data, **load_xqa(question_data, gold_data)}\n else:\n question_data = join(path, f\"{part}_doc.json\")\n gold_data = join(path, f\"{part}.txt\")\n data = load_xqa(question_data, gold_data)\n logging.info(f\"Data loaded of size {len(data)}\")\n return data", "def loadData(catalog):\n\n loadArtwork(catalog)\n loadArtists(catalog)", "def _loadData(self, data):\n Episode._loadData(self, data)\n PlexHistory._loadData(self, data)", "def _load_training_data(self):\n self._save_training_data()", "def load(self):\n #self.df = read_file(\"../data/yelp_academic_dataset_user.json\") #Full Data.\n self.df = read_file(\"../data/user300.json\") #For local machine.\n #self.get_friend_list()\n #self.save_friend_nodes()", "def _load_disk(self):", "def _load_disk(self):", "def importData():\n #importChallengeDataToDB()\n importTrendingDataToDB()", "def _loadData(self, data):\n Movie._loadData(self, data)\n PlexHistory._loadData(self, data)", "def test_loadData():\n \n sys = LVsystem.Ecosystem()\n \n sys.loadSetup('2Prey1Predator')\n \n \n data = sys.create_data()\n \n assert data[0] == 3\n assert data[1] == ['rabbit', 'hen', 'fox']\n assert data[2] == [30,10,20]\n assert data[3] == [0.09,0.07,-0.06] \n assert data[4] == [10000,10000,1]\n assert data[5] == [400,500,250]\n assert data[6][1][2] == -data[6][2][1]\n assert data[6][2][2] == 0\n\n sys.removeSpecies('rabbit')\n sys.removeSpecies('fox')\n sys.removeSpecies('hen')", "def load_objects(self, queue):\n pass", "def loadProducts():\n dump = os.path.dirname(os.path.abspath(__file__)) + \"/dump.json\"\n data = open(dump, 'r')\n for deserialized_object in serializers.deserialize(\"json\", data):\n deserialized_object.save()", "def load_new_data():\n require('settings', provided_by=[production, staging])\n \n maintenance_up()\n load_data()\n maintenance_down()", "def test_load_model_data(self):\n add_components_and_load_data(\n prereq_modules=IMPORTED_PREREQ_MODULES,\n module_to_test=MODULE_BEING_TESTED,\n test_data_dir=TEST_DATA_DIRECTORY,\n subproblem=\"\",\n stage=\"\",\n )", "def test_load_model_data(self):\n add_components_and_load_data(\n prereq_modules=IMPORTED_PREREQ_MODULES,\n module_to_test=MODULE_BEING_TESTED,\n test_data_dir=TEST_DATA_DIRECTORY,\n subproblem=\"\",\n stage=\"\",\n )", "def test_load_an_object_json_file(self):\n from test.resources import malaga\n self.assertEqual(len(malaga.data), 5018112)\n self.assertEqual(malaga.Model, 'iPhone 4')", "def test_load_file_contents():\n\n file_name = 'test_fooof_all'\n loaded_data = load_json(file_name, TEST_DATA_PATH)\n\n # Check settings\n for setting in OBJ_DESC['settings']:\n assert setting in loaded_data.keys()\n\n # Check results\n for result in OBJ_DESC['results']:\n assert result in loaded_data.keys()\n\n # Check results\n for datum in OBJ_DESC['data']:\n assert datum in loaded_data.keys()", "def test_data_dump_and_load():\n raw_data = {\"A\": 1, \"B\": 2}\n\n # Dump json\n json_file = os.path.join(tempfile.gettempdir(), \"jade-unit-test-file.json\")\n dump_data(raw_data, json_file)\n assert os.path.exists(json_file)\n\n # Load json\n json_data = load_data(json_file)\n assert json_data == raw_data\n\n if os.path.exists(json_file):\n os.remove(json_file)\n\n # Dump toml\n toml_file = os.path.join(tempfile.gettempdir(), \"jade-unit-test-file.toml\")\n dump_data(raw_data, toml_file)\n assert os.path.exists(toml_file)\n\n # Load toml\n toml_data = load_data(toml_file)\n assert toml_data == raw_data\n\n if os.path.exists(toml_file):\n 
os.remove(toml_file)\n\n # Re-enable if we add support again.\n # Dump yaml\n # yaml_file = os.path.join(tempfile.gettempdir(), \"jade-unit-test-file.yaml\")\n # dump_data(raw_data, yaml_file)\n # assert os.path.exists(yaml_file)\n\n ## Load yaml\n # yaml_data = load_data(yaml_file)\n # assert yaml_data == raw_data\n\n # if os.path.exists(yaml_file):\n # os.remove(yaml_file)", "def _loadData(self, data):\n Episode._loadData(self, data)\n PlexSession._loadData(self, data)", "def main():\n for tourney in tourneys:\n read_tourney(tourney)\n with open(\"obj/tournaments.pkl\", \"wb\") as f:\n pickle.dump(tournaments, f, pickle.HIGHEST_PROTOCOL)\n with open(\"obj/participants.pkl\", \"wb\") as f:\n pickle.dump(raw_participants, f, pickle.HIGHEST_PROTOCOL)\n with open(\"obj/matches.pkl\", \"wb\") as f:\n pickle.dump(all_matches, f, pickle.HIGHEST_PROTOCOL)", "def load_data(ctx, klass=None):\n if klass:\n if klass and not klass.startswith(\"public_data.models\"):\n klass = f\"public_data.models.{klass}\"\n options = {\"class\": klass}\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"load_data\", **options)", "def load(self):\n\n raise NotImplementedError", "def loadData(catalog):\n loadArtworks(catalog)\n loadArtists(catalog)\n loadAdquires(catalog)\n loadNacionalities(catalog)\n load2DArtworks(catalog)\n loadArtistMediumsTags(catalog)\n loadDptments(catalog)\n catalog['artists'] = sortArtists(catalog, 3)\n fillArtistMediums(catalog)\n fillMostUsedMediums(catalog)\n catalog['artists_tags'] = sortArtistTags(catalog, 3)\n sort_dptments(catalog)", "def __init__(self, specprod_dir=None, **kwargs):\n if specprod_dir is None:\n specprod_dir = specprod_root()\n self.specprod_dir = specprod_dir\n # Init\n QA_MultiExp.__init__(self, specprod_dir=specprod_dir, **kwargs)\n # Load up exposures for the full production\n nights = get_nights(specprod_dir=self.specprod_dir)\n for night in nights:\n self.mexp_dict[night] = {}\n for exposure in get_exposures(night, specprod_dir = self.specprod_dir):\n # Object only??\n frames_dict = get_files(filetype = str('frame'), night = night,\n expid = exposure, specprod_dir = self.specprod_dir)\n self.mexp_dict[night][exposure] = frames_dict\n # Output file names\n self.qaexp_outroot = self.qaprod_dir+'/'+self.prod_name+'_qa'\n # Nights list\n self.qa_nights = []", "def test1_loading(self):\n\t\tprint \"\\nTEST 1: Loading ontologies from %s folder.\\n=================\" % DATA_FOLDER\n\t\t\n\t\tfor f in os.listdir(DATA_FOLDER):\n\t\t\tif not f.startswith('.'):\n\t\t\t\tprint \"Loading... 
>\", f\t\t\n\t\t\t\t\n\t\t\t\to = ontospy.Ontology(DATA_FOLDER + f)\n\t\t\t\t\n\t\t\t\tself.assertEqual(type(o), ontospy.Ontology)\n\t\t\t\tprint \"Success.\"", "def load_and_fix(self):\n # Read in json\n self.read_json()\n\n if self.size_to_load:\n self.data = self.data[:self.size_to_load]\n\n # Add names from database given _bsn:\n self.extend_dataframe_with_personnames()\n\n # Clean rows in the data_frame where the names column is empty - > thus no response from the database\n self.clean_none_response()\n\n # Fix path from A09.pdf to A09.json\n self.fix_path()\n\n # Get the correct names from the database response\n self.parse_names_from_response()\n\n print(\" --- Final Shape Data ---\")\n print(self.data.shape)\n print(list(self.data))\n\n # Save pickled object in ./data map\n self.save_obj(self.data, self.file_name_to_save)", "def load_data():\n\n dump_path = dump_base + '/micro_poi/mpoi_info/'\n\n assert os.path.exists(dump_path)\n\n dpath = dump_path + 'shortest_path.pickle'\n paths = joblib.load(dpath)\n\n dpath = dump_path + 'path_list.pickle'\n path_list = joblib.load(dpath)\n\n dpath = dump_path + 'gain.pickle'\n gain = joblib.load(dpath)\n\n dpath = dump_path + 'stay.pickle'\n stay_time = joblib.load(dpath)\n\n dpath = dump_path + 'reach.pickle'\n reach_time = joblib.load(dpath)\n\n spath = dump_base + '/micro_poi/model_params.list'\n model_params = np.loadtxt(spath)\n\n return np.array(paths), path_list, gain, stay_time, reach_time, model_params", "def _load(self):\n raise NotImplementedError()", "def load(self):\n raise NotImplementedError", "def load(self):\n raise NotImplementedError", "def preload(self):\n # load the objects\n for otype, fname in self.TYPE2NAME.items():\n if fname:\n path = os.path.join(self.anodir, fname + \".gz\")\n if os.path.isfile(path):\n with gzip.open(path, \"rt\") as handler:\n for line in handler:\n omap = json.loads(line)\n cls = self.TYPE2CLASS[otype]\n item = cls.from_map(omap, self)\n self.caches[otype][item.id] = item", "def load_eli5(self, opt):\n dp = opt['datapath']\n dt = opt['datatype'].split(':')[0]\n eli_path = \"eli5/processed_data/selected_15_1/explainlikeimfive_\"\n fname = os.path.join(dp, eli_path + dt + \".json\")\n if not PathManager.exists(fname):\n raise FileNotFoundError(\n f\"{fname} not found. 
Please follow the instructions found at \"\n \"https://github.com/facebookresearch/ParlAI/tree/main/parlai/tasks/eli5/README.md\"\n \" to construct the dataset.\"\n )\n opt['datafile'] = fname\n with PathManager.open(fname) as json_file:\n data = json.load(json_file)\n ds = []\n for d in data:\n if self.opt['knowledge']:\n text = d['document'] + \"\\n\" + d['question']\n else:\n text = d['question']\n act = {\n 'id': 'eli5',\n 'text': text,\n 'labels': [d['answer']],\n 'episode_done': True,\n }\n ds.append(act)\n return ds", "def load_synthetic_data():\n\n pickle_object = FM().data_file \n\n with pickle_object.open('rb') as data_file: \n return pickle.load(data_file)", "def load_inst(self):\n self.sanity_check()\n\n fname_pub_auth_all = ''.join([self.config.dir_data, '/',\n self.config.fname_pub_auth_all, '_',\n self.config.experiment_id, '.pk'])\n fname_pub_auth_top = ''.join([self.config.dir_data, '/',\n self.config.fname_pub_auth_top, '_',\n self.config.experiment_id, '.pk'])\n fname_pub_inst_all = ''.join([self.config.dir_data, '/',\n self.config.fname_pub_inst_all, '_',\n self.config.experiment_id, '.pk'])\n fname_pub_inst_top = ''.join([self.config.dir_data, '/',\n self.config.fname_pub_inst_top, '_',\n self.config.experiment_id, '.pk'])\n self.pub_auth_all = pickle.load(open(fname_pub_auth_all, 'rb'))\n self.pub_auth_top = pickle.load(open(fname_pub_auth_top, 'rb'))\n self.pub_inst_all = pickle.load(open(fname_pub_inst_all, 'rb'))\n self.pub_inst_top = pickle.load(open(fname_pub_inst_top, 'rb'))\n\n fname_pub_history = ''.join([self.config.dir_data, '/history_',\n self.config.experiment_id, '.pk'])\n self.history = pickle.load(open(fname_pub_history, 'rb'))\n\n fname_pub_staff = ''.join([self.config.dir_data, '/staff_',\n self.config.experiment_id, '.pk'])\n self.staff = pickle.load(open(fname_pub_staff, 'rb'))", "def __init__(self, data_config):\n self._brands = self._load_from_directory(data_config['targeted_brands_dir'])\n self._keywords = self._load_from_directory(data_config['keywords_dir'])\n self._fqdn_keywords = self._load_from_directory(data_config['fqdn_keywords_dir'])\n self._similarity_words = self._load_from_directory(data_config['similarity_words_dir'])\n self._tlds = self._load_from_directory(data_config['tld_dir'])", "def load_cleaned_data(self):\n try:\n self.train = pd.read_pickle('../input/train_clean.pkl')\n self.test = pd.read_pickle('../input/test_clean.pkl')\n except FileNotFoundError:\n self.load_raw_data()", "def load_fhwa_records():\n print('--- Importing FHWA DFLTD v.2 records ---')\n for i in tqdm(range(len(tbl_project))):\n prj_id = tbl_project['lng_KeyProject'][i]\n\n expl_in_project = tbl_exploration[\n tbl_exploration.lng_KeyProject == prj_id].index\n for i_exp in expl_in_project:\n expl_id = tbl_exploration['txt_KeyExplorationName'][i_exp]\n\n piles_in_project = tbl_deepfoundation[\n tbl_deepfoundation.lng_KeyProject == prj_id].index\n for i_pile in piles_in_project:\n pile_id = tbl_deepfoundation['lng_KeyDeepFoundation'][i_pile]\n\n tests_for_pile = tbl_loadtest[\n (tbl_loadtest.lng_KeyProject == prj_id) &\n (tbl_loadtest.lng_KeyDeepFoundation == pile_id)\n ].index\n for i_lt in tests_for_pile:\n test_id = tbl_loadtest['lng_KeyLoadTest'][i_lt]\n\n # -- Adding Project Data -------------------------------- #\n if len(piles_in_project) > 1 and len(expl_in_project) < 2:\n wrn = 'Expanded from a project with multiple piles '\\\n 'and/or retests'\n prj = add_loc_proj(i, wrn)\n elif len(piles_in_project) < 2 and len(expl_in_project) > 1:\n wrn = 'Expanded 
from a project with multiple '\\\n 'explorations'\n prj = add_loc_proj(i, wrn)\n elif len(piles_in_project) > 1 and len(expl_in_project) > 1:\n wrn = 'Expanded from a project with multiple '\\\n 'explorations and multiple piles/retests'\n prj = add_loc_proj(i, wrn)\n else:\n prj = add_loc_proj(i)\n db.session.add(prj)\n\n # -- Adding Exploration Data ---------------------------- #\n exploration = add_expl_data(i_exp, expl_id, prj)\n db.session.add(exploration)\n\n # -- Adding Layer Data ---------------------------------- #\n add_layer_data(prj_id, expl_id, exploration)\n\n # -- Adding Pile Data ----------------------------------- #\n pile = add_pile_data(i_pile, prj_id, pile_id, prj)\n db.session.add(pile)\n\n # -- Adding Load Test Data ------------------------------ #\n load_test = add_load_test_data(i_lt, pile)\n db.session.add(load_test)\n\n # -- Adding Static Test Data ---------------------------- #\n add_static_test_data(prj_id, pile_id, test_id, load_test)\n\n # -- Adding Interpreted Data ---------------------------- #\n add_interp_data(prj_id, pile_id, test_id, load_test)\n\n db.session.commit()", "def load_quests(self):\n\n raise NotImplementedError()", "def loadData():\n project_dir = \"/home/c/chandanchowdhury/Documents/CIS-833/CSSearch/indexer/\"\n\n index_file = \"index_file.pkl\"\n link_file = \"link_file.pkl\"\n\n index_data = loadPickle(project_dir+index_file)\n link_data = loadPickle(project_dir+link_file)\n\n return index_data, link_data", "def loadData(catalog):\n loadVideos(catalog)\n loadCategories(catalog)", "def _loadData(self, data):\n Movie._loadData(self, data)\n PlexSession._loadData(self, data)", "def extra_object_files(self):", "def loadData(self):\n\n for info in os.walk(settings.BEHAVIOR_PATH):\n path = info[0]\n\n # Get the files, if there are any\n for element in info[2]:\n split = element.split(\".\")\n\n # If there's only one '.' in the filename, then we know it's not a .old.h5 file, or a file without an extension.\n if(len(split) == 2):\n name, extension = element.split(\".\")\n\n if(self.log):\n logging.debug(\"Name: \" + name + \" Extension: \" + extension)\n\n for animal in self.subjects:\n\n # Get the date from the name and format it in ISO format to compare to the current date.\n experimentDate = name.split(\"_\")[-1]\n isoDate = experimentDate[:4] + \"-\" + experimentDate[4:6] + \"-\" + experimentDate[6:8]\n\n if(self.log):\n logging.debug(\"Comparing date: \" + str(isoDate) + \" to \" + str(self.date) + \" (today)\")\n\n # We only want data from today from an animal that we care about\n if(self.date == extrafuncs.parse_isodate(isoDate) and extension == \"h5\" and animal in name):\n try:\n full_path = os.path.join(path, element)\n self.behavData.append((full_path, loadbehavior.BehaviorData(full_path, readmode='full')))\n if(self.log):\n logging.info(\"Successfully loaded data from: \" + full_path)\n except:\n self.sendToAllSubscribers(\"Error when attempting to load \" + full_path + \".\", \"Alert: Alarm error\")\n if(self.log):\n logging.error(\"Could not load \" + full_path + \".\")", "def load_data(self):\n @Logger.runtime\n def process_coords():\n \"\"\"\n The placement of locations on our minimap is crucial. Panda3D objects however have a coordinate range from\n -1 to 1 on all axis, meaning that if we read a coordinate of a location from some image processing software\n by hand, we have to transform those coordinates into coordinates Panda would understand. 
This function does\n just that.\n :return: Normalized coordinates of location coordinates.\n \"\"\"\n split_coords = row[\"map_coord\"].split(',')\n map_x, map_y = [int(i) for i in split_coords]\n map_x_normed = ((map_x*2) / self.MINIMAP_DIM) - 1\n map_y_normed = -(((map_y*2) / self.MINIMAP_DIM) - 1)\n return map_x_normed, map_y_normed\n\n @Logger.runtime\n def process_texture():\n texture_path = Path(\"resource/textures/{}\".format(row[\"texture\"]))\n texture = self.loader.loadTexture(texture_path)\n return texture\n\n # the cylinder is loaded here but it does not yet show up, until it's specifically asked to\n self.scene_3d_model = self.loader.loadModel(self.PATHS[\"3D_SCENE_MODEL\"])\n\n try:\n with open(self.PATHS[\"LOCATIONS_DB\"], \"r\") as l_file:\n data = csv.DictReader(l_file, delimiter=\"|\")\n for row in data:\n id = int(row[\"id\"])\n x, y = process_coords()\n neighbors = [int(neighbor_id) for neighbor_id in row[\"neighbors\"].split(',')]\n texture = process_texture()\n location = Location(id, x, y, neighbors, texture)\n location.reparentTo(self.render2d)\n self.locations.append(location)\n Logger.log_info('The locations_db has been loaded')\n except:\n Logger.error('{} file not found!'.format(self.PATHS[\"LOCATIONS_DB\"]))\n\n self.active_location = self.locations[0]", "def load(cls):\n playerdata = Data.raw_load(\"savedata.dat\")\n for key in playerdata:\n cls.name = playerdata[\"name\"]\n cls.max_hp = playerdata[\"max_hp\"]\n cls.hp = playerdata[\"hp\"]\n cls.lv = playerdata[\"lv\"]\n cls.exp = playerdata[\"exp\"]\n cls.atk = playerdata[\"atk\"]\n cls._def = playerdata[\"_def\"]\n cls.inventory = playerdata[\"inventory\"]\n cls.pin = playerdata[\"pin\"]", "def load(self):\n self._really_load()", "def loadData(catalog):\r\n controller.loadData(catalog)", "def test_quest_load_save(testing_quest_page):\n\n # generate a save data and edit a bit\n testing_quest_page.delete()\n assert not testing_quest_page.exists\n\n testing_quest_page.save()\n assert testing_quest_page.exists\n\n testing_quest_page.load()", "def loadParts(self):\n for i in range(15):\n self.model_parts[i] = loadModel(\"ato_{}.pkl\".format(str(i)))", "def __init__(self):\n f = open(configuration.dataDirectory+'MuonEfficiencies_Run_2012A_2012_B_53X.pkl', 'r')\n if f :\n self._map = pickle.load(f)\n self._eta_range = ''\n self._pt_range = ''\n else :\n print 'ERROR: Input file for Trigger efficiencies not existing!'", "def __init__(self):\n f = open(configuration.dataDirectory+'MuonEfficiencies_Run_2012A_2012_B_53X.pkl', 'r')\n if f :\n self._map = pickle.load(f)\n self._eta_range = ''\n self._pt_range = ''\n else :\n print 'ERROR: Input file for Trigger efficiencies not existing!'", "def create_dicts():\n load_data_for_dict('data/atis/train/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/valid/seq.in', 'data/atis/voc/vocabulary.json')\n load_data_for_dict('data/atis/test/seq.in', 'data/atis/voc/vocabulary.json') \n load_data_for_dict('data/atis/train/seq.out', 'data/atis/voc/slot_vocabulary.json')", "def test_AFQ_data():\n _, bids_path, _ = get_temp_hardi()\n\n for mapping in [SynMap(use_prealign=False), AffMap()]:\n myafq = api.AFQ(\n bids_path=bids_path,\n dmriprep='vistasoft',\n mapping=mapping)\n npt.assert_equal(nib.load(myafq.b0[\"01\"]).shape,\n nib.load(myafq.dwi_file[\"01\"]).shape[:3])\n npt.assert_equal(nib.load(myafq.b0[\"01\"]).shape,\n nib.load(myafq.dti_params[\"01\"]).shape[:3])\n myafq.rois\n shutil.rmtree(op.join(\n bids_path,\n 'derivatives/afq'))", "def 
_collect_quizzes():\n data_path = join(dirname(abspath(__file__)), 'data')\n for _, _, filenames in os.walk(data_path):\n for filename in filenames:\n if filename.endswith('.yml'):\n quiz_type = filename.replace('.yml', '').capitalize()\n QUIZ_DICT[quiz_type] = []\n with open(join(data_path, filename), encoding='utf-8') as f:\n data = yaml.load(f)\n for class_name, settings in data.items():\n Q = type(class_name, (Quiz, ), settings)\n QUIZ_DICT[quiz_type].append(Q)\n QUIZ_DICT_FLAT[class_name] = Q", "def load(self, path=\"qtable.npy\"):\n self.Q = np.load(path)", "def detect_serialized_datasets(self):\n prepared_data_dir = str(utils.prepared_data_folder / self.dir_str / self.period)\n os.makedirs(prepared_data_dir, exist_ok=True)\n self.prepared_data_dir = prepared_data_dir\n print(f'Looking for pickles in {self.prepared_data_dir}')\n\n if len(utils.find('*serialized.pkl', self.prepared_data_dir)) == 2:\n print('This domain-period combination has been serialized before, loading objects...')\n for pkl in utils.find('*.pkl', self.prepared_data_dir):\n if \"input_ds\" in pkl: self.input_ds_serialized_path = pkl\n elif \"rf_ds\" in pkl: self.rf_ds_serialized_path = pkl\n else: \n print('Proceeding to load & serialize raw data. ')\n self.raw_input_dir = prepare.get_raw_input_data(self)\n self.raw_rf_dir = prepare.get_raw_target_data(self)\n print(f'Raw input datasets taken from @: \\n{self.raw_input_dir}')\n print(f'Raw rainfall datasets taken from @: \\n{self.raw_rf_dir}')\n self.input_ds_serialized_path, self.rf_ds_serialized_path = prepare.prepare_dataset(self, self.prepared_data_dir)\n print(f'Serialized raw input datasets @: \\n{self.input_ds_serialized_path}')\n print(f'Serialized raw RF datasets @: \\n{self.rf_ds_serialized_path}')", "def load_data(self):\n overlength_num = title_num = 0\n with open(self.path, 'r', encoding='utf-8') as r:\n for line in r:\n inst = json.loads(line)\n is_title = inst['sent_id'].endswith('-3') and inst['tokens'][-1] != '.'\n if self.ignore_title and is_title:\n title_num += 1\n continue\n\n # TODO: add back coarse type\n for event in inst['event_mentions']:\n event_type = event['event_type']\n if ':' in event_type:\n event['event_type'] = event_type.split(':')[1].upper()\n self.data.append(inst)\n\n if title_num:\n print('Discarded {} titles'.format(title_num))\n print('Loaded {} instances from {}'.format(len(self), self.path))", "def setUpClass(cls):\n super(Module05Tests, cls).setUpClass()\n cls.datasets = {\n 0: DATASETS_ROOT + 'diffusion_synthetic_normal_L8_r2_slices_41_50_gr15_b1200',\n 1: DATASETS_ROOT + 'filtered',\n 2: DATASETS_ROOT + 'noise'\n }\n cls.data = smns.load_object(file_path=cls.datasets[2])", "def load(self):\n self.results = pickle_load('results', self.main_dir)", "def get_demand_data():\r\n with open(\"{}DEMAND.txt\".format(db_folder_path), \"r\", encoding=\"utf8\") as demand_file:\r\n return eval(demand_file.read(), {'__builtins__':None}, {})", "def _load_static_data(module_path):\n equi7_data = None\n fname = os.path.join(os.path.dirname(module_path), \"data\", \"equi7grid.dat\")\n with open(fname, \"rb\") as f:\n equi7_data = pickle.load(f)\n return equi7_data", "def setUp(self):\n with open('test/0a6a357e.json') as read_file:\n self.tx_json_0a6a357e = json.load(read_file)\n with open('test/bip69-synth.json') as read_file:\n self.bip69_synth = json.load(read_file)", "def _load(self, data):\n raise NotImplementedError(\"Don't know how to load the task\")", "def setup():\n if not os.path.isfile(etymology_file):\n page = 
re.compile(r'index.php\\?l=\\w+&p=\\d+&allowed_in_frame=0.html')\n pages = list(find_files(directory=site, pattern=page, recursive=False))\n etymology = etymologies(pages)\n dump(etymology, etymology_file)\n for affix, dictionary in affixes(etymology):\n affix_file = os.path.join('resources', '{}.json'.format(affix))\n if not os.path.isfile(affix_file):\n dump(dictionary, affix_file)", "def load_data(self):\n try:\n self.manager.load()\n except error:\n show_error_message(title='Initialization error!',\n message='File lords.sdb was not found!')\n else:\n self.update_widgets_values()", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def loadData(catalog):\n controller.loadData(catalog)", "def test_loads_all_example_data(self):\n for file_name in EXAMPLE_CARTS:\n with open(f\"examples/{file_name}\", \"r\") as f:\n cart = json.load(f)\n\n Item.Schema(many=True).load(cart)", "def main():\n season_2019_preds_for_tipresias_2020 = Q(ml_model__name=\"tipresias_2020\") & Q(\n match__start_date_time__year=2019\n )\n season_2020_preds_for_round_1 = Q(match__start_date_time__year=2020) & (\n Q(match__round_number=1)\n )\n\n prediction_records = Prediction.objects.filter(\n season_2019_preds_for_tipresias_2020 | season_2020_preds_for_round_1\n ).values(*_get_fields_for(Prediction))\n\n prediction_dump = [\n _reshape_record_fields(\"prediction\", record) for record in prediction_records\n ]\n dump_filepath = os.path.join(\n settings.BASE_DIR, APP_NAME, \"fixtures\", f\"{date.today()}-prediction-dump.json\",\n )\n\n with open(dump_filepath, \"w\") as file:\n json.dump(prediction_dump, file, indent=2)", "def load(self):\r\n\r\n pickle_file = 'paderborn.pickle'\r\n\r\n\r\n if os.path.isfile(pickle_file):\r\n with open(pickle_file, 'rb') as handle:\r\n acquisitions = pickle.load(handle)\r\n else:\r\n self.download()\r\n acquisitions = self.acquisitions()\r\n with open(pickle_file, 'wb') as handle:\r\n pickle.dump(acquisitions, handle, protocol=pickle.HIGHEST_PROTOCOL)\r\n\r\n return acquisitions", "def load(self):\n raise NotImplementedError()", "def load(self):\n raise NotImplementedError()", "def _load_disk(self):\r\n pass", "def build_data(self):\n from desiutil.io import combine_dicts\n # Loop on exposures\n odict = {}\n for qanight in self.qa_nights:\n for qaexp in qanight.qa_exps:\n # Get the exposure dict\n idict = write_qa_exposure('foo', qaexp, ret_dict=True)\n odict = combine_dicts(odict, idict)\n # Finish\n self.data = odict", "def readAssembledObjects(self):\n # get the classifier to use, if any, from the Assembler\n ## this is used to cluster the ROM segments\n self._divisionClassifier = self._assembledObjects.get('Classifier', [[None]*4])[0][3]\n self._metricClassifiers = self._assembledObjects.get('Metric', None)", "def __init__(self, data_path=root.joinpath(\"data\")):\n self.data_path = data_path", "def load_data(self, bpod_only=False, download_data=True):\n self.extractor = TaskQCExtractor(\n self.session_path, one=self.one, download_data=download_data, bpod_only=bpod_only)" ]
[ "0.65798295", "0.6398732", "0.6165075", "0.61242807", "0.5998567", "0.58897173", "0.58770794", "0.58206344", "0.57725376", "0.5756605", "0.5740402", "0.5737037", "0.5737037", "0.57252693", "0.5663166", "0.5662534", "0.56499773", "0.56499773", "0.56499773", "0.56499773", "0.5649456", "0.5644375", "0.5639408", "0.5625725", "0.55910057", "0.55572975", "0.55572975", "0.55492", "0.55483365", "0.552295", "0.5522398", "0.5508761", "0.5495338", "0.5470491", "0.5470491", "0.54658437", "0.5455484", "0.5444356", "0.5440722", "0.5439307", "0.5431165", "0.54261595", "0.53895295", "0.53844535", "0.53825283", "0.5355783", "0.53522795", "0.53519374", "0.53513473", "0.53513473", "0.53471184", "0.5345671", "0.53429943", "0.533353", "0.53313464", "0.5323259", "0.53132707", "0.5305962", "0.530023", "0.52999043", "0.5292779", "0.5292564", "0.5276725", "0.52735883", "0.52716", "0.5264531", "0.5261263", "0.52570766", "0.52565706", "0.52520454", "0.52520454", "0.5243316", "0.5238261", "0.5230295", "0.5229867", "0.5222403", "0.52122325", "0.5206892", "0.5205499", "0.5203887", "0.520329", "0.51978904", "0.5196957", "0.5188388", "0.5185127", "0.51843995", "0.51843995", "0.51843995", "0.51843995", "0.51843995", "0.517628", "0.5173274", "0.51722366", "0.5168442", "0.5168442", "0.51624763", "0.5159223", "0.51523274", "0.5148955", "0.5148578" ]
0.6338221
2
Build QA data dict from the nights
def build_data(self):
    from desiutil.io import combine_dicts
    # Loop on exposures
    odict = {}
    for qanight in self.qa_nights:
        for qaexp in qanight.qa_exps:
            # Get the exposure dict
            idict = write_qa_exposure('foo', qaexp, ret_dict=True)
            odict = combine_dicts(odict, idict)
    # Finish
    self.data = odict
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def load_data(self, inroot=None):\n self.data = {}\n # Load\n for night in self.mexp_dict.keys():\n qaNight = QA_Night(night, specprod_dir=self.specprod_dir, qaprod_dir=self.qaprod_dir)\n qaNight.load_data()\n #\n self.data[night] = qaNight.data[night]", "def raw_data() -> Dict:\n return {\"neighbourhood\":\"Buttes-Montmartre\",\"room_type\":\"Entire home/apt\",\"minimum_nights\":1.555,\"mois\":2,\"voyageurs\":2.5,\"chambres\":1,\"lits\":1,\"salle_de_bains\":1}", "def to_dict(self) -> Dict[str, Any]:\n return {\n \"1Q\": {\n \"{}\".format(qs.id): {\n \"f1QRB\": qs.f1QRB,\n \"f1QRB_std_err\": qs.f1QRB_std_err,\n \"f1Q_simultaneous_RB\": qs.f1Q_simultaneous_RB,\n \"f1Q_simultaneous_RB_std_err\": qs.f1Q_simultaneous_RB_std_err,\n \"fRO\": qs.fRO,\n \"T1\": qs.T1,\n \"T2\": qs.T2,\n \"fActiveReset\": qs.fActiveReset,\n }\n for qs in self.qubits_specs\n },\n \"2Q\": {\n \"{}-{}\".format(*es.targets): {\n \"fBellState\": es.fBellState,\n \"fCZ\": es.fCZ,\n \"fCZ_std_err\": es.fCZ_std_err,\n \"fCPHASE\": es.fCPHASE,\n \"fCPHASE_std_err\": es.fCPHASE_std_err,\n \"fXY\": es.fXY,\n \"fXY_std_err\": es.fXY_std_err,\n \"fISWAP\": es.fISWAP,\n \"fISWAP_std_err\": es.fISWAP_std_err,\n }\n for es in self.edges_specs\n },\n }", "def _build_results(self):\n results = {}\n cols = []\n for pol in POLLUTANTS:\n for adj in ADJUSTMENTS:\n cols.append(get_rate_column(pol, adjustment=adj, generated=False))\n cols.append(get_column(pol, adjustment=adj))\n cols.append(\"net_consumed_mwh\")\n for ba in self.regions:\n results[ba] = pd.DataFrame(\n index=self.generation.index, columns=cols, dtype=np.float64\n )\n return results", "def nutrition_data(self):\n data = dict()\n\n # get required data, generally from nutrient fields but some special cases\n data['cost'] = self.best_price\n data['grams'] = settings.STANDARD_WEIGHT # data stored per KG or 100g\n if self.serving:\n data['grams_serve'] = self.serving # optional serving size\n for k in settings.NUTRITION_DATA_ITEMS_BASIC:\n data[k] = getattr(self,k)\n\n return add_nutrition_ratios(data) # generate ratios and values from above", "def _build_contest_kwargs(self, row, sheet):\n\n jurisdiction = str(\n sheet.cell(\n rowx=row,\n colx=self.jurisdiction_index).value).strip()\n\n return {\n 'office': sheet.cell(\n rowx=row,\n colx=self.contest_index).value.strip(),\n 'jurisdiction': jurisdiction\n }", "def _aiida_ndb_qp(self, data ):\n pdata = ArrayData()\n pdata.set_array('Eo', numpy.array(data['Eo']))\n pdata.set_array('E_minus_Eo', numpy.array(data['E-Eo']))\n pdata.set_array('Z', numpy.array(data['Z']))\n pdata.set_array('qp_table', numpy.array(data['qp_table']))\n try:\n pdata.set_array('So', numpy.array(data['So']))\n except KeyError:\n pass\n return pdata", "def test1():\n print( 'testing state data processing...')\n fname = \"HPI_PO_state.txt\"\n data = indexTools.read_state_house_price_data( \"data/\" + fname )\n\n answer = dict()\n answer[\"HPI_PO_state.txt 1993 1\"] = [('UT', 117.69), ('OR', 116.94)]\n answer[\"HPI_PO_state.txt 1993 3\"] = [('UT', 128.49), ('CO', 125.16)]\n answer[\"HPI_PO_state.txt 1993 None\"] = [('UT', 125.77499999999999), ('CO', 122.3775)]\n answer[\"HPI_PO_state.txt 1997 1\"] = [('OR', 162.61), ('MT', 162.09)]\n answer[\"HPI_PO_state.txt 1997 3\"] = [('OR', 166.34), ('CO', 162.8)]\n answer[\"HPI_PO_state.txt 1997 None\"] = [('OR', 164.875), ('MT', 162.20499999999998)]\n answer[\"HPI_PO_state.txt 2010 1\"] = [('MT', 298.92), ('WY', 281.91)]\n answer[\"HPI_PO_state.txt 2010 3\"] = [('MT', 293.55), ('WY', 281.33)]\n 
answer[\"HPI_PO_state.txt 2010 None\"] = [('MT', 292.9875), ('WY', 281.6325)]\n\n for year in [ 1993, 1997, 2010]:\n for qtr in [ 1, 3, None]:\n\n if qtr != None:\n results = periodRanking.quarter_data( data, year, qtr )\n else:\n results = periodRanking.annual_data( indexTools.annualize( data), year )\n key = fname + \" \" + str(year) + \" \" + str(qtr) \n #print( key )\n #if key in answer:\n print( fname, year, qtr, \":\", ( results[1:3] == answer[ key] ))\n #else:\n # print( fname, year, qtr, \":\", \"incorrect\", results[1:3] )\n return", "def extractData(self) -> Dict[str, str]:\n if self.hasScore():\n result = {}\n result[\"date\"] = self.getMatchDate()\n result[\"score\"] = self.getMatchScore()\n for team in self.TEAMS:\n result[\"team_\" + team] = self.getSquadName(team)\n\n return result\n return None", "def serialize(self):\n return{\n # 'date': self.date,\n 'q1': self.q1,\n 'q2': self.q2,\n 'q3': self.q3,\n 'q4': self.q4,\n 'finalscore': self.finalscore,\n 'id': self.id,\n }", "def process_question(qu):\n\n ## global ranking\n rank_info = {}\n rank_info_k = [\"viewcount\",\"score\",\"favoritecount\"]\n for k in rank_info_k:\n rank_info[k] = int(qu[k])\n qu.pop(k,None)\n\n rank_info[\"creationdate\"] = qu[\"creationdate\"]\n\n if qu[\"acceptedanswer\"]:\n qu[\"acceptedanswer\"] = list(qu[\"acceptedanswer\"])\n else:\n qu[\"acceptedanswer\"] = []\n\n qu.pop('comments',None) # discard comments, maybe add back later\n qu[\"rank_info\"] = rank_info\n\n return qu", "def make_params(query: str = \"bangladesh floods\", site: str = \"www.thedailystar.net\", date_start: str = '1/1/2020',\n date_end: str = '1/1/2021', num_results: int = 100, paper: str = 'theDailyStar') -> (dict,dict):\n query_r = {\n 'query': query,\n 'paper': paper,\n 'date_range': [date_start, date_end]\n }\n params = {\n \"engine\": \"google\",\n \"q\": \"{} site:{}\".format(query, site),\n \"google_domain\": \"google.com\",\n \"gl\": \"bd\",\n \"hl\": \"en\",\n \"tbm\": \"nws\",\n 'filter':'0',\n \"num\": num_results,\n \"tbs\": \"cdr:1,cd_min:{},cd_max:{}\".format(date_start, date_end),\n \"api_key\": os.getenv('SERPAPI_KEY')\n }\n return query_r, params", "def __init_q_values(self, game_state):\n encoded_game_state = self.__encode_state(game_state)\n if encoded_game_state in self.q_values:\n return\n self.q_values[encoded_game_state] = {}\n for free_seat in self.__get_free_seats(game_state):\n self.q_values[encoded_game_state][free_seat] = (self.INITIAL_STATE_VALUE, 0)", "def _setData(self):\n #offset = datetime.timedelta(prefs.getNoOfDaysBeforeQuestionSchedule())\n date_formatter = date.getLocaleFormatter(self.request, \"date\", \"long\")\n def _q_data_item(q):\n item = {}\n item[\"qid\"]= \"q_%s\" % q.question_id\n if q.question_number:\n item[\"subject\"] = u\"Q %s %s\" % (q.question_number, q.short_name)\n else:\n item[\"subject\"] = q.short_name\n item[\"title\"] = q.short_name\n item[\"result_item_class\"] = \"workflow-state-%s\" % q.status\n item[\"url\"] = url.set_url_context(\"questions/obj-%s\" % q.question_id)\n item[\"status\"] = misc.get_wf_state(q)\n item[\"status_date\"] = date_formatter.format(q.status_date)\n item[\"owner\"] = \"%s %s\" %(q.owner.first_name, q.owner.last_name)\n item[\"type\"] = _(q.type)\n item[\"to\"] = q.ministry.short_name\n return item\n self._data = [ _q_data_item(question) for question in self.query.all() ]", "def as_dict(self):\n\n data = {}\n data['text'] = self.question\n data['tier'] = self._get_points(int(self.game_round), int(self.tier))\n try:\n data['source'] = 
self.source\n except AttributeError:\n data['source'] = False\n print self.question\n print self.answers\n data['answers'] = [\n {'text': answer[False]} if answer.has_key(False) \\\n else {'text': answer[True], 'right': True} \\\n for answer in self.answers\n ]\n if hasattr(self, 'media'):\n def gen_questions():\n q_data = {}\n for f in self.media['question']:\n q_data[self.__type_by_extension(\n os.path.sep.join(os.path.join([self.media_path, f]))\n )] = os.sep.join([self.web_root, f])\n return q_data\n def gen_explanation():\n \"\"\"Sorry, hacky. Quick fix required only 1st element is taken\"\"\"\n f = self.media['explanation'][0]\n k = self.__type_by_extension(os.path.sep.join(\n os.path.join([self.media_path, f])))\n v = [os.sep.join([self.web_root, expl]) \\\n for expl in self.media['explanation']]\n if v:\n v = v[0]\n else:\n v = \"\"\n return {'explanation': {k: v}}\n #): os.sep.join([self.web_root, f])\n\n #[os.sep.join([self.web_root, expl]) \\\n # for expl in self.media['explanation']]}\n def k_not_found():\n raise KeyError(\"Media keyword not found\")\n\n for k in self.media.keys():\n m_data = dict(\n question = gen_questions,\n explanation= gen_explanation,\n k_not_found = \"lambda x: pass\",\n ).get(k, 'k_not_found')()\n for key, value in m_data.items():\n data[key] = value\n return data", "def build_response_dict(self):\n return {\n \"release\": self.settings['bookstore'][\"release\"],\n \"features\": self.settings['bookstore'][\"features\"],\n }", "def fill_testing_dates(self):\r\n \r\n now = datetime.now()\r\n month = now.strftime('%m')\r\n year = now.year \r\n most_recent_date = '{}-{}-01'.format(year, month)\r\n self.testing_dates[1] = {'cv_start': '1972-01-01', \r\n 'cv_end': '1975-12-01', \r\n 'pred_start': '1976-01-01',\r\n 'pred_end': '1981-07-01'}\r\n self.testing_dates[2] = {'cv_start': '1976-01-01', \r\n 'cv_end': '1981-07-01', \r\n 'pred_start': '1981-08-01',\r\n 'pred_end': '1983-07-01'}\r\n self.testing_dates[3] = {'cv_start': '1976-01-01', \r\n 'cv_end': '1983-07-01', \r\n 'pred_start': '1983-08-01',\r\n 'pred_end': '1992-12-01'}\r\n self.testing_dates[4] = {'cv_start': '1983-08-01', \r\n 'cv_end': '1992-12-01', \r\n 'pred_start': '1993-01-01',\r\n 'pred_end': '2003-07-01'}\r\n self.testing_dates[5] = {'cv_start': '1993-01-01', \r\n 'cv_end': '2003-07-01', \r\n 'pred_start': '2003-08-01',\r\n 'pred_end': '2010-09-01'}\r\n self.testing_dates[6] = {'cv_start': '2003-08-01', \r\n 'cv_end': '2010-09-01', \r\n 'pred_start': '2010-10-01',\r\n 'pred_end': '2021-07-01'}\r\n self.testing_dates[7] = {'cv_start': '2010-10-01', \r\n 'cv_end': '2021-07-01', \r\n 'pred_start': '2021-08-01',\r\n 'pred_end': most_recent_date}", "def create_dicts_for_results(dict_all_embeddings, dict_mission, our_initial, n):\r\n keys_ours, keys_state_of_the_art = divide_to_keys(dict_all_embeddings)\r\n keys = list(dict_all_embeddings.keys())\r\n\r\n list_dicts = []\r\n\r\n for key in keys:\r\n if key in keys_ours:\r\n embd_algo = dict_all_embeddings[key][1]\r\n regression = dict_all_embeddings[key][0]\r\n initial = our_initial\r\n else:\r\n embd_algo = key\r\n regression = \"\"\r\n initial = [n]\r\n t = round(dict_all_embeddings[key][2], 3)\r\n dict_results_by_arr = dict_mission[key]\r\n ratio_arr = list(dict_results_by_arr.keys())\r\n for r in ratio_arr:\r\n all_micro = dict_results_by_arr[r][0]\r\n all_macro = dict_results_by_arr[r][1]\r\n all_auc = dict_results_by_arr[r][3]\r\n for i in range(len(initial)):\r\n std_micro, std_macro, std_auc = calculate_std(r, i, dict_mission, keys_ours, 
keys_state_of_the_art)\r\n if key in keys_ours:\r\n t = round(dict_all_embeddings[key][8][i])\r\n initial_size = initial[i]\r\n test_ratio = r\r\n micro_f1 = float(round(all_micro[i], 3))\r\n macro_f1 = float(round(all_macro[i], 3))\r\n auc = float(round(all_auc[i], 3))\r\n if key in keys_state_of_the_art:\r\n initial_size = \"\"\r\n dict_results = {\"initial size\": initial_size, \"embed algo\": embd_algo, \"regression\": regression,\r\n \"test\": test_ratio, \"micro-f1\": str(micro_f1)+\"+-\"+std_micro,\r\n \"macro-f1\": str(macro_f1)+\"+-\"+std_macro, \"auc\": str(auc)+\"+-\"+std_auc, \"time\": t}\r\n list_dicts.append(dict_results)\r\n return list_dicts", "def initDictionary(bands):\r\n for x in bands:\r\n d[\"{}\".format(x)] = {ProdCost: [], AlbumSales: []}", "def dataExtract(queryResults):\n days = ['MondayCollect',\n 'TuesdayCollect',\n 'WednesdayCollect',\n 'ThursdayCollect',\n 'FridayCollect',\n 'SaturdayCollect',\n 'SundayCollect']\n\n #counting the instances of bin collections\n parkCount = 0\n roadingCount = 0\n otherCount = 0\n\n #output totals of bin collections\n parkOutput = []\n roadingOutput = []\n otherOutput = []\n \n #iterate over each day\n for day in days:\n \n #iterate over the number of bins\n for i in range(len(queryResults)):\n \n #check if the bin was collected on the day...\n if str(queryResults[i]['attributes'][day]).strip().lower() == 'yes':\n \n #unknown formatting issue with the data, these lines fix it\n strResult = str(queryResults[i]['attributes']['Owner'])\n strResultForm = strResult.lower().strip()\n \n #update the counts if True\n if strResultForm == 'roading':\n roadingCount += 1\n elif strResultForm == 'parks':\n parkCount += 1\n elif strResultForm == 'private':\n otherCount += 1\n else:\n otherCount +=1\n\n #print \"Day: {} \\nparkCount: {} \\nroadingCount: {} \\notherCount: {} \\n\\n\".format(day,parkCount,roadingCount,otherCount)\n \n parkOutput.append(parkCount)\n roadingOutput.append(roadingCount)\n otherOutput.append(otherCount)\n \n parkCount = 0\n roadingCount =0\n otherCount =0\n \n return parkOutput,roadingOutput,otherOutput", "def qasmCircuitResults(self):\n returnedDictionary={}\n self.circutDrawing = self.draw()\n self.blochSpheres=self.separatedBlochSpheres()\n returnedDictionary[\"wires\"]=self.num_qubits\n returnedDictionary[\"probabilities\"] = self.separatedProbabilities()\n #returnedDictionary[\"blochSpheres\"] = self.separatedBlochSpheres()\n returnedDictionary[\"diracNotation\"] = self.diracNotation()\n returnedDictionary['chart'] = self.graph()\n returnedDictionary[\"link\"] = \"\"\n #returnedDictionary[\"qasmRows\"] = np.transpose(cols).tolist()\n \n if self.API_TOKEN != \"\":\n returnedDictionary[\"link\"] = self.runOnIBMQ()\n \n return returnedDictionary", "def get_soup_general_data(soup):\n data_dict = {}\n\n name = soup.find(class_='product_title')\n if name:\n data_dict['name_of_game'] = name.h1.text\n\n pub = soup.find('li', class_='summary_detail publisher')\n if pub:\n data_dict['publisher'] = pub.a.text.strip()\n\n rel_date = soup.find('li', class_='summary_detail release_data')\n if rel_date:\n rel_date = rel_date.find('span', class_='data')\n if rel_date:\n data_dict['release_date'] = rel_date.text.strip()\n\n num_p = soup.find(\"li\", class_=\"summary_detail product_players\")\n if num_p:\n data_dict['num_players'] = num_p.find(class_=\"data\").text\n\n genres = soup.find(\"li\", class_='summary_detail product_genre')\n if genres:\n genres = genres.find_all('span', class_='data')\n data_dict['genres'] = [genre.text 
for genre in genres]\n\n age = soup.find(\"li\", class_=\"summary_detail product_rating\")\n if age:\n data_dict['age_rating'] = age.find('span', class_=\"data\").text\n\n return data_dict", "def extract_games(self) -> Dict[int, Dict[str, Any]]:\n optadocument = self._get_doc()\n attr = assertget(optadocument, '@attributes')\n matchdata = assertget(optadocument, 'MatchData')\n matches = {}\n for match in matchdata:\n matchattr = assertget(match, '@attributes')\n matchinfo = assertget(match, 'MatchInfo')\n matchinfoattr = assertget(matchinfo, '@attributes')\n game_id = int(assertget(matchattr, 'uID')[1:])\n matches[game_id] = dict(\n # Fields required by the base schema\n game_id=game_id,\n competition_id=int(assertget(attr, 'competition_id')),\n season_id=int(assertget(attr, 'season_id')),\n game_day=int(assertget(matchinfoattr, 'MatchDay')),\n game_date=datetime.strptime(assertget(matchinfo, 'Date'), '%Y-%m-%d %H:%M:%S'),\n # home_team_id=see below,\n # away_team_id=see below,\n # Optional fields\n # home_score=see below,\n # away_score=see below,\n # duration=?\n # referee=?\n # venue=?,\n # attendance=?\n # home_manager=?\n # away_manager=?\n )\n teamdata = assertget(match, 'TeamData')\n for team in teamdata:\n teamattr = assertget(team, '@attributes')\n side = assertget(teamattr, 'Side')\n teamid = assertget(teamattr, 'TeamRef')\n score = assertget(teamattr, 'Score')\n if side == 'Home':\n matches[game_id]['home_team_id'] = int(teamid[1:])\n matches[game_id]['home_score'] = int(score)\n else:\n matches[game_id]['away_team_id'] = int(teamid[1:])\n matches[game_id]['away_score'] = int(score)\n return matches", "def get_faulty_scenario_data():\n return [\n {\n \"population_count\": 100,\n \"county\": \"oxford\",\n \"season\": \"cold_month\",\n \"year\": 2017,\n },\n {\n \"population_count\": 150,\n \"county\": \"oxford\",\n \"season\": \"spring_month\",\n \"year\": 2017,\n },\n {\n \"population_count\": 200,\n \"county\": \"oxford\",\n \"season\": \"hot_month\",\n \"year\": 2017,\n },\n {\n \"population_count\": 210,\n \"county\": \"oxford\",\n \"season\": \"fall_month\",\n \"year\": 2017,\n },\n ]", "def get_county() -> Dict:\n model = get_data_model()\n\n chart_ids = {\n \"cases\": \"Eq6Es\",\n \"deaths\": \"bSxdG\",\n \"age\": \"zSHDs\",\n \"gender\": \"FEciW\",\n \"race_eth\": \"aBeEd\",\n \"tests\": \"7sHQq\",\n }\n # The time series data for negative tests is gone, so I've just scraped positive test data using the new chart referenced above.\n\n with MarinDashboardPage() as page:\n model['name'] = \"Marin County\"\n model['update_time'] = datetime.now(tz=timezone.utc).isoformat()\n model[\"meta_from_baypd\"] = \"\"\n model['source_url'] = page.url\n model['meta_from_source'] = get_chart_meta(page, chart_ids.values())\n\n model[\"series\"][\"cases\"] = get_series_data(page, chart_ids[\"cases\"], ['Date', 'Total Cases', 'Total Recovered*'], \"cumul_cases\", 'Total Cases', 'cases')\n model[\"series\"][\"deaths\"] = get_series_data(page, chart_ids[\"deaths\"], ['Event Date', 'Total Hospitalizations', 'Total Deaths'], \"cumul_deaths\", 'Total Deaths', 'deaths', date_column='Event Date')\n\n model[\"series\"][\"tests\"] = get_test_series(page, chart_ids[\"tests\"])\n model[\"case_totals\"][\"age_group\"], model[\"death_totals\"][\"age_group\"] = get_breakdown_age(page, chart_ids[\"age\"])\n model[\"case_totals\"][\"gender\"], model[\"death_totals\"][\"gender\"] = get_breakdown_gender(page, chart_ids[\"gender\"])\n model[\"case_totals\"][\"race_eth\"], model[\"death_totals\"][\"race_eth\"] 
= get_breakdown_race_eth(page, chart_ids[\"race_eth\"])\n\n return model", "def prepare_data_with_warehouse(self,from_date,to_date,warehouses,all_products):\n data_dict = {}\n stock_quant_obj=self.env['stock.quant']\n for warehouse in warehouses:\n all_locations = self.get_all_locations(warehouse)\n if not all_locations:\n continue\n \n #here we are finding the opening stock for these we are using base query\n #of inventory at date v10\n result = self.get_product_qty(all_locations,from_date)\n qty_dict = dict((x,y) for x, y in result)\n \n for product in all_products:\n last_sales = ''\n qty_purchase_in_duration = 0\n qty_sales_in_duration = 0\n last_purchase_date = ''\n scrap_location_qty = 0\n adjusted_qty_in_duration = 0\n warehouse_out_qty = 0\n warehouse_in_qty = 0\n# here from result of inventory at date we are seaching for specific product.\n opening_product_qty = qty_dict.get(product.id)\n\n #finding last sales qty\n last_sales = self.find_last_sales_qty(from_date,to_date,warehouse,all_locations,product)\n #finding last purchase date of product\n last_purchase_date = self.find_last_purchase_date(from_date,to_date,all_locations,product)\n #fiding date purchase qty in duration for specific product\n qty_purchase_in_duration = self.find_purchase_qty_in_duration(from_date,to_date,all_locations,product)\n #fiding scrap qty of precific product\n scrap_location_qty = self.find_scap_location_qty(from_date,to_date,product,all_locations)\n #finding sales qty in duration\n qty_sales_in_duration = self.find_sale_qty_in_duration(from_date,to_date,warehouse,all_locations,product)\n #fidning adjusted qty in duration\n adjusted_qty_in_duration = self.find_adjusted_qty_in_duration(from_date, to_date, product, all_locations)\n \n dest_location_lst = self.get_other_wahouse_locations(warehouse)\n \n if any(all_locations) and any(dest_location_lst):\n #fidning warehouse in qty \n warehouse_in_qty = self.find_warehouse_transer_in_qty(product, all_locations, dest_location_lst,from_date,to_date)\n #fidning warehouse out qty for specific product.\n warehouse_out_qty = self.find_warehouse_transer_out_qty(product, all_locations, dest_location_lst,from_date,to_date)\n \n if warehouse_out_qty:\n warehouse_out_qty = warehouse_out_qty and warehouse_out_qty[0][0] or ''\n if warehouse_in_qty:\n warehouse_in_qty = warehouse_in_qty and warehouse_in_qty[0][0] or ''\n \n if adjusted_qty_in_duration:\n adjusted_qty_in_duration = adjusted_qty_in_duration and adjusted_qty_in_duration[0][0] or '' \n if scrap_location_qty:\n scrap_location_qty = scrap_location_qty and scrap_location_qty[0][0] or ''\n \n # if qty_sales_in_duration:\n # qty_sales_in_duration = qty_sales_in_duration and qty_sales_in_duration[0][0] or ''\n # if qty_purchase_in_duration:\n # qty_purchase_in_duration = qty_purchase_in_duration[0][0] or ''\n if last_sales:\n last_sales = datetime.strptime(last_sales and last_sales[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if last_purchase_date:\n last_purchase_date = datetime.strptime(last_purchase_date and last_purchase_date[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if data_dict.has_key(warehouse.id):\n data_lst=data_dict.get(warehouse.id)\n data_lst.append({'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,'last_sales':last_sales or '',\n 'last_purchase_date':last_purchase_date or '','qty_purchase_in_duration':qty_purchase_in_duration or 
0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0\n ,'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0 \n })\n data_dict.update({warehouse.id:data_lst})\n continue\n data_dict.update({warehouse.id:[{'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,\n 'last_sales':last_sales or '','last_purchase_date':last_purchase_date or '',\n 'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,\n 'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0\n }]})\n return data_dict", "def _build_data(self):\n licence_types = [('all', 'All')] + [(lt.pk, lt.display_name) for lt in LicenceType.objects.all()]\n data = {\n 'applications': {\n 'columnDefinitions': [],\n 'filters': {\n 'licenceType': {\n 'values': licence_types,\n },\n 'status': {\n 'values': [],\n }\n },\n 'ajax': {\n 'url': ''\n }\n },\n 'licences': {\n 'columnDefinitions': [],\n 'filters': {\n 'licenceType': {\n 'values': licence_types,\n },\n },\n 'ajax': {\n 'url': ''\n }\n },\n 'returns': {\n 'columnDefinitions': [],\n 'filters': {\n 'licenceType': {\n 'values': licence_types,\n },\n },\n 'ajax': {\n 'url': ''\n }\n }\n }\n return data", "def __init__(self, data, team_criteria):\n self.data = data\n self.athletes = dl.get_athletes(data)\n self.data_engine = dict()\n for squad in dl.get_squads(data):\n self.data_engine[squad[\"id\"]] = {\n \"team_criteria\": deepcopy(team_criteria),\n \"team_members\": list()}", "def get_day_query_info():\n all_rt_heat_metric_list = get_rt_day_query_count()\n day_query_rt_dict = {}\n for each_rt in all_rt_heat_metric_list:\n query_list = []\n dataset_id = each_rt[\"key\"]\n query_count = each_rt[\"doc_count\"]\n for each_appcode in each_rt[\"app_count\"][\"buckets\"]:\n app_code = each_appcode[\"key\"]\n app_query_count = each_appcode[\"doc_count\"]\n for each_day in each_appcode[\"agg_by_day\"][\"buckets\"]:\n timestamp = each_day[\"key\"] / 1000\n time_str = each_day[\"key_as_string\"]\n day_query_count = each_day[\"doc_count\"]\n query_list.append(\n {\n \"dataset_id\": dataset_id,\n \"app_code\": app_code,\n \"timestamp\": timestamp,\n \"time_str\": time_str,\n \"day_query_count\": day_query_count,\n \"app_query_count\": app_query_count,\n }\n )\n day_query_rt_dict[dataset_id] = {\n \"query_list\": query_list,\n \"query_count\": query_count,\n }\n # 有查询量的rt\n day_query_rt_list = list(day_query_rt_dict.keys())\n return day_query_rt_dict, day_query_rt_list", "def prepare_data_with_location(self,from_date,to_date,locations,all_products):\n data_dict = {}\n stock_quant_obj=self.env['stock.quant']\n for loc in locations:\n all_locations = self.get_all_locations(warehouse=False, location=loc)\n if not all_locations:\n continue\n #here we are finding the opening stock for these we are using base query\n #of inventory at date v10\n result = self.get_product_qty(all_locations,from_date)\n qty_dict = dict((x,y) for x, y in result)\n \n for product in all_products:\n last_sales = ''\n qty_purchase_in_duration = 0\n qty_sales_in_duration = 0\n last_purchase_date = ''\n scrap_location_qty = 0\n adjusted_qty_in_duration = 0\n 
warehouse_out_qty = 0\n warehouse_in_qty = 0\n# here from result of inventory at date we are seaching for specific product.\n opening_product_qty = qty_dict.get(product.id)\n\n #finding last sales qty\n last_sales = self.find_last_sales_qty(from_date,to_date,False,all_locations,product)\n #finding last purchase date of product\n last_purchase_date = self.find_last_purchase_date(from_date,to_date,all_locations,product)\n #fiding date purchase qty in duration for specific product\n qty_purchase_in_duration = self.find_purchase_qty_in_duration(from_date,to_date,all_locations,product)\n #fiding scrap qty of precific product\n scrap_location_qty = self.find_scap_location_qty(from_date,to_date,product,all_locations)\n #finding sales qty in duration\n qty_sales_in_duration = self.find_sale_qty_in_duration(from_date,to_date,False,all_locations,product)\n #fidning adjusted qty in duration\n adjusted_qty_in_duration = self.find_adjusted_qty_in_duration(from_date, to_date, product, all_locations)\n\n # dest_location_lst = self.get_other_wahouse_locations(warehouse)\n \n # if any(all_locations) and any(dest_location_lst):\n # #fidning warehouse in qty \n # warehouse_in_qty = self.find_warehouse_transer_in_qty(product, all_locations, dest_location_lst,from_date,to_date)\n # #fidning warehouse out qty for specific product.\n # warehouse_out_qty = self.find_warehouse_transer_out_qty(product, all_locations, dest_location_lst,from_date,to_date)\n \n # if warehouse_out_qty:\n # warehouse_out_qty = warehouse_out_qty and warehouse_out_qty[0][0] or ''\n # if warehouse_in_qty:\n # warehouse_in_qty = warehouse_in_qty and warehouse_in_qty[0][0] or ''\n \n if adjusted_qty_in_duration:\n adjusted_qty_in_duration = adjusted_qty_in_duration and adjusted_qty_in_duration[0][0] or '' \n if scrap_location_qty:\n scrap_location_qty = scrap_location_qty and scrap_location_qty[0][0] or ''\n \n # if qty_sales_in_duration:\n # qty_sales_in_duration = qty_sales_in_duration and qty_sales_in_duration[0][0] or ''\n # if qty_purchase_in_duration:\n # qty_purchase_in_duration = qty_purchase_in_duration or ''\n if last_sales:\n last_sales = datetime.strptime(last_sales and last_sales[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if last_purchase_date:\n last_purchase_date = datetime.strptime(last_purchase_date and last_purchase_date[0][0], '%Y-%m-%d %H:%M:%S').strftime('%d-%m-%Y') or ''\n \n if data_dict.has_key(loc.id):\n data_lst=data_dict.get(loc.id)\n data_lst.append({'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,'last_sales':last_sales or '',\n 'last_purchase_date':last_purchase_date or '','qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0\n ,'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0 \n })\n data_dict.update({loc.id:data_lst})\n continue\n data_dict.update({loc.id:[{'product':product,'sku':product.default_code or '','name':product.name,\n 'Cost':product.standard_price or '','sales_price':product.lst_price or '',\n 'opening_qty':opening_product_qty or 0,\n 'last_sales':last_sales or '','last_purchase_date':last_purchase_date or '',\n 'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 0,\n 
'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,\n 'warehouse_out_qty':warehouse_out_qty or 0\n }]})\n return data_dict", "def make_dict(\n nn,\n q_id,\n polarity,\n context_cond,\n cat,\n subcat,\n answer_info,\n bias_targets,\n version,\n notes,\n context,\n question,\n ans_list,\n ans_place,\n):\n this_dict = {\n \"example_id\": nn,\n \"question_index\": q_id,\n \"question_polarity\": polarity,\n \"context_condition\": context_cond,\n \"category\": cat,\n \"answer_info\": answer_info,\n \"additional_metadata\": {\n \"subcategory\": subcat,\n \"stereotyped_groups\": bias_targets,\n \"version\": version,\n \"source\": notes,\n },\n \"context\": context.strip(),\n \"question\": question.strip(),\n \"ans0\": ans_list[0],\n \"ans1\": ans_list[1],\n \"ans2\": ans_list[2],\n \"label\": ans_place,\n }\n return this_dict", "def results_from_db(self, gp_conn=None):\n d = dict.fromkeys(self.active_arms)\n for i in d:\n d[i] = {'success': None, 'trials': None}\n d[i]['success'] = random.randint(0, 100)\n d[i]['trials'] = random.randint(100, 200)\n return d", "def prepare_data(self):\n\n # Get days abbrs and month names lists\n self.month_names = cal_data.get_month_names()\n self.month_names_eng = cal_data.get_month_names_eng()\n self.days_abrs = cal_data.get_days_abbrs()\n\n # Today date\n self.active_date = cal_data.today_date_list()\n # Set title\n self.title = \"%s - %s\" % (self.month_names[self.active_date[1] - 1],\n self.active_date[2])\n\n # Quarter where current month in the self.quarter[1]\n self.get_quarter()", "def extract_data():\n raw_data = pd.read_csv(\"../../../resource/DataVisualization/vaccinations.csv\")\n raw_data = raw_data[[\"location\", \"date\", \"people_fully_vaccinated_per_hundred\"]]\n raw_data.date = pd.to_datetime(raw_data.date, format=\"%Y-%m-%d\")\n min_date = raw_data.date.min()\n raw_data.date = raw_data.date-min_date\n raw_data.date = pd.Series([x.days for x in raw_data.date])\n raw_data.drop(raw_data.loc[raw_data.people_fully_vaccinated_per_hundred.isnull()].index,\n axis=0, inplace=True)\n raw_data[\"people_fully_vaccinated_per_hundred\"] /= 100\n\n data_dict = dict()\n for country in raw_data.location.unique():\n if len(raw_data.loc[raw_data.location == country]) >= 100:\n tmp_data = raw_data.loc[raw_data.location == country]\n tmp_data.drop(\"location\", axis=1, inplace=True)\n data_dict[country] = {\"data\":tmp_data}\n else:\n raw_data.drop(raw_data.loc[raw_data.location ==\n country].index, inplace=True)\n return data_dict, min_date, raw_data", "def init_stock():\n return {\"five\":0, \"one\": 0, \"quarter\": 25, \"dime\": 25, \"nickel\":25}", "def get_meta_information() -> Dict:\n return {'name': 'NAS-Bench-201',\n 'references': ['Xuanyi Dong, Yi Yang',\n 'NAS-Bench-201: Extending the Scope of Reproducible Neural Architecture Search',\n 'https://openreview.net/forum?id=HJxyZkBKDr',\n 'https://github.com/D-X-Y/AutoDL-Projects'],\n }", "def generate_dict(values):\n if values[\n 0\n ]: # Checks if the checkbox is true or false, so if the measurement should be condcuted or not\n return {\n \"measure_every\": values[1],\n \"start_strip\": values[2],\n \"end_strip\": values[3],\n }\n else:\n return {}", "def initQ(self,states,actions):\r\n \r\n Q = {}\r\n \r\n for a in actions:\r\n for s in states:\r\n Q[(s,a)] = random.randrange(100)\r\n \r\n return(Q)", "def _make_meta(self):\n available_meas_times = list()\n available_intervals = list()\n drill_by = list()\n 
related = list()\n last_data_set_instance = dict()\n\n if self._data['report_save_historical_instances_ind'] == 'Y':\n # last measurement instance\n res = self._db.Query(\"\"\"SELECT *\n FROM report_data_set_instance\n WHERE\n `element_id`=%s\n AND `segment_value_id` = %s\n ORDER BY measurement_time DESC\n LIMIT 0, 1\"\"\",(self._id, self._segment_value_id))\n if res:\n last_data_set_instance = self._db.record[0]\n last_data_set_instance['measurement_time'] = self._formatter.format_date(last_data_set_instance['measurement_time'])\n\n # available measurement instances\n res = self._db.Query(\"\"\"SELECT *\n FROM report_data_set_instance\n WHERE\n `element_id`=%s\n AND `segment_value_id` = %s\n ORDER BY measurement_time DESC\"\"\",(self._id, self._segment_value_id))\n if res:\n for data_set_instance in self._db.record:\n data_set_instance['measurement_time'] = self._formatter.format_date(data_set_instance['measurement_time'])\n available_meas_times.append(data_set_instance)\n \n\n # get drill by. not for this version\n\n # available measurement intervals\n if self._data['report_primary_shared_dimension_id'] is None:\n self._data['report_primary_shared_dimension_id'] = 0\n\n self._db.Query(\"\"\"\n SELECT measurement_interval.*,\n dashboard_element.element_id\n FROM dashboard_element\n LEFT JOIN measurement_interval\n ON measurement_interval.measurement_interval_id = dashboard_element.measurement_interval_id\n WHERE\n (dashboard_element.`element_id`<>%s\n AND dashboard_element.measurement_interval_id <> %s\n AND dashboard_element.shared_measure_id = %s\n AND dashboard_element.`type` = 'internal report'\n AND ifnull(dashboard_element.report_used_for_drill_to_ind,'N') = %s\n AND ifnull(dashboard_element.report_primary_shared_dimension_id,0) = %s\n AND ifnull(dashboard_element.segment_id,0) = %s)\n OR\n dashboard_element.`element_id`=%s\n AND 3=4\n \n GROUP BY measurement_interval.measurement_interval_id\n ORDER BY\n measurement_interval.display_sequence,\n dashboard_element.name ASC\n \"\"\",\n (self._id,\n self._data['measurement_interval_id'],\n self._data['shared_measure_id'],\n self._data['report_used_for_drill_to_ind'],\n self._data['report_primary_shared_dimension_id'],\n self._data['segment_id'],\n self._id))\n\n\n for interval in self._db.record:\n interval['report_data_set_instance_id'] = 0\n available_intervals.append(interval)\n\n # see related\n self._db.Query(\"\"\"SELECT e.*\n FROM dashboard_element_topic det, dashboard_element e\n WHERE e.element_id = det.dashboard_element_id\n AND dashboard_element_id <> %s\n AND e.enabled_ind = 'Y'\n AND topic_id IN (select topic_id from dashboard_element_topic where dashboard_element_id = %s)\n UNION SELECT e.*\n FROM dashboard_element e, metric_drill_to_report m\n WHERE m.metric_element_id = e.element_id\n AND m.report_element_id = %s\n AND e.enabled_ind = 'Y'\n AND ifnull(e.segment_id,0) = %s\n \"\"\", (self._id, self._id, self._id, self._data['segment_id']))\n \n\n for related_element in self._db.record:\n if not related_element['segment_id']:\n related_element['segment_id'] = 0\n if related_element['segment_id'] == self._data['segment_id']:\n related_element['segment_value_id'] = self._segment_value_id\n else:\n related_element['segment_value_id'] = 0\n related.append(related_element)\n\n # elements displayed on the page\n before_dataset = list()\n after_dataset = list()\n \n charts_before_dataset = list()\n charts_after_dataset = list()\n \n \n # dataset table\n dataset_el = OrderedDict()\n dataset_el['element_id'] = ''\n 
dataset_el['element_type'] = 'dataset'\n dataset_el['element_name'] = ''\n dataset_el['element_desc'] = ''\n dataset_el['placement'] = ''\n dataset_el['sequence'] = 0\n dataset_el['show_ind'] = self._data['show_data_set_table_in_report_ind']\n \n \n # charts\n self._db.Query(\"\"\"SELECT *\n FROM report_data_set_chart \n WHERE \n `element_id`= %s\n AND \n (ISNULL(report_data_set_pivot_id)\n OR report_data_set_pivot_id = 0) \n ORDER BY display_sequence ASC\"\"\", (self._id, ))\n for chart in self._db.record:\n chart_el = OrderedDict()\n chart_el['element_id'] = chart['report_data_set_chart_id']\n chart_el['element_type'] = 'chart'\n chart_el['pivot_id'] = 0\n if chart['report_data_set_pivot_id']:\n chart_el['pivot_id'] = chart['report_data_set_pivot_id']\n chart_el['element_name'] = chart['name']\n chart_el['element_desc'] = chart['description']\n chart_el['placement'] = chart['chart_placement']\n chart_el['sequence'] = chart['display_sequence']\n chart_el['show_ind'] = chart['enabled_ind']\n if chart_el['placement'] == 'before table': \n charts_before_dataset.append(chart_el)\n else:\n charts_after_dataset.append(chart_el)\n \n # pivots\n self._db.Query(\"\"\"SELECT *\n FROM report_data_set_pivot\n WHERE\n `element_id`= %s\n ORDER BY display_sequence ASC\"\"\", (self._id, ))\n for pivot in self._db.record:\n before_pivot = list()\n after_pivot = list()\n #pivot_element = list()\n \n pivot_el = OrderedDict()\n pivot_el['element_id'] = pivot['report_data_set_pivot_id']\n pivot_el['element_type'] = 'pivot'\n pivot_el['element_name'] = pivot['name']\n pivot_el['element_desc'] = ''\n pivot_el['placement'] = pivot['pivot_table_report_placement']\n pivot_el['sequence'] = pivot['display_sequence']\n pivot_el['show_ind'] = pivot['enabled_ind']\n \n # charts\n self._db.Query(\"\"\"SELECT *\n FROM report_data_set_chart \n WHERE \n `element_id`= %s\n AND report_data_set_pivot_id = %s \n ORDER BY display_sequence ASC\"\"\",\n (self._id, pivot_el['element_id']))\n for chart in self._db.record:\n chart_el = OrderedDict()\n chart_el['element_id'] = chart['report_data_set_chart_id']\n chart_el['element_type'] = 'chart'\n chart_el['pivot_id'] = 0\n if chart['report_data_set_pivot_id']:\n chart_el['pivot_id'] = chart['report_data_set_pivot_id']\n chart_el['element_name'] = chart['name']\n chart_el['element_desc'] = chart['description']\n chart_el['placement'] = chart['chart_placement']\n chart_el['sequence'] = chart['display_sequence']\n chart_el['show_ind'] = chart['enabled_ind']\n if chart_el['placement'] == 'before table': \n before_pivot.append(chart_el)\n else:\n after_pivot.append(chart_el)\n pivot_element = before_pivot + [pivot_el] + after_pivot \n \n if pivot_el['placement'] == 'before data set':\n before_dataset += pivot_element\n else:\n after_dataset += pivot_element\n elements = charts_before_dataset + before_dataset + [dataset_el] + after_dataset + charts_after_dataset\n \n \n self._jfile.make_current_meta(last_data_set_instance,\n available_meas_times,\n available_intervals,\n drill_by,\n related,\n elements,\n self._segment_values)", "def build_teams(self):\n # get all nations\n all_nations = self.games.get_all_nations()\n\n # build teams for all participating nations in FIFA World Cup 2018\n bt = BuildTeams(self.squad_size, self.selected_attrs)\n bt.read_data()\n\n # a dict with a nations' name as a key and players' data as value\n self.teams = {}\n\n # build squad for every nation\n for nation in all_nations:\n team = bt.build_team(nation)\n # if we got enough players, add team\n if 
team.shape[0] >= bt.squad_size:\n #print(team)\n # convert pandas dataframe to matrix and flatten it\n self.teams[nation] = team.as_matrix().flatten()", "def create_dataframe(euctr_cond):\n def f(x):\n d = {}\n d['number_of_countries'] = x.eudract_number_with_country.nunique()\n d['min_end_date'] = x.date_of_the_global_end_of_the_trial.min()\n d['max_end_date'] = x.date_of_the_global_end_of_the_trial.max()\n d['comp_date'] = np.where(pd.notnull(x.date_of_the_global_end_of_the_trial),1,0).sum()\n d['has_results'] = x.trial_results.sum()\n d['includes_pip'] = x.trial_is_part_of_a_paediatric_investigation_plan.sum()\n d['single_blind'] = x.trial_single_blind.sum()\n d['not_single_blind'] = x.not_single_blind.sum()\n d['rare_disease'] = x.trial_condition_being_studied_is_a_rare_disease.sum()\n d['not_rare_disease'] = x.not_rare_disease.sum()\n d['rare_disease_blank'] = x.rare_disease_blank.sum()\n d['completed'] = np.where(x.end_of_trial_status == 'Completed', 1, 0).sum()\n d['ongoing'] = np.where((x.end_of_trial_status == 'Ongoing') | (x.end_of_trial_status == 'Restarted'), 1, 0).sum()\n d['terminated'] = np.where(x.end_of_trial_status == 'Prematurely Ended', 1, 0).sum()\n d['suspended'] = np.where((x.end_of_trial_status == 'Temporarily Halted') | (x.end_of_trial_status == 'Suspended by CA'), 1, 0).sum()\n d['other_status'] = np.where((x.end_of_trial_status == 'Not Authorised') | (x.end_of_trial_status == 'Prohibited by CA'), 1, 0).sum()\n d['no_status'] = np.where(pd.isnull(x.end_of_trial_status),1,0).sum()\n d['phase_1'] = x.trial_human_pharmacology_phase_i.sum()\n d['phase_2'] = x.trial_therapeutic_exploratory_phase_ii.sum()\n d['phase_3'] = x.trial_therapeutic_confirmatory_phase_iii.sum()\n d['phase_4'] = x.trial_therapeutic_use_phase_iv.sum()\n d['bioequivalence'] = x.trial_bioequivalence_study.sum()\n d['not_bioequivalence'] = x.not_bioequivalence_study.sum()\n d['healthy_volunteers'] = x.subject_healthy_volunteers.sum()\n d['not_healthy_volunteers'] = x.not_healthy_volunteers.sum()\n d['full_title'] = x.full_title.astype('str').min()\n d['abbreviated_title'] = x.abbreviated_title.astype('str').max()\n d['non_eu'] = x.non_eu.sum()\n return pd.Series(d)\n\n return euctr_cond.groupby('eudract_number').apply(f).reset_index()", "def make_output_df(self):\n df = pd.concat([pd.DataFrame(dat) for dat in [self.qdata, self.pdata]], axis=1)\n columns = np.hstack(([['{}{}'.format(x, c) for c in self.actions] for x in ['q', 'p']]))\n df.columns = columns\n df.insert(0, 'trial', np.arange(1, df.shape[0]+1))\n df['choice'] = self.choices\n df['feedback'] = self.feedback\n# r = np.array(self.bandits.rvalues)\n# p = np.array(self.bandits.preward)\n df['optimal'] = self.demand\n df.insert(0, 'agent', 1)\n self.data = df.copy()", "def buildIndustryData(self, dIndustryData):\n d = {}\n for id, dictIndustry in dIndustryData.iteritems():\n industryType = dictIndustry['industrytype']\n name = self.frame.mode.game.industrydata[industryType]['name']\n cities = self.frame.mode.game.industrydata[industryType]['cities']\n d[id] = '%s - %s' % (name, cities)\n return d", "def get_subs_dict(self, qnodes=None):\n #d = self.qparams.copy()\n d = self.qparams\n d.update(self.optimize_params(qnodes=qnodes))\n # clean null values\n subs_dict = {k: v for k, v in d.items() if v is not None}\n #print(\"subs_dict:\", subs_dict)\n return subs_dict", "def dynamic(question_quantized_dfs):\n all_transitions = {}\n\n for i, qdf in enumerate(question_quantized_dfs):\n q = qdf.iloc[:, utils.SKIP_COLUMNS:] # don't work on audio_rms 
column\n\n all_transitions[i] = OrderedDict()\n\n for au in q:\n all_transitions[i][au] = np.zeros((4, 4))\n\n for (x, y), c in Counter(zip(q[au], q[au][1:])).items():\n all_transitions[i][au][x - 1, y - 1] += c # TODO some warning about non-integer index\n\n # Concatenate all AU triples right, then all question down\n temp1 = []\n for k, q_transitions in all_transitions.items():\n temp2 = []\n\n for au, trans in q_transitions.items():\n trans_features = list(transition_features(trans))\n trans_index = [au + '_change_ratio', au + '_slow_change_ratio', au + '_fast_change_ratio']\n\n temp2.append(pd.DataFrame(trans_features, index=trans_index).T)\n temp1.append(pd.concat(temp2, axis=1))\n\n question_dynamic_features = pd.concat(temp1, axis=0)\n\n # Fix index\n question_dynamic_features.index = range(len(question_quantized_dfs))\n\n return question_dynamic_features", "def generate_data(cls):\n cls.results = {}\n cls.pre_process()\n today = datetime.today()\n skip = None\n values = map(lambda a: a[0], cls.data)\n if len(values) == 0 and 'citation' not in cls.config_data_name:\n skip = True\n weights= map(lambda a: a[1], cls.data)\n if cls.config_data_name == 'reads_histogram':\n bins = range(1996, today.year+2)\n elif cls.min_year:\n bins = range(cls.min_year, today.year+2)\n else:\n try:\n bins = range(min(values),max(values)+2)\n except:\n skip = True\n if not skip:\n refereed_values = map(lambda a: a[0], cls.refereed_data)\n refereed_weights= map(lambda a: a[1], cls.refereed_data)\n # get the regular histogram\n cls.value_histogram = histogram(values,bins=bins)\n cls.refereed_value_histogram = histogram(refereed_values,bins=bins)\n # get the normalized histogram\n cls.normalized_value_histogram = histogram(values,bins=bins,weights=weights)\n cls.refereed_normalized_value_histogram = histogram(refereed_values,bins=bins,weights=refereed_weights)\n else:\n cls.value_histogram = False\n cls.results[str(today.year)] = \"0:0:0:0\"\n cls.post_process()", "def clean_cases(data):\n newdata=[]\n #Add up Bucks Data\n bucks=defaultdict(list)\n for i in data:\n if i['areaName'] in ['Chiltern','Aylesbury Vale','South Bucks','Wycombe']:\n bucks[i['date']].append(i)\n else:\n newdata.append(i)\n log.debug(bucks)\n for _date,_all in bucks.items():\n item={'areaName': 'Buckinghamshire','areaCode':'E06000060','specimenDate':_date}\n item['newCasesBySpecimenDate']=sum([x['newCasesBySpecimenDate'] for x in _all])\n item['cumCasesBySpecimenDate']=sum([x['cumCasesBySpecimenDate'] for x in _all])\n newdata.append(item)\n\n return newdata", "def test_create_results_dict_1(self):\n dict = find_domains.create_results_dict(self.rps_results)\n with self.subTest():\n self.assertEqual(len(dict.keys()), 4)\n with self.subTest():\n self.assertEqual(len(dict[\"ABCDE\"]), 2)\n with self.subTest():\n self.assertEqual(len(dict[\"FGHIJ\"]), 2)", "def data_for_question(self, question_type):\n\t\treturn {}", "def action():\n compyear = datetime.today().year - 1\n onedays = collect_onedays(compyear)\n precs = {}\n qcount = 0\n for quiz in onedays:\n plist = ll_oneday_players(quiz)\n if plist:\n qcount += 1\n for llama in plist:\n if llama in precs:\n precs[llama] += 1\n else:\n precs[llama] = 1\n tuplelist = []\n for llama in precs:\n tuplelist.append([llama, precs[llama]])\n tuplelist = sorted(tuplelist, key=itemgetter(1), reverse=True)\n print(tuplelist)\n print(qcount)", "def _get_new_data(self, page_url, soup):\n data = {}\n data['url'] = page_url\n title = soup.find('dd', class_='lemmaWgt-lemmaTitle-title').find('h1')\n 
data['title'] = title.get_text()\n summary = soup.find('div', class_='lemma-summary')\n data['summary'] = summary.get_text()\n return data", "def extract_data(self):\n values = {}\n for injkey in self.data_sets.keys():\n values[injkey] = {}\n alldata = self.data_sets[injkey]\n paramkeys = alldata['params'].keys()\n for datakey in alldata.keys():\n if not datakey == 'params':\n values[injkey][datakey] = {}\n values[injkey][datakey]['metric_val'] = {}\n values[injkey][datakey]['metric_val']['vals'] = []\n for paramkey in paramkeys:\n values[injkey][datakey][paramkey] = {}\n values[injkey][datakey][paramkey]['vals'] = []\n trials = alldata[datakey]\n for trial_num in trials.keys():\n trial = trials[trial_num]\n values[injkey][datakey]['metric_val']['vals'] \\\n .append(trial['metric_val'])\n values[injkey][datakey]['metric_val']['type'] \\\n = trial['metric']\n values[injkey][datakey]['metric_val']['units'] \\\n = 'dimensionless'\n param_vals = trial['params']\n for param_name in param_vals.keys():\n val, units = self.parse_pint_string(\n pint_string=param_vals[param_name]\n )\n values[injkey][datakey][param_name]['vals'] \\\n .append(float(val))\n values[injkey][datakey][param_name]['units'] \\\n = units\n self.values = values", "def create_competition_stat_object():\n\n pga_northerntrust_2015 = {\"name\": \"pga_northerntrust_2015\", \"date\": [\"2015-02-16\", \"2015-02-22\"]}\n pga_ohlclassic_2015 = {\"name\": \"pga_ohlclassic_2015\", \"date\": [\"2014-11-10\", \"2014-11-16\"]}\n pga_pebblebeach_2015 = {\"name\": \"pga_pebblebeach_2015\", \"date\": [\"2015-02-09\", \"2015-02-15\"]}\n pga_arnoldpalmer_2015 = {\"name\": \"pga_arnoldpalmer_2015\", \"date\": [\"2015-03-16\", \"2015-03-22\"]}\n pga_pgachampionship_2015 = {\"name\": \"pga_pgachampionship_2015\", \"date\": [\"2015-08-10\", \"2015-08-16\"]}\n pga_barclays_2015 = {\"name\": \"pga_barclays_2015\", \"date\": [\"2015-08-24\", \"2015-08-30\"]}\n pga_playerschampions_2015 = {\"name\": \"pga_playerschampions_2015\", \"date\": [\"2015-05-04\", \"2015-05-10\"]}\n pga_bmw_2015 = {\"name\": \"pga_bmw_2015\", \"date\": [\"2015-09-14\", \"2015-09-20\"]}\n pga_puertoricoopen_2015 = {\"name\": \"pga_puertoricoopen_2015\", \"date\": [\"2015-03-02\", \"2015-03-08\"]}\n pga_bridgestone_2015 = {\"name\": \"pga_bridgestone_2015\", \"date\": [\"2015-08-03\", \"2015-08-09\"]}\n pga_quickenloans_2015 = {\"name\": \"pga_quickenloans_2015\", \"date\": [\"2015-07-27\", \"2015-08-02\"]}\n pga_cadillacchampionship_2015 = {\"name\": \"pga_cadillacchampionship_2015\", \"date\": [\"2015-03-02\", \"2015-03-08\"]}\n pga_rbccanadianopen_2015 = {\"name\": \"pga_rbccanadianopen_2015\", \"date\": [\"2015-07-20\", \"2015-07-26\"]}\n pga_cimbclassic_2015 = {\"name\": \"pga_cimbclassic_2015\", \"date\": [\"2014-10-20\", \"2014-10-26\"]}\n pga_rbcheritage_2015 = {\"name\": \"pga_rbcheritage_2015\", \"date\": [\"2015-04-13\", \"2015-04-19\"]}\n pga_farmersinsurance_2015 = {\"name\": \"pga_farmersinsurance_2015\", \"date\": [\"2015-02-02\", \"2015-02-08\"]}\n pga_cocacola_2015 = {\"name\": \"pga_cocacola_2015\", \"date\": [\"2015-09-21\", \"2015-09-27\"]}\n pga_sanderson_2015 = {\"name\": \"pga_sanderson_2015\", \"date\": [\"2014-11-03\", \"2014-11-09\"]}\n pga_deutchebank_2015 = {\"name\": \"pga_deutchebank_2015\", \"date\": [\"2015-08-31\", \"2015-09-07\"]}\n pga_shriners_2015 = {\"name\": \"pga_shriners_2015\", \"date\": [\"2014-10-13\", \"2014-10-19\"]}\n pga_frys_2015 = {\"name\": \"pga_frys_2015\", \"date\": [\"2014-10-06\", \"2014-10-12\"]}\n pga_sonyopen_2015 = 
{\"name\": \"pga_sonyopen_2015\", \"date\": [\"2015-01-12\", \"2015-01-18\"]}\n pga_greenbrier_2015 = {\"name\": \"pga_greenbrier_2015\", \"date\": [\"2015-06-29\", \"2015-07-05\"]}\n pga_thememorial_2015 = {\"name\": \"pga_thememorial_2015\", \"date\": [\"2015-06-01\", \"2015-06-07\"]}\n pga_hondaclassic_2015 = {\"name\": \"pga_hondaclassic_2015\", \"date\": [\"2015-02-23\", \"2015-03-01\"]}\n pga_theopen_2015 = {\"name\": \"pga_theopen_2015\", \"date\": [\"2015-07-13\", \"2015-07-19\"]}\n pga_houstonopen_2015 = {\"name\": \"pga_houstonopen_2015\", \"date\": [\"2015-03-30\", \"2015-04-05\"]}\n pga_usopen_2015 = {\"name\": \"pga_usopen_2015\", \"date\": [\"2015-06-15\", \"2015-06-21\"]}\n pga_hsbcchampions_2015 = {\"name\": \"pga_hsbcchampions_2015\", \"date\": [\"2014-11-03\", \"2014-11-09\"]}\n pga_valspar_2015 = {\"name\": \"pga_valspar_2015\", \"date\": [\"2015-03-09\", \"2015-03-15\"]}\n pga_humana_2015 = {\"name\": \"pga_humana_2015\", \"date\": [\"2015-01-19\", \"2015-01-25\"]}\n pga_wastemanagement_2015 = {\"name\": \"pga_wastemanagement_2015\", \"date\": [\"2015-01-26\", \"2015-02-01\"]}\n pga_hyundai_2015 = {\"name\": \"pga_hyundai_2015\", \"date\": [\"2015-01-05\", \"2015-01-12\"]}\n pga_wellsfargo_2015 = {\"name\": \"pga_wellsfargo_2015\", \"date\": [\"2015-05-11\", \"2015-05-17\"]}\n pga_johndeere_2015 = {\"name\": \"pga_johndeere_2015\", \"date\": [\"2015-07-06\", \"2015-07-12\"]}\n pga_wyndham_2015 = {\"name\": \"pga_wyndham_2015\", \"date\": [\"2015-08-17\", \"2015-08-23\"]}\n pga_masters_2015 = {\"name\": \"pga_masters_2015\", \"date\": [\"2015-04-16\", \"2015-04-12\"]}\n pga_zurich_2015 = {\"name\": \"pga_zurich_2015\", \"date\": [\"2015-04-20\", \"2015-04-26\"]}\n pga_valero_2015 = {\"name\": \"pga_valero_2015\", \"date\": [\"2015-03-23\", \"2015-03-29\"]}\n pga_crowneplaza_2015 = {\"name\": \"pga_crowneplaza_2015\", \"date\": [\"2015-05-18\", \"2015-05-24\"]}\n pga_attbyronnelson_2015 = {\"name\": \"pga_attbyronnelson_2015\", \"date\": [\"2015-05-25\", \"2015-05-31\"]}\n pga_fedexstjude_2015 = {\"name\": \"pga_fedexstjude_2015\", \"date\": [\"2015-06-08\", \"2015-06-14\"]}\n\n dates_1415 = [pga_northerntrust_2015 ,pga_ohlclassic_2015 ,pga_pebblebeach_2015 ,pga_arnoldpalmer_2015 ,\n pga_pgachampionship_2015 ,pga_barclays_2015 ,pga_playerschampions_2015 ,pga_bmw_2015 ,\n pga_puertoricoopen_2015 ,pga_bridgestone_2015 ,pga_quickenloans_2015 ,pga_cadillacchampionship_2015 ,\n pga_rbccanadianopen_2015 ,pga_cimbclassic_2015 ,pga_rbcheritage_2015 ,pga_farmersinsurance_2015 ,\n pga_cocacola_2015 ,pga_sanderson_2015 ,pga_deutchebank_2015 ,pga_shriners_2015 ,pga_frys_2015 ,\n pga_sonyopen_2015 ,pga_greenbrier_2015 ,pga_thememorial_2015 ,pga_hondaclassic_2015 ,pga_theopen_2015 ,\n pga_houstonopen_2015 ,pga_usopen_2015 ,pga_hsbcchampions_2015 ,pga_valspar_2015 ,pga_humana_2015 ,\n pga_wastemanagement_2015 ,pga_hyundai_2015 ,pga_wellsfargo_2015 ,pga_johndeere_2015 ,pga_wyndham_2015 ,\n pga_masters_2015 ,pga_zurich_2015 ,pga_valero_2015 ,pga_crowneplaza_2015 ,pga_attbyronnelson_2015 ,\n pga_fedexstjude_2015 ]\n\n return dates_1415", "def test_8_data_fetching_multiple(self):\n d = {'WorkoutType': 'Running',\\\n 'Minutes': 10.0,\\\n 'CaloriesBurned': 100.9}\n _ = self.fitness.insert_in_database(d, date_time=datetime.utcnow()+timedelta(days=1)+timedelta(minutes=1))\n d = {'WorkoutType': 'Jogging',\\\n 'Minutes': 10.0,\\\n 'CaloriesBurned': 100.9}\n _ = self.fitness.insert_in_database(d, date_time=datetime.utcnow()+timedelta(days=1)+timedelta(minutes=2))\n d = {'WorkoutType': 
'Dancing',\\\n 'Minutes': 10.0,\\\n 'CaloriesBurned': 100.9}\n _ = self.fitness.insert_in_database(d, date_time=datetime.utcnow()+timedelta(days=1)+timedelta(minutes=4))\n d1 = date.today() + timedelta(days=1)\n dt1 = datetime(d1.year, d1.month, d1.day) + timedelta(hours=8)\n result, success = self.fitness.get_columns_given_range(dt1,dt1+timedelta(days=1))\n\n self.assertEqual(len(result), 3)\n self.assertTrue(success)\n self.assertEqual(result[0]['WorkoutType'],'Running')\n self.assertEqual(result[1]['WorkoutType'],'Jogging')\n self.assertEqual(result[2]['WorkoutType'],'Dancing')", "def get_results(self):\n d = {}\n# r = {}\n for analyser in self.xml_tree.getroot():\n for child in analyser:\n if child.tag == 'all-records':\n for record in child:\n attributes = record.attrib\n sample = attributes['sampleId']\n assay_id = attributes['assayId']\n genotype = attributes['genotypeId']\n quality = attributes['description'].split('.')[0]\n if re.match(r'rs\\d+', assay_id):\n if sample in d:\n if assay_id in d[sample]:\n for allele in list(genotype):\n if allele not in d[sample][assay_id]['genotype']:\n d[sample][assay_id]['genotype'] += allele\n if quality not in d[sample][assay_id]['quality']:\n d[sample][assay_id]['quality'].append(quality)\n else:\n d[sample][assay_id] = {'genotype': genotype, 'quality': [quality]}\n else:\n d[sample] = {assay_id: {'genotype': genotype, 'quality': [quality]}}\n# if sample in r:\n# if assay_id in r[sample]:\n# for allele in list(genotype):\n# if allele not in r[sample][assay_id]:\n# r[sample][assay_id] += allele\n# else:\n# r[sample][assay_id] = genotype\n# else:\n# r[sample] = {assay_id: genotype}\n# for k, v in r.items():\n# for k1, v1, in v.items():\n# if len(v1) == 1:\n# v[k1] += v1\n# pprint.pprint(r)\n# df = pd.DataFrame.from_dict(r).transpose()\n# print(df)\n# df.to_excel('snpcheck.xlsx')\n return d", "def make_voting_results_data(categories, district_data = {}, state=48, district=7, leg_body='US-REP', \r\n election_year='2018', census_year='2016', district_config_file = 'static/data/district.json',\r\n voting_precincts_file=None, voting_results_file=None):\r\n print( \"\\nGetting election results per precinct\" )\r\n \r\n precinct_key = 'precinct'\r\n district_key = 'district'\r\n category = 'Voting Results'\r\n fields = []\r\n labels = {}\r\n \r\n # TODO if leg_body == 'STATE-REP' or leg_body == 'STATE-SEN':\r\n \r\n # TODO set presidential year versus congressional year\r\n election_result_fields = []\r\n if election_year == '2018':\r\n election_result_fields = [\r\n 'us_sen_rep',\r\n 'us_sen_dem',\r\n 'registered_voters',\r\n 'us_hou_rep',\r\n 'us_hou_dem',\r\n 'total_votes'\r\n ]\r\n if election_year == '2016':\r\n election_result_fields = [\r\n 'us_pres_rep',\r\n 'us_pres_dem',\r\n 'registered_voters',\r\n 'us_hou_dem',\r\n 'us_hou_rep',\r\n 'total_votes'\r\n ] \r\n \r\n # fields\r\n fields = []\r\n fields.extend(election_result_fields)\r\n fields.extend(['reg_per', 'dem_diff', 'dem_per', 'over_18', 'us_hou_dem_pot'])\r\n\r\n labels = {\r\n 'us_hou_dem_pot' : 'Democratic Potential',\r\n 'registered_voters' : 'Registered Voters',\r\n 'us_pres_rep' : 'US President Republican',\r\n 'us_pres_dem' : 'US President Democratic',\r\n 'us_sen_rep' : 'US Senate Republican',\r\n 'us_sen_dem' : 'US Senate Democratic',\r\n 'us_hou_rep' : 'US House Republican',\r\n 'us_hou_dem' : 'US House Democratic',\r\n 'total_votes' : 'Total Ballots Cast',\r\n 'dem_diff' : 'US House Democratic Difference',\r\n 'reg_per' : 'Registered Voters:18 Years+ %',\r\n 'dem_per' : 'US 
House Democrat Votes:18 Years+ %',\r\n 'over_18' : '18 Years and Over'\r\n }\r\n \r\n over_18 = float(district_data[census_year][district_key]['over_18'])\r\n \r\n state = \"{0:0>2}\".format(state)\r\n district = \"{0:0>2}\".format(district)\r\n \r\n state_abbr = str(states.mapping('fips', 'abbr')[state])\r\n district_abbr = leg_body + '-' + state_abbr + district\r\n geojson_path = 'static/geojson/'\r\n \r\n # read voting precincts \r\n if voting_precincts_file is None:\r\n find_voting_precincts_in_district(state=state, district=district, leg_body=leg_body)\r\n voting_precincts_file = get_voting_precincts_geojson_filename(\r\n state=state, district=district, leg_body=leg_body)\r\n voting_precincts = gpd.read_file(voting_precincts_file)\r\n \r\n # read voting results\r\n if voting_results_file is None:\r\n # TODO download voting results from Open Elections, \r\n # e.g., https://github.com/openelections/openelections-data-tx\r\n voting_results_file = 'static/data/20181106__tx__general__harris__precinct.csv'\r\n voting_results_data = pd.read_csv(voting_results_file)\r\n \r\n # add election results info (election years) to district_config file\r\n with open(district_config_file) as district_json:\r\n district_config = json.load(district_json)\r\n \r\n if 'election_years' not in district_config.keys():\r\n district_config['election_years'] = [election_year]\r\n else:\r\n if election_year not in district_config['election_years']:\r\n district_config['election_years'].append(election_year)\r\n \r\n if election_year not in district_config.keys():\r\n district_config[election_year] = [category]\r\n \r\n if category not in district_config[election_year]:\r\n district_config[election_year].append(category)\r\n \r\n # add election result categories\r\n if category not in categories.keys():\r\n categories[category] = {'fields': fields, 'labels': labels}\r\n \r\n # add voting results to district data\r\n if election_year not in district_data.keys():\r\n district_data[election_year] = { precinct_key: {} }\r\n if election_year in district_data.keys():\r\n if precinct_key not in district_data[election_year].keys():\r\n district_data[election_year][precinct_key] = {}\r\n\r\n # prepare the district data for disctrict-wide voting results\r\n if district_key not in district_data[election_year].keys():\r\n district_data[election_year][district_key] = {}\r\n \r\n # set all voting results for the disctrict to zero\r\n for field in fields:\r\n if field != 'over_18':\r\n district_data[election_year][district_key][field] = 0.0\r\n \r\n # standardize voting_results_data\r\n # convert party column to DEM or REP\r\n if len(voting_results_data[ voting_results_data['party'] == 'Republican' ]) > 0:\r\n voting_results_data.loc[ \r\n voting_results_data[ voting_results_data['party'] == 'Republican' ].index, \r\n 'party' ] = 'REP'\r\n if len(voting_results_data[ voting_results_data['party'] == 'Democratic' ]) > 0:\r\n voting_results_data.loc[ \r\n voting_results_data[ voting_results_data['party'] == 'Democratic' ].index, \r\n 'party' ] = 'DEM'\r\n \r\n # convert precinct column to int\r\n voting_results_data.drop(\r\n voting_results_data[ voting_results_data['precinct'] == 'TOTAL' ].index, \r\n inplace=True\r\n )\r\n voting_results_data['precinct'] = pd.to_numeric(voting_results_data['precinct'])\r\n \r\n peak_no_vote = 0\r\n \r\n # initialize a progress bar for processing the precincts\r\n total_precincts = len(voting_precincts)\r\n pbar = tqdm(\r\n total=total_precincts, initial=0, \r\n unit_scale=True, desc='Voting 
Precincts'\r\n )\r\n \r\n field_queries = {\r\n 'us_pres_rep' : [['office', 'President'], ['party', 'REP']],\r\n 'us_pres_dem' : [['office', 'President'], ['party', 'DEM']],\r\n 'us_sen_rep' : [['office', 'U.S. Senate'], ['party', 'REP']],\r\n 'us_sen_dem' : [['office', 'U.S. Senate'], ['party', 'DEM']],\r\n 'us_hou_rep' : [['office', 'U.S. House'], ['party', 'REP']],\r\n 'us_hou_dem' : [['office', 'U.S. House'], ['party', 'DEM']],\r\n 'registered_voters' : [['office', 'Registered Voters']],\r\n 'total_votes' : [['office', 'Ballots Cast']]\r\n }\r\n # dict for dataframe and excel file\r\n election_results = {}\r\n election_results['Precinct'] = []\r\n for field in fields:\r\n election_results[labels[field]] = []\r\n\r\n # get the voting results for each precinct\r\n for precIndex, precinct in voting_precincts.iterrows():\r\n geoid = precinct.PRECINCT\r\n election_results['Precinct'].append(int(geoid))\r\n if geoid not in district_data[election_year][precinct_key].keys():\r\n district_data[election_year][precinct_key][geoid] = {} \r\n \r\n for field in election_result_fields:\r\n # get the number of pres-rep votes in each precinct\r\n query_result = query_voting_results( voting_results_data, int(geoid), field_queries[ field ] )\r\n district_data[election_year][precinct_key][geoid][field] = query_result\r\n election_results[labels[field]].append( query_result )\r\n # get the total number of ballots cast\r\n if field == 'total_votes':\r\n total_votes = query_result\r\n \r\n # calculate the district wide total for field\r\n total = district_data[election_year][district_key][field]\r\n total = total + float(district_data[election_year][precinct_key][geoid][field])\r\n district_data[election_year][district_key][field] = total\r\n\r\n # calculate the democrat / republican difference\r\n field = 'dem_diff'\r\n dem = district_data[election_year][precinct_key][geoid]['us_hou_dem']\r\n rep = district_data[election_year][precinct_key][geoid]['us_hou_rep']\r\n district_data[election_year][precinct_key][geoid][field] = dem - rep\r\n election_results[labels[field]].append( dem - rep )\r\n\r\n # calculate the democrat percent turnout relative to the 18+ age population\r\n field = 'dem_per'\r\n over_18 = float(district_data[census_year][precinct_key][geoid]['over_18'])\r\n election_results[labels['over_18']].append( int(over_18) )\r\n if over_18 > 0.0: \r\n district_data[election_year][precinct_key][geoid][field] = int((float(dem) / over_18) * 100.0)\r\n election_results[labels[field]].append( int((float(dem) / over_18) * 100.0) )\r\n else:\r\n district_data[election_year][precinct_key][geoid][field] = 0\r\n election_results[labels[field]].append( 0 )\r\n\r\n # calculate the registred voter percent relative to the 18+ age population\r\n field = 'reg_per'\r\n reg = district_data[election_year][precinct_key][geoid]['registered_voters']\r\n if over_18 > 0.0:\r\n district_data[election_year][precinct_key][geoid][field] = int((float(reg) / over_18) * 100.0)\r\n election_results[labels[field]].append( int((float(reg) / over_18) * 100.0) )\r\n else:\r\n district_data[election_year][precinct_key][geoid][field] = 0\r\n election_results[labels[field]].append( 0 )\r\n \r\n no_vote = over_18 - total_votes\r\n if no_vote > peak_no_vote:\r\n peak_no_vote = no_vote\r\n pbar.update(1) \r\n\r\n pbar.close()\r\n\r\n\r\n # calculate democratic potential factor = normalized non-voters plus dem percentage\r\n for precIndex, precinct in voting_precincts.iterrows():\r\n geoid = precinct.PRECINCT\r\n \r\n dem = 
float(district_data[election_year][precinct_key][geoid]['us_hou_dem'])\r\n over_18 = float(district_data[census_year][precinct_key][geoid]['over_18'])\r\n # temporary format\r\n # if election_year == '2018':\r\n # total_votes = int(voting_results_data[ \r\n # (voting_results_data['PRECINCT'] == geoid) \r\n # ].iloc[0]['TOTAL'])\r\n # else:\r\n # openelections format here\r\n total_votes = query_voting_results( voting_results_data, int(geoid), field_queries[ 'total_votes' ] )\r\n \r\n no_vote = over_18 - total_votes\r\n rel_no_vote = float(no_vote) / float(peak_no_vote)\r\n field = 'us_hou_dem_pot'\r\n if over_18 > 0.0:\r\n dem_pot = ( (rel_no_vote + (dem / over_18) ) / 2.0 ) * 100.0\r\n else:\r\n dem_pot = ( rel_no_vote / 2.0 ) * 100.0\r\n district_data[election_year][precinct_key][geoid][field] = int(dem_pot)\r\n election_results[labels[field]].append( int(dem_pot) )\r\n\r\n # calculate district wide difference\r\n field = 'dem_diff'\r\n dem = int(district_data[election_year][district_key]['us_hou_dem'])\r\n rep = int(district_data[election_year][district_key]['us_hou_rep'])\r\n district_data[election_year][district_key][field] = int(dem - rep)\r\n\r\n # calculate district wide percentages\r\n field = 'dem_per'\r\n dem = float(district_data[election_year][district_key]['us_hou_dem'])\r\n over_18 = float(district_data[census_year][district_key]['over_18'])\r\n district_data[election_year][district_key][field] = int((dem / over_18) * 100.0)\r\n \r\n field = 'reg_per'\r\n reg = float(district_data[election_year][district_key]['registered_voters'])\r\n district_data[election_year][district_key][field] = int((reg / over_18) * 100.0)\r\n \r\n election_results = pd.DataFrame(election_results)\r\n\r\n excel_file = get_district_excel_filename(state, district, leg_body)\r\n election_results.to_excel(excel_file)\r\n\r\n # write the disctrict config to a file\r\n to_json(district_config, district_config_file)\r\n\r\n return categories, district_data", "def build_query_dict(self, term_list, issn_list, year_list, jlist):\n journal_frame = self.make_jlist(jlist)\n\n search_terms = self.build_search_terms(term_list)\n dict1 = {}\n #This loop goes through and sets up a dictionary key with an ISSN number\n\n for issn in issn_list:\n\n issn_terms = ' AND ISSN(' + issn + ')'\n dict2 = {}\n #This loop goes and attaches all the years to the outer loop's key.\n for year in year_list:\n\n year_terms = \"AND PUBYEAR IS \" + str(year)\n querystring = search_terms + year_terms + issn_terms\n\n dict2[year] = querystring\n\n dict1[issn] = dict2\n\n return dict1", "def create_results_dict(self, n_options):\n if n_options == 1:\n self.results_dict[0] = {\"configuration\": [\"user\"]}\n for name in self.results_metadata[\"names\"]:\n self.results_dict[0][name] = []\n else:\n self.results_dict = {}\n for i in range(n_options):\n self.results_dict[i] = {\"configuration\": []}\n for name in self.results_metadata[\"names\"]:\n self.results_dict[i][name] = []", "def prepare_data(qids_raw, conditions_raw, outputs_raw):\n\n qids = []\n conditions = []\n outputs = []\n dictionaries_standardization = []\n for qid_raw, condition_raw, output_raw in zip(qids_raw, conditions_raw, outputs_raw):\n qid, condition, output, dictionary = preprocess_sample(qid_raw, condition_raw, output_raw)\n qids.append(qid)\n conditions.append(condition)\n outputs.append(output)\n dictionaries_standardization.append(dictionary)\n\n return qids, conditions, outputs, dictionaries_standardization", "def from_bragg_teach(cls, data_dir=None,\n 
min_questions=None, relations=None, conditions=None):\n if data_dir is None:\n data_dir = os.environ['BRAGG_TEACH_DIR']\n\n df = pd.read_csv(os.path.join(data_dir, 'data.csv'))\n df = df[df.finalobservation] # Ignore non-final.\n df = df.rename(columns={\n 'questionid': 'question',\n 'workerid': 'worker',\n 'observationlabel': 'answer',\n 'observationvalue': 'answertype',\n 'questiongold': 'gt',\n 'observationtime': 'time',\n 'finalobservation': 'final',\n 'actionrule': 'actiontype',\n })\n df['condition'] = df['condition'].map(json.loads)\n if min_questions is not None:\n df = pd.concat([df for worker, df in df.groupby(\n 'worker') if df.question.nunique() >= min_questions])\n\n def condition_name(condition):\n if 'n' in condition['policy'] and condition['policy']['n'] == 3:\n return 'pilot_3'\n elif 'n' in condition['policy'] and condition['policy']['n'] == 10:\n return 'pilot_10'\n elif 'n' in condition['policy'] and condition['policy']['n'] == 20:\n return 'pilot_20'\n elif ('explore_policy' in condition['policy'] and\n condition['ask_bonus'] == 0.04):\n return 'rl_v1'\n elif ('explore_policy' in condition['policy'] and\n condition['ask_bonus'] == 0.08):\n return 'rl_v2'\n df['condition'] = df['condition'].map(condition_name)\n if conditions is not None:\n df = df[df['condition'].isin(conditions)]\n\n df['gt'] = df['gt'].map(json.loads)\n df['answer'] = df['answer'].map(json.loads)\n if relations is not None:\n for column in ['gt', 'answer']:\n df[column] = df[column].map(lambda d: dict(\n (relation, d[relation]) for relation in relations) if\n d is not None else None)\n df['correct'] = df['gt'] == df['answer']\n\n df['condition'] = df['condition'].map(\n lambda x: json.dumps(x, sort_keys=True))\n return cls(df[['question', 'worker', 'answer', 'answertype', 'gt',\n 'time', 'correct', 'condition',\n 'action', 'actiontype']])", "def additional_data_dict(titles: list) -> dict or str:\n try:\n additional_data = {}\n for title in titles:\n url = \"http://www.omdbapi.com/?i=tt3896198&apikey=6b513db6&t=\" + title\n headers = {\"Accept\": \"application/json\"}\n req = requests.get(url, headers=headers)\n api_content = json.loads(req.content.decode('utf-8'))\n # Because of no BoxOffice key in API for movie 'Ben Hur' (ID 68 in db):\n api_content.setdefault('BoxOffice', 'N/A')\n additional_data[title] = {}\n if api_content['imdbRating']:\n additional_data[title]['imdb_rating'] = float(api_content['imdbRating'])\n else:\n additional_data[title]['imdb_rating'] = -1\n if api_content['Runtime'] == 'N/A':\n additional_data[title]['runtime'] = -1\n else:\n additional_data[title]['runtime'] = int(re.sub(r'[^0-9]', '', api_content['Runtime']))\n if api_content['BoxOffice'] == 'N/A':\n additional_data[title]['box_office'] = -1\n else:\n additional_data[title]['box_office'] = int(re.sub(r'[^0-9]', '', api_content['BoxOffice']))\n nominations_oscars = re.search(r'Nominated for (.+?) Oscar', api_content['Awards'])\n if nominations_oscars:\n additional_data[title]['nominations_oscars'] = int(nominations_oscars.group(1))\n else:\n additional_data[title]['nominations_oscars'] = 0\n oscars = re.search(r'Won (.+?) 
Oscar', api_content['Awards'])\n if oscars:\n additional_data[title]['oscars'] = int(oscars.group(1))\n else:\n additional_data[title]['oscars'] = 0\n nominations_others = re.search(r'(\\d+) nomination', api_content['Awards'])\n if nominations_others:\n additional_data[title]['nominations_others'] = int(nominations_others.group(1))\n else:\n additional_data[title]['nominations_others'] = 0\n wins_others = re.search(r'(\\d+) win', api_content['Awards'])\n if wins_others:\n additional_data[title]['wins_others'] = int(wins_others.group(1))\n else:\n additional_data[title]['wins_others'] = 0\n return additional_data\n except KeyError:\n return \"No data about some movie(s). Check data source.\"\n except requests.exceptions.ConnectionError:\n return \"No access. Check internet connection or API is down.\"", "def predict_bike_demand(weather_data):\n # TODO: connect to the real deal!\n return {s[\"extra\"][\"uid\"]: random.randint(0, 11) for s in MOCK_STATION_STATS}", "def prepare_date_for_all_warehouses_sheets(self,product,product_data_dict,opening_qty,last_sales,last_purchase_date,qty_purchase_in_duration,qty_sales_in_duration,scrap_location_qty,adjusted_qty_in_duration,warehouse_in_qty,warehouse_out_qty):\n if last_purchase_date: \n last_purchase_date = datetime.strptime(last_purchase_date, '%d-%m-%Y')\n if last_sales:\n last_sales = datetime.strptime(last_sales, '%d-%m-%Y')\n if product_data_dict.has_key(product):\n product_data = product_data_dict.get(product)\n old_opening_qty = product_data.get('opening_qty')\n new_opening_qty = product_data.get('opening_qty') + opening_qty \n \n new_last_sales = product_data.get('last_sales')\n new_last_sales.append(last_sales) \n \n new_last_purchase_date_lst = product_data.get('last_purchase_date')\n new_last_purchase_date_lst.append(last_purchase_date)\n \n old_qty_purchase_in_duration = product_data.get('qty_purchase_in_duration')\n new_qty_purchase_in_duration = old_qty_purchase_in_duration + qty_purchase_in_duration\n \n old_qty_sales_in_duration = product_data.get('qty_sales_in_duration')\n new_qty_sales_in_duration = old_qty_sales_in_duration + qty_sales_in_duration\n \n old_scrap_location_qty = product_data.get('scrap_location_qty')\n new_scrap_location_qty = old_scrap_location_qty + scrap_location_qty\n \n old_adjusted_qty_in_duration = product_data.get('adjusted_qty_in_duration')\n new_adjusted_qty_in_duration = old_adjusted_qty_in_duration + adjusted_qty_in_duration\n \n old_warehouse_in_qty = int(product_data.get('warehouse_in_qty') or 0)\n new_warehouse_in_qty = old_warehouse_in_qty + warehouse_in_qty or 0\n \n old_warehouse_out_qty = int(product_data.get('warehouse_out_qty') or 0)\n new_warehouse_out_qty = old_warehouse_out_qty + warehouse_out_qty or 0\n \n product_data.update({'opening_qty':new_opening_qty,'last_sales':new_last_sales,\n 'last_purchase_date':new_last_purchase_date_lst,'qty_purchase_in_duration':new_qty_purchase_in_duration,\n 'qty_sales_in_duration': new_qty_sales_in_duration,'scrap_location_qty':new_scrap_location_qty,\n 'adjusted_qty_in_duration':new_adjusted_qty_in_duration,\n 'warehouse_in_qty':new_warehouse_in_qty,'warehouse_out_qty':new_warehouse_out_qty\n })\n \n product_data_dict.update({product:product_data})\n return product_data_dict\n \n product_data_dict.update({product:{\n 'opening_qty':opening_qty or 0,'last_sales':[last_sales or ''],\n 'last_purchase_date':[last_purchase_date],'qty_purchase_in_duration':qty_purchase_in_duration or 0,\n 'qty_sales_in_duration': qty_sales_in_duration or 
0,'scrap_location_qty':scrap_location_qty or 0,\n 'adjusted_qty_in_duration':adjusted_qty_in_duration or 0,\n 'warehouse_in_qty':warehouse_in_qty or 0,'warehouse_out_qty':warehouse_out_qty or 0\n }})\n return product_data_dict", "def build_query_url(\n begin_date, end_date, stationid, product, datum=None, bin_num=None,\n interval=None, units='metric', time_zone='gmt'):\n base_url = 'http://tidesandcurrents.noaa.gov/api/datagetter?'\n\n # If the data product is water levels, check that a datum is specified\n if product == 'water_level':\n if datum is None:\n raise ValueError('No datum specified for water level data.See'\n ' https://tidesandcurrents.noaa.gov/api/#datum '\n 'for list of available datums')\n else:\n # Compile parameter string for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'datum': datum,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n\n elif product == 'hourly_height':\n if datum is None:\n raise ValueError('No datum specified for water level data.See'\n ' https://tidesandcurrents.noaa.gov/api/#datum '\n 'for list of available datums')\n else:\n # Compile parameter string for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'datum': datum,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n elif product == 'high_low':\n if datum is None:\n raise ValueError('No datum specified for water level data.See'\n ' https://tidesandcurrents.noaa.gov/api/#datum '\n 'for list of available datums')\n else:\n # Compile parameter string for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'datum': datum,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n\n elif product == 'predictions':\n # If no interval provided, return 6-min predictions data\n if interval is None:\n # Compile parameter string for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'datum': datum,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n\n else:\n # Compile parameter string, including interval, for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'datum': datum,\n 'interval': interval,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n\n # If the data product is currents, check that a bin number is specified\n elif product == 'currents':\n if bin_num is None:\n raise ValueError(\n 'No bin specified for current data. 
Bin info can be '\n 'found on the station info page'\n ' (e.g., https://tidesandcurrents.noaa.gov/cdata/StationInfo?id=PUG1515)')\n else:\n # Compile parameter string for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'bin': str(bin_num),\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n\n # For all other data types (e.g., meteoroligcal conditions)\n else:\n # If no interval provided, return 6-min met data\n if interval is None:\n # Compile parameter string for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n else:\n # Compile parameter string, including interval, for use in URL\n parameters = {'begin_date': begin_date,\n 'end_date': end_date,\n 'station': stationid,\n 'product': product,\n 'interval': interval,\n 'units': units,\n 'time_zone': time_zone,\n 'application': 'py_noaa',\n 'format': 'json'}\n\n # Build URL with requests library\n query_url = requests.Request(\n 'GET', base_url, params=parameters).prepare().url\n\n return query_url", "def mor_prepare_data():\n prices, locations, areas, links = [], [], [], []\n for i in range(START_PAGE, SEARCHING_DEPTH+1):\n handler = requests.get(main_url, params={\"page\": str(i)})\n soup = bs4.BeautifulSoup(handler.text, 'lxml')\n heads = soup.find_all(\"header\")\n once = True\n for head in heads:\n if head.find(\"meta\", {\"itemprop\": \"category\"}) and once:\n\n raw_price = head.find(\"meta\", {\"itemprop\": \"price\"})\n price = int(float(raw_price[\"content\"]) if raw_price else \"\")\n\n raw_loc_list = head.find(\"h2\",\n {\"class\": \"single-result__title\"}).getText().strip().split(\n \", \")\n found = False\n for loc in raw_loc_list:\n if location_mapper[CITY].get(loc.lower(), 0):\n location = location_mapper[CITY][loc.lower()]\n\n found = True\n break\n if not found:\n location = \"\"\n if DEBUG_MODE:\n print(raw_loc_list)\n\n raw_area = head.find(\"p\", {\n \"class\": \"single-result__price single-result__price--currency\"}).getText().strip().split()\n if price and location:\n square_price = raw_area[0] if len(raw_area) == 2 else \"\".join(\n (raw_area[0], raw_area[1]))\n\n area = int(price / float(square_price.replace(\",\", \".\")))\n link_url = head.find('a')['href']\n\n if location and area and link_url:\n prices.append(price) if price < PRICE_UPPER_LIMIT else prices.append(\n PRICE_UPPER_LIMIT)\n locations.append(location)\n areas.append(area) if area < AREA_UPPER_LIMIT else areas.append(\n AREA_UPPER_LIMIT)\n links.append(link_url)\n\n return prices, locations, areas, links", "def load_vqa_json(self, data_split):\n qdic, adic = {}, {}\n\n with open(config.DATA_PATHS[self.exp_type][data_split]['ques_file'], 'r') as f:\n qdata = json.load(f)['questions']\n for q in qdata:\n q_key = data_split + QID_KEY_SEPARATOR + str(q['question_id'])\n qdic[q_key] = {\n 'qstr': q['question'],\n 'iid': q['image_id']\n }\n if self.use_ocr:\n qdic[q_key]['ocr_tokens'] = q['ocr_tokens']\n if self.use_binary:\n qdic[q_key]['ocr_answer_flag'] = q['ocr_answer_flag']\n\n if 'test' not in data_split:\n with open(config.DATA_PATHS[self.exp_type][data_split]['ans_file'], 'r') as f:\n adata = json.load(f)['annotations']\n for a in adata:\n # TODO: we only use key 'answer' in this a['answers'] list\n adic[data_split + QID_KEY_SEPARATOR + str(a['question_id'])] = \\\n 
a['answers']\n\n self.logger.info('parsed {} questions for {}'.format(len(qdic), data_split))\n return qdic, adic", "def _setData(self):\n data_list = []\n results = self.query.all()\n formatter = date.getLocaleFormatter(self.request, \"date\", \"long\")\n time_formatter = date.getLocaleFormatter(self.request, \"time\", \"short\")\n for result in results:\n data = {}\n data[\"subject\"] = result.short_name\n # this tab appears in the workspace pi/ view...\n data[\"url\"] = url.set_url_context(\"../calendar/sittings/obj-%i/schedule\" %\n result.sitting_id)\n # Note: same UI is also displayed at: \n # /business/sittings/obj-%i/schedule % result.sitting_id\n data[\"items\"] = \"\"\n data[\"status\"] = misc.get_wf_state(result)\n data[\"status_date\"] = formatter.format(result.status_date)\n data[\"owner\"] = \"\"\n data[\"type\"] = result.group.type\n data[\"group\"] = u\"%s %s\" % (\n result.group.type.capitalize(), result.group.short_name)\n data[\"time_from_to\"] = (\n time_formatter.format(result.start_date),\n time_formatter.format(result.end_date))\n data[\"date\"] = formatter.format(result.start_date) \n if result.venue:\n data[\"venue\"] = _(result.venue.short_name)\n else:\n date[\"venue\"] = \"\"\n if type(result)==domain.Question:\n data[\"to\"] = result.ministry.short_name\n else:\n data[\"to\"]= \"\"\n # past, present, future\n today = datetime.datetime.today().date()\n startday = result.start_date.date()\n if today==startday:\n data[\"css_class\"] = \"present\"\n elif today>startday:\n data[\"css_class\"] = \"past\"\n else:\n data[\"css_class\"] = \"future\"\n data_list.append(data)\n self._data = data_list", "def convert_question(self, q):\n\n item = {}\n item['id'] = q['id']\n item['title'] = q['title']\n item['body'] = q['text']\n item['author_id'] = q['author']['id']\n item['author'] = q['author']['username']\n item['url'] = q['url']\n item['score'] = q['score']\n item['score_label'] = self.convert_count(q['score'])\n item['answer_count'] = q['answer_count']\n item['answer_count_label'] = self.convert_count(q['answer_count'])\n item['view_count'] = q['view_count']\n item['view_count_label'] = self.convert_count(q['view_count'])\n item['added_at'] = q['added_at']\n item['added_at_label'] = timeago.format(datetime.fromtimestamp(int(q['added_at']), TIMEZONE), datetime.now(TIMEZONE))\n item['last_activity'] = q['last_activity_at']\n item['last_activity_label'] = timeago.format(datetime.fromtimestamp(int(q['last_activity_at']), TIMEZONE), datetime.now(TIMEZONE))\n item['has_more_comments'] = False\n item['has_more_answers'] = False\n item['has_accepted_answer'] = q['has_accepted_answer']\n item['closed'] = q['closed']\n\n item['tags'] = []\n for tag in q['tags']:\n item['tags'].append({'name': tag})\n\n return item", "def createdictionary(bpm, extremes, duration, numbeats, time_beats):\n dict = {}\n dict[\"mean_hr_bpm\"] = bpm\n dict[\"voltage_extremes\"] = extremes\n dict[\"duration\"] = duration\n dict[\"num_beats\"] = numbeats\n dict[\"beats\"] = time_beats\n return dict", "def _setData(self):\n data_list = []\n results = self.query.all()\n formatter = date.getLocaleFormatter(self.request, \"date\", \"long\")\n for result in results:\n data = {}\n data[\"qid\"] = (\"i-\" + str(result.parliamentary_item_id))\n if type(result)==domain.AgendaItem:\n g = u\" \" + result.group.type + u\" \" + result.group.short_name\n else:\n g = u\"\" # !+ g?\n data[\"subject\"] = result.short_name\n data[\"title\"] = result.short_name\n data[\"result_item_class\"] = \"workflow-state-\" + 
result.status\n data[\"url\"] = url.set_url_context(\"%ss/obj-%i\" % (\n result.type, result.parliamentary_item_id))\n data[\"status\"] = misc.get_wf_state(result)\n data[\"status_date\"] = formatter.format(result.status_date)\n data[\"owner\"] = \"%s %s\" %(result.owner.first_name, result.owner.last_name)\n data[\"type\"] = _(result.type)\n if type(result)==domain.Question:\n data[\"to\"] = result.ministry.short_name\n else:\n data[\"to\"]= u\"\"\n # remember original domain object\n data[\"id\"] = result.parliamentary_item_id\n data[\"_obj\"] = result\n # append processed result item\n data_list.append(data)\n self._data = data_list", "def apigw_event():\n\n return {\n \"queryStringParameters\": {\n \"snotel_site\": \"322:CO:SNTL\",\n \"days\": \"30\",\n \"element_code\": \"WDIRV\"\n }\n }", "def serialized_data(self):\n upcoming_shows = self.upcoming_shows\n past_shows = self.past_shows\n\n return {\n 'id': self.id,\n 'name': self.name,\n 'address': self.address,\n 'phone': self.phone,\n 'image_link': self.image_link,\n 'facebook_link': self.facebook_link,\n 'city': self.city.name,\n 'state': self.city.state_name,\n 'genres': self.genres if self.genres else [],\n 'website': self.website,\n 'seeking_description': self.seeking_description,\n 'seeking_talent': self.seeking_talent,\n 'num_upcoming_shows': len(upcoming_shows),\n 'upcoming_shows_count': len(upcoming_shows),\n 'upcoming_shows': upcoming_shows,\n 'past_shows': past_shows,\n 'past_shows_count': len(past_shows),\n }", "def get_result(dataset=None):\n if dataset is None:\n data = gen_dataset()\n else:\n data = pd.DataFrame(data=dataset)\n\n data['SEASON'] = data.apply(lambda x: set_season(str_2_date(x['ORD_DT'])), axis=1)\n\n data.drop('ORD_DT', axis=1, inplace=True)\n data.drop('QT_ORDD', axis=1, inplace=True)\n\n return data.to_dict('records')", "def info(self):\n past_shows = self.get_shows(Show.start_time <= datetime.now())\n upcoming_shows = self.get_shows(Show.start_time > datetime.now())\n\n return {\n 'id': self.id,\n 'name': self.name,\n 'genres': self.genres,\n 'city': self.city,\n 'state': self.state,\n 'phone': self.phone,\n 'website': self.website,\n 'facebook_link': self.facebook_link,\n 'seeking_venue': self.seeking_venue,\n 'seeking_description': self.seeking_description,\n 'image_link': self.image_link,\n 'past_shows': past_shows,\n 'upcoming_shows': upcoming_shows,\n 'past_shows_count': len(past_shows),\n 'upcoming_shows_count': len(upcoming_shows)\n }", "def _build_set(self, n_words):\n # count all words\n counter = Counter()\n utterances, labels = self.read_json()\n for utterance in utterances:\n tokens = nltk.word_tokenize(utterance)\n counter.update(tokens)\n\n # generate an int representation\n count = [['UNK', -1]]\n count.extend(counter.most_common(n_words - 1))\n\n # convert the int representation into a dictionary\n dictionary = dict()\n for word, _ in count:\n dictionary[word] = len(dictionary)\n data = list()\n unk_count = 0\n for word in counter.most_common():\n if word in dictionary:\n index = dictionary[word]\n else:\n index = 0 # dictionary['UNK']\n unk_count += 1\n data.append(index)\n count[0][1] = unk_count\n return dictionary", "def demand_all(demand_dict, network):\n warehouse_demand = {}\n for sku, sku_data in demand_dict.iteritems():\n warehouse_demand[sku] = []\n for i in range(0, len(network.warehouses)):\n warehouse_demand[sku].append([0, 0])\n for i in sku_data.keys():\n try:\n clean = ZipCodesData.convert_zip_name(i)\n rank = network.ranking.get_leadtime(clean, 
network.zips[clean].state)\n set_demand = network.zips[str(clean)].get_demand_arrays(sku_data[i])\n for j in range(0, len(set_demand[1])):\n try:\n warehouse_demand[sku][set_demand[1][j]][0] += set_demand[0][j]\n warehouse_demand[sku][set_demand[1][j]][1] += (math.sqrt(set_demand[0][j]))**2\n except:\n pass\n except KeyError:\n pass\n return warehouse_demand", "def getQueryType(ogcuisine):\n establishmenttype = {}\n establishmenttype['Fast Food'] = 'Fast Food restaurants'\n establishmenttype['Burgers'] = 'Burger places'\n establishmenttype['Cheesesteaks'] = 'Cheesesteak spots'\n establishmenttype['Gastropubs'] = 'Gastropubs'\n establishmenttype['Breakfast'] = 'Breakfast spots'\n establishmenttype['Diner'] = 'Diners'\n establishmenttype['Salad'] = 'Salad places'\n establishmenttype['Sandwiches'] = 'Sandwich places'\n establishmenttype['Soup'] = 'Soup places'\n establishmenttype['Pizza'] = 'Pizza places'\n establishmenttype['Italian'] = 'Italian restaurants'\n establishmenttype['African'] = 'African restaurants'\n establishmenttype['Ethiopian'] = 'Ethiopian restaurants'\n establishmenttype['American'] = 'American restaurants'\n establishmenttype['BBQ'] = 'BBQ restaurants'\n establishmenttype['French'] = 'French restaurants'\n establishmenttype['Belgian'] = 'Belgian restaurants'\n establishmenttype['British'] = 'British restaurants'\n establishmenttype['Irish'] = 'Irish restaurants'\n establishmenttype['Southern'] = 'Southern restaurants'\n establishmenttype['Cajun'] = 'Cajun restaurants'\n establishmenttype['Caribbean'] = 'Caribbean restaurants'\n establishmenttype['Chinese'] = 'Chinese restaurants'\n establishmenttype['Latin American'] = 'Latin restaurants'\n establishmenttype['Cuban'] = 'Cuban restaurants'\n establishmenttype['Latin'] = 'Latin restaurants'\n establishmenttype['Brazilian'] = 'Brazilian'\n establishmenttype['Mexican'] = 'Mexican'\n establishmenttype['Tex-Mex'] = 'Tex-Mex restaurants'\n establishmenttype['Greek'] = 'Greek restaurants'\n establishmenttype['Indian'] = 'Indian restaurants'\n establishmenttype['Japanese'] = 'Japanese restaurants'\n establishmenttype['Sushi'] = 'Sushi restaurants'\n establishmenttype['Mediterranean'] = 'Mediterranean restaurants'\n establishmenttype['Middle Eastern'] = 'Middle Eastern restaurants'\n establishmenttype['Kosher'] = 'Kosher restaurants'\n establishmenttype['Seafood'] = 'Seafood restaurants'\n establishmenttype['Spanish / Tapas'] = 'Spanish / Tapas restaurants'\n establishmenttype['Steakhouse'] = 'Steakhouses'\n establishmenttype['Thai'] = 'Thai restaurants'\n establishmenttype['Vegetarian'] = 'Vegetarian restaurants'\n establishmenttype['Vietnamese'] = 'Vietnamese restaurants'\n establishmenttype['Coffee'] = 'Coffee shops'\n establishmenttype['Bagels'] = 'Bagel shops'\n establishmenttype['Bakeries'] = 'Bakeries'\n establishmenttype['Beer / Wine Stores'] = 'Beer and Wine stores'\n establishmenttype['Cupcakes'] = 'Cupcake shops'\n establishmenttype['Breweries'] = 'Breweries'\n establishmenttype['Desserts'] = 'Dessert spots'\n establishmenttype['Distilleries'] = 'Distilleries'\n establishmenttype['Donuts'] = 'Donut shops'\n establishmenttype['Empanadas'] = 'Empanada spots'\n establishmenttype['Gelato'] = 'Gelato spots'\n establishmenttype['Ice Cream / FroYo'] = 'Ice Cream shops'\n establishmenttype['Beer Bars'] = 'Beer Bars'\n establishmenttype['Cocktail Bars'] = 'Cocktail Bars'\n establishmenttype['Dive Bars'] = 'Dive Bars'\n establishmenttype['Sports Bars'] = 'Sports Bars'\n establishmenttype['Wine Bars'] = 'Wine Bars'\n 
establishmenttype['Beer Gardens'] = 'Beer Gardens'\n\n return establishmenttype[ogcuisine]", "def range_8(configuration):\n range_dict_all = {\n # updated aLIGO design sensitivity range from 197.5 to 181.5 Mpc on 9 Apr 2018 to reflect T1800044-v4\n \"HL\" : {'H1' : 181.5, 'L1' : 181.5},\n \"HLV\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3 },\n \"HLVK\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3, 'K1' : 160.0},\n \"HLVKI\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3, 'K1' : 160.0, 'I1' : 181.5},\n \"GW170817\" : {'H1': 107/2.26 *1.26 , 'L1': 218/2.26, 'V1': 58/2.26}, # 1.26 is the improvement factor for H1's range due to data processing.\n \"GW170817_without_Virgo\" : {'H1': 107/2.26 *1.26 , 'L1': 218/2.26},\n \"GW170814\" : {'H1': 53, 'L1': 98, 'V1': 26}, # 1.26 is the improvement factor for H1's range due to data processing.\n \"design\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3 },\n \"early\" : {'H1' : 60., 'L1': 60.},\n \"half_ligo\" : {'H1' : 99, 'L1' : 99, 'V1': 128.3 },\n \"half_virgo\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 64 },\n \"nosrm\" : {'H1' : 159, 'L1' : 159, 'V1': 109 },\n \"india\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3, \"I1\" : 181.5 },\n \"kagra\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3, \"I1\" : 181.5 , \\\n \"K1\" : 160.0},\n \"bala\" : {'H1' : 181.5, 'H2' : 181.5, 'L1' : 181.5, 'V1': 128.3, \\\n \"I1\" : 181.5 , \"K1\" : 160.0},\n \"sa\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3, \"I1\" : 181.5 , \\\n \"K1\" : 160.0, \"S1\":181.5},\n \"sa2\" : {'H1' : 181.5, 'L1' : 181.5, 'V1': 128.3, \"I1\" : 181.5 , \\\n \"K1\" : 160.0, \"S1\":181.5},\n \"steve\" : {'H1' : 160.0, 'L1' : 160.0, 'V1': 160.0, \"I1\" : 160.0 },\n \"s6vsr2\" : {'H1' : 20., 'L1' : 20., 'V1': 8. }\n }\n return(range_dict_all[configuration])", "def info(self):\n past_shows = self.get_shows(Show.start_time <= datetime.now())\n upcoming_shows = self.get_shows(Show.start_time > datetime.now())\n\n return {\n 'id': self.id,\n 'name': self.name,\n 'genres': self.genres,\n 'address': self.address,\n 'city': self.city,\n 'state': self.state,\n 'phone': self.phone,\n 'website': self.website,\n 'facebook_link': self.facebook_link,\n 'seeking_talent': self.seeking_talent,\n 'seeking_description': self.seeking_description,\n 'image_link': self.image_link,\n 'past_shows': past_shows,\n 'upcoming_shows': upcoming_shows,\n 'past_shows_count': len(past_shows),\n 'upcoming_shows_count': len(upcoming_shows)\n }", "def setUp(self):\n self.app = create_app()\n self.client = self.app.test_client\n self.database_name = \"trivia_test\"\n self.QUESTIONS_PER_PAGE = 10\n #self.database_path = \"postgres://{}/{}\".format('localhost:5432', self.database_name)\n self.database_path = \"postgres://{}/{}\".format('postgres:postgres@localhost:5432', self.database_name)\n setup_db(self.app, self.database_path)\n\n self.new_question_1 = {'answer': '1', 'category': 1, 'difficulty': 1, 'question': 'new question 1'}\n self.new_question_2 = {'answer': '', 'category': '', 'difficulty': '', 'question':'' }\n\n self.search_term_1 = {'searchTerm': 'actor'}\n self.search_term_2 = {'searchterm': 'actor'} # wrong requst parameter\n self.search_term_3 = {'searchTerm': 'World Cup'} \n self.search_term_4 = {'searchTerm': 'world Cup'} # lower case which doesn't exist in any question\n\n # when category is 'click' which menas ALL category\n self.quizzes_1 = {\n 'previous_questions': [], \n 'quiz_category': {'id': 0, 'type': 'click'}\n }\n\n # when specify category\n self.quizzes_2 = {\n 'previous_questions': [18, 19],\n 'quiz_category': {'id': '1', 
'type': 'Art'}\n }\n\n # wrong data for request as category doesn't exist\n self.quizzes_3 = {\n 'previous_questions': [18, 19],\n 'quiz_category': {'id': '1', 'type': 'Full Stack Web Development'}\n }\n\n # previous_questions contains all questions which means we run out of questions\n self.quizzes_4 = {\n 'previous_questions': [2,4,5,6,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23],\n 'quiz_category': {'id': '1', 'type': 'Art'}\n }\n\n # binds the app to the current context\n with self.app.app_context():\n self.db = SQLAlchemy()\n self.db.init_app(self.app)\n # create all tables\n self.db.create_all()", "def get_data(self, data):\n self.data = {}\n self.data[ATTR_PM1] = data['current']['values'][0]['value']\n self.data[ATTR_PM25] = data['current']['values'][1]['value']\n self.data[ATTR_PM25_LIMIT] = data['current']['standards'][0]['limit']\n self.data[ATTR_PM25_PERCENT] = (data['current']['standards'][0]\n ['percent'])\n self.data[ATTR_PM10] = data['current']['values'][2]['value']\n self.data[ATTR_PM10_LIMIT] = data['current']['standards'][1]['limit']\n self.data[ATTR_PM10_PERCENT] = (data['current']['standards'][1]\n ['percent'])\n self.data[ATTR_PRESSURE] = data['current']['values'][3]['value']\n self.data[ATTR_HUMIDITY] = data['current']['values'][4]['value']\n self.data[ATTR_TEMPERATURE] = data['current']['values'][5]['value']\n self.data[ATTR_CAQI] = data['current']['indexes'][0]['value']\n self.data[ATTR_CAQI_LEVEL] = (data['current']['indexes'][0]\n ['level'].lower().replace('_', ' '))", "def construct_zi_dict(train_info_list, test_info_list):\r\n zi_dict, train_dataset_list, test_dataset_list = dict(), list(), list()\r\n for user, age, gender, education, querys in train_info_list:\r\n for query in querys:\r\n for zi in query:\r\n if zi not in zi_dict:\r\n zi_dict[zi] = 0\r\n zi_dict[zi] += 1\r\n for user, querys in test_info_list:\r\n for query in querys:\r\n for zi in query:\r\n if zi not in zi_dict:\r\n zi_dict[zi] = 0\r\n zi_dict[zi] += 1\r\n zi_list = sorted(zi_dict.iteritems(), key=lambda x: x[1], reverse=True)\r\n zi2index = dict([(zi[0], [zi[1], idx]) for idx, zi in enumerate(zi_list)])\r\n index2zi = dict([(idx, [zi[0], zi[1]]) for idx, zi in enumerate(zi_list)])\r\n \r\n return zi2index, index2zi", "def _setData(self):\n data_list = []\n results = self.query.all()\n formatter = date.getLocaleFormatter(self.request, \"date\", \"long\")\n for result in results:\n data ={}\n data[\"qid\"]= (\"m_\" + str(result.motion_id))\n data[\"subject\"] = u\"M \" + str(result.motion_number) + u\" \" + result.short_name\n data[\"title\"] = result.short_name\n if result.approval_date:\n data[\"result_item_class\"] = (\"workflow-state-\" + \n result.status + \"sc-after-\" + \n datetime.date.strftime(result.approval_date, \"%Y-%m-%d\"))\n else:\n data[\"result_item_class\"] = \"workflow-state-\" + result.status\n data[\"url\"] = url.set_url_context(\"motions/obj-\" + str(result.motion_id))\n data[\"status\"] = misc.get_wf_state(result)\n data[\"status_date\"] = formatter.format(result.status_date)\n data[\"owner\"] = \"%s %s\" %(result.owner.first_name, result.owner.last_name)\n data[\"type\"] = _(result.type)\n data[\"to\"] = \"\"\n data_list.append(data)\n self._data = data_list", "def slot_key_db() -> Dict[str, List]:\n\n return {'q50': 'second_person_plural',\n 'q28': 'cot_caught',\n 'q80': 'rain_sun',\n 'q66': 'crawfish',\n 'q110': 'halloween',\n 'q64': 'sandwich',\n 'q90': 'side_road',\n 'q105': 'beverage',\n 'q73': 'shoes',\n 'q79': 'highway',\n 'q58': 'yard_sale',\n 'q107': 
'rubbernecking',\n 'q94': 'frosting',\n 'q14': 'lawyer',\n 'q76': 'kitty_corner',\n 'q65': 'firefly',\n 'q60': 'verge',\n 'q118': 'brew_thru',\n 'q103': 'water_fountain'}", "def generate_qna_report(self, past_qna):\n response = []\n\n # reverse the order so correct report order can be generated\n past_qna.reverse()\n for qna in past_qna:\n\n found_page = False\n for page in self.module:\n\n if page[\"QID\"] == qna[\"QID\"]:\n found_page = True\n\n found_answer = False\n answer_list = []\n for answer in page[\"answers\"]:\n if answer[\"AID\"] in qna[\"AID\"]:\n found_answer = True\n answer_list.append({\n \"AID\": answer[\"AID\"],\n \"prettyAID\": answer.get(\"prettyAID\"),\n \"answer\": answer[\"answer\"],\n \"description\": answer[\"description\"],\n \"resources\": answer[\"resources\"]\n })\n\n response.append({\n \"QID\": page[\"QID\"],\n \"question\": page[\"question\"],\n \"description\": page[\"description\"],\n \"resources\": page[\"resources\"],\n \"answers\": answer_list\n })\n\n if not found_answer:\n raise ValueError(\"AID: \" + qna[\"AID\"] + \"not found!\")\n\n if not found_page:\n raise ValueError(\"QID: \" + qna[\"QID\"] + \" not found!\")\n\n return response", "def build(self):\n states = WOFRegion.query.filter(WOFRegion.country_iso=='US')\n\n logger.info('Indexing US states.')\n\n for row in tqdm(states):\n\n # Key -> id(s)\n for key in map(keyify, state_key_iter(row)):\n self.add_key(key, row.wof_id)\n\n # ID -> state\n self.add_location(row.wof_id, StateMatch(row))", "def _get_ancillary_data_for_acquisition(self):\n max_num_acq_opt_evals = self.get_acq_opt_max_evals(self.step_idx)\n return Namespace(max_evals=max_num_acq_opt_evals,\n t=self.step_idx,\n curr_max_val=self.curr_opt_val,\n evals_in_progress=self.eval_points_in_progress)", "def build_param_and_data_dict(self, s_gen, xr, yr, r):\n # Note it is important to create a new dictionary here so that\n # we reset the data dict after generating new data\n self.data = {\n 'DT': self.dt,\n 'motion_prior': self.motion_prior,\n 'motion_gen': self.motion_gen,\n 'ds': self.ds,\n 'de': self.de,\n 'L0': self.l0,\n 'L1': self.l1,\n 'GAMMA': self.gamma,\n 'lamb': self.lamb,\n 'fista_c': self.fista_c,\n 'D': self.tc.t_D.get_value(),\n 'N_L': self.n_l,\n 'N_T': self.n_t,\n 'L_I': self.l_i,\n 'L_N': self.l_n,\n 'N_g_itr': self.n_g_itr,\n 'N_itr': self.n_itr,\n 'N_P': self.n_p,\n 'XS': self.tc.t_XS.get_value(),\n 'YS': self.tc.t_YS.get_value(),\n 'XE': self.tc.t_XE.get_value(),\n 'YE': self.tc.t_YE.get_value(),\n 'Var': self.tc.t_Var.get_value(),\n 'G': self.tc.t_G.get_value(),\n 'tau': self.tau,\n 'XR': xr, 'YR': yr,\n 'IE': self.tc.t_IE.get_value(),\n 'S_gen': s_gen,\n 'S_gen_name': self.s_gen_name,\n 'R': r,\n 'Ips': self.Ips,\n 'FP': self.FP,\n 'quad_reg': self.quad_reg,\n 'quad_reg_mean': self.quad_reg_mean,\n 'drop_prob': self.drop_prob,\n 's_range': self.s_range,\n }", "def _init_dict(self):\n dict_ord = self.MIN_VALID\n\n for da in self.train_das:\n for dai in da:\n if dai.name not in self.dict_slot:\n self.dict_slot[dai.name] = dict_ord\n dict_ord += 1\n if dai.value not in self.dict_value:\n self.dict_value[dai.value] = dict_ord\n dict_ord += 1\n\n for tree in self.train_trees:\n for t_lemma, formeme in tree.nodes:\n if t_lemma not in self.dict_t_lemma:\n self.dict_t_lemma[t_lemma] = dict_ord\n dict_ord += 1\n if formeme not in self.dict_formeme:\n self.dict_formeme[formeme] = dict_ord\n dict_ord += 1\n\n self.dict_size = dict_ord", "def create_time_dicts(\n Spc:Dict) -> List[Dict]:\n abreviations = ['Kd','Km','Kq']\n 
daily_range = Spc['daily']\n monthly_range = Spc['monthly']\n quarterly_range = Spc['quarterly']\n all_ranges = np.cumsum([0,daily_range,monthly_range,quarterly_range])\n\n out_list = []\n for i,abrev in enumerate(abreviations):\n temp_dict = {}\n temp_dict['range'] = range(all_ranges[i],all_ranges[i+1])\n temp_dict['one'] = np.ones(Spc[abrev])\n temp_dict['w'] = np.arange(1,Spc[abrev]+1)/Spc[abrev]\n temp_dict['k'] = np.arange(1,Spc[abrev]+1)\n temp_dict['kk'] = Spc[abrev]\n out_list.append(temp_dict)\n\n return out_list", "def get_daily_energy_demand_houses(houses_dict, cfg):\n settings = cfg['settings']\n typtage_combinations = settings['typtage_combinations']\n houses_list = settings['houses_list_VDI']\n\n # Load the file containing the energy factors of the different typical\n # radiation year (TRY) regions, house types and 'typtage'. In VDI 4655,\n # these are the tables 10 to 24.\n # For the 'noarch' conda build, access the file as pkg resource object\n with pkg_resources.resource_stream('lpagg', cfg['data']['energy_factors']\n ) as resource:\n energy_factors_df = pd.read_excel(resource,\n sheet_name='Faktoren',\n index_col=[0, 1, 2])\n\n if settings.get('zero_summer_heat_demand', None) is not None:\n # Reduze the value of 'F_Heiz_TT' to zero.\n # For modern houses, this eliminates the heat demand in summer\n energy_factors_df.loc[(slice(None), slice(None), 'F_Heiz_TT'),\n ('SWX', 'SSX')] = 0\n\n # Create a new DataFrame with multiindex.\n # It has two levels of columns: houses and energy\n # The DataFrame stores the individual energy demands for each house in\n # each time step\n energy_demands_types = ['Q_Heiz_TT', 'W_TT', 'Q_TWW_TT']\n settings['energy_demands_types'] = energy_demands_types\n iterables = [houses_dict.keys(), energy_demands_types]\n multiindex = pd.MultiIndex.from_product(iterables, names=['house',\n 'energy'])\n daily_energy_demand_houses = pd.DataFrame(index=multiindex,\n columns=typtage_combinations)\n\n # Fill the DataFrame daily_energy_demand_houses\n for house_name in houses_list:\n house_type = houses_dict[house_name]['house_type']\n N_Pers = houses_dict[house_name]['N_Pers']\n N_WE = houses_dict[house_name]['N_WE']\n try:\n TRY = houses_dict[house_name]['TRY']\n except KeyError:\n raise KeyError('Key \"TRY\" (Region) missing from house '+house_name)\n\n # Savety check:\n if TRY not in energy_factors_df.index.get_level_values(0):\n logger.error('Error! 
TRY '+str(TRY)+' not contained in file ' +\n cfg['data']['energy_factors'])\n logger.error(' Skipping house \"'+house_name+'\"!')\n continue # 'Continue' skips the rest of the current for-loop\n\n # Get yearly energy demands\n Q_Heiz_a = houses_dict[house_name]['Q_Heiz_a']\n W_a = houses_dict[house_name]['W_a']\n Q_TWW_a = houses_dict[house_name]['Q_TWW_a']\n\n # (6.4) Do calculations according to VDI 4655 for each 'typtag'\n for typtag in typtage_combinations:\n F_Heiz_TT = energy_factors_df.loc[TRY, house_type,\n 'F_Heiz_TT'][typtag]\n F_el_TT = energy_factors_df.loc[TRY, house_type, 'F_el_TT'][typtag]\n F_TWW_TT = energy_factors_df.loc[TRY, house_type,\n 'F_TWW_TT'][typtag]\n\n Q_Heiz_TT = Q_Heiz_a * F_Heiz_TT\n\n if house_type == 'EFH':\n N_Pers_WE = N_Pers\n elif house_type == 'MFH':\n N_Pers_WE = N_WE\n\n W_TT = W_a * (1.0/365.0 + N_Pers_WE * F_el_TT)\n Q_TWW_TT = Q_TWW_a * (1.0/365.0 + N_Pers_WE * F_TWW_TT)\n\n if W_TT < 0:\n logger.warning('Warning: W_TT for '+house_name+' and ' +\n typtag + ' was negative, see VDI 4655 page 16')\n W_TT = W_a * (1.0/365.0 + N_Pers_WE * 0)\n\n if Q_TWW_TT < 0:\n logger.warning('Warning: Q_TWW_TT for '+house_name+' and ' +\n typtag + ' was negative, see VDI 4655 page 16')\n Q_TWW_TT = Q_TWW_a * (1.0/365.0 + N_Pers_WE * 0)\n\n # Write values into DataFrame\n daily_energy_demand_houses.loc[house_name,\n 'Q_Heiz_TT'][typtag] = Q_Heiz_TT\n daily_energy_demand_houses.loc[house_name,\n 'W_TT'][typtag] = W_TT\n daily_energy_demand_houses.loc[house_name,\n 'Q_TWW_TT'][typtag] = Q_TWW_TT\n\n# print(daily_energy_demand_houses)\n return daily_energy_demand_houses", "def get_data_lettings(self):\n return {\n 'search_type': SearchForm.SEARCH_TYPE_LETTING,\n 'min_price': '100',\n 'max_price': '200',\n 'location':'Test, Test',\n 'min_bedrooms': '5',\n 'property_type': str(PropertyTypeFactory().slug)\n }", "def formatdata(data,Params):\n\tmndata = dict()\n\talltrials = np.array([])\n\tfor k in range(len(Params[\"conditions\"])):\n\t\tconditionmean = data[0,k].mean(axis = 0)\n\t\tmndata.update({Params[\"conditions\"][k]: {'data' : data[0,k].mean(axis = 0), 'cmax' : conditionmean.max(), 'cmin' : conditionmean.min()}})\n\treturn mndata", "def test_data():\n return {\"David Andrews\" : [200.50, 400.00, 250.75],\n \"John Goodfellow\" : [25.00, 175.50],\n \"Mary Suzuki\" : [75.00, 125.00, 250.00],\n \"Bonney Lake\" : [500.50, 700.75, 500.25],\n \"DeMarcus Rollins\" : [155.00, 165.00]\n }", "def clean_extracted_data_details(dict_data, starting_date):\n stats_dataframe = pd.DataFrame.from_dict(dict_data)\n iso_year, iso_week = starting_date.isocalendar()[:2]\n stats_dataframe.drop(stats_dataframe.index[stats_dataframe.index == 'date_start'], inplace = True)\n stats_dataframe.drop(stats_dataframe.index[stats_dataframe.index == 'date_stop'], inplace = True)\n titles = pd.DataFrame(stats_dataframe.columns).T.rename(columns = pd.DataFrame(stats_dataframe.columns).T.loc[0])\n titles.index = ['campaign']\n stats_dataframe = titles.append(stats_dataframe)\n one_column = stats_dataframe.iloc[:, 0]\n for number in range(1, len(stats_dataframe.columns)):\n one_column = one_column.append(stats_dataframe.iloc[:, number])\n new_index = []\n for i, _ in enumerate(titles):\n for _, j in enumerate(stats_dataframe.index):\n new_index.append('{}_{}'.format(j, i))\n one_column.index = new_index\n return one_column.to_frame('{}-{}'.format(iso_year, iso_week)), stats_dataframe.index", "def setUp(self):\n\t\tself.app = app.test_client()\n\t\tself.app.test = True\n\t\tself.base_data = 
[\n\t\t\t\t\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"alta\",\n\t\t\t\t\"base_total\": 98,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"beaver mountain\",\n\t\t\t\t\"base_total\": 68,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"brian head\",\n\t\t\t\t\"base_total\": 40,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"brighton\",\n\t\t\t\t\"base_total\": 77,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"cherry peak\",\n\t\t\t\t\"base_total\": 44,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"deer valley\",\n\t\t\t\t\"base_total\": 67,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"eagle point\",\n\t\t\t\t\"base_total\": 19,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"nordic valley\",\n\t\t\t\t\"base_total\": 12,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"park city\",\n\t\t\t\t\"base_total\": 50,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"powder mountain\",\n\t\t\t\t\"base_total\": 51,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"snowbasin\",\n\t\t\t\t\"base_total\": 64,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"snowbird\",\n\t\t\t\t\"base_total\": 103,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"solitude\",\n\t\t\t\t\"base_total\": 75,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"sundance\",\n\t\t\t\t\"base_total\": 36,\n\t\t\t\t\"crawled_at\": \"2018-03-05\"\n\t\t\t\t}\n\t\t]\n\n\t\tself.twenty_four_hour_data = [\t\t\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"alta\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 20\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"beaver mountain\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 3\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"brian head\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 4\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"brighton\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 5\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"cherry peak\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 5\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"deer valley\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 5\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"eagle point\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 4\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"nordic valley\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 2\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"park city\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 7\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"powder mountain\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 10\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"snowbasin\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 7\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": 
\"snowbird\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 18\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"solitude\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 7\n\t\t\t\t},\n\t\t\t\t{\n\t\t\t\t\"area_name\": \"sundance\",\n\t\t\t\t\"crawled_at\": \"2018-03-05\",\n\t\t\t\t\"twenty_four_hour_total\": 4\n\t\t\t\t}\n\t\t]", "def create_data_model():\r\n data = {}\r\n data['distance_matrix'] = mtrx.create_distance_matrix(mtrx.create_data()) \r\n data['demands'] = clean.demands\r\n # Each location has a demand corresponding to the quantity—for example, \r\n # weight or volume—of the item to be picked up.\r\n data['vehicle_capacities'] = capacity\r\n # Each vehicle has a capacity: the maximum quantity that the vehicle can hold. \r\n # As a vehicle travels along its route, the total quantity of the items it is carrying \r\n # can never exceed its capacity.\r\n data['num_vehicles'] = number\r\n data['depot'] = 0\r\n return data", "def get_smmry_data(soup, game_dict):\n\n # Get date and time data.\n try:\n date_soup = soup.find(\"div\", {\"class\": \"spielbericht_tipp_status\"})\n league = date_soup.div.span.text\n date_string = date_soup.div.text\n date = re.search(r'\\d{2}.\\d{2}.\\d{2}', date_string).group(0)\n time = re.search(r'\\d{2}:\\d{2}', date_string).group(0)\n matchday = re.search(r'[|]\\d+', date_string).group(0)[1:]\n\n game_dict[\"league\"] = league\n game_dict[\"fb_date\"] = date\n game_dict[\"fb_time\"] = time\n game_dict[\"matchday\"] = matchday\n except AttributeError:\n pass\n\n # Get game result.\n try:\n result = soup.find(\"div\", {\"class\": \"stand\"}).text\n game_dict[\"result\"] = result\n except AttributeError:\n pass\n\n # Try to get the referee name.\n try:\n referee = soup.find(\"span\", {\"class\": \"schiri_link\"}).text\n game_dict[\"referee\"] = referee\n except AttributeError:\n pass\n\n # Get team, club name and repective url by team.\n try:\n smmry_soup = soup.find(\n \"div\", {\"class\": \"spielbericht_ergebnis_wrapper\"})\n club_title = smmry_soup.find_all(\"img\")\n team_title = smmry_soup.findAll(\"div\", {\"class\": \"teaminfo\"})\n\n # Loop through teams.\n for j, team in enumerate([\"home_\", \"away_\"]):\n game_dict[team + \"team\"] = team_title[j].a[\"title\"]\n game_dict[team + \"team_url\"] = team_title[j].a[\"href\"]\n game_dict[team + \"club\"] = club_title[j][\"title\"]\n except (AttributeError, TypeError):\n pass\n\n return game_dict", "def serialized_data(self):\n upcoming_shows = self.upcoming_shows\n past_shows = self.past_shows\n\n return {\n 'id': self.id,\n 'name': self.name,\n 'phone': self.phone,\n 'image_link': self.image_link,\n 'facebook_link': self.facebook_link,\n 'city': self.city.name,\n 'state': self.city.state_name,\n 'num_upcoming_shows': len(upcoming_shows),\n 'upcoming_shows_count': len(upcoming_shows),\n 'upcoming_shows': upcoming_shows,\n 'past_shows': past_shows,\n 'past_shows_count': len(past_shows),\n }", "def test_make_envs_dict(self):\r\n envs = make_envs_dict(self.l19_data, self.l19_sample_names,\r\n self.l19_taxon_names)\r\n for key in envs.keys():\r\n col_idx = self.l19_taxon_names.index(key)\r\n self.assertEqual(sum(envs[key].values()),\r\n self.l19_data[:, col_idx].sum())" ]
[ "0.5826197", "0.55371296", "0.5417445", "0.5337191", "0.52416694", "0.51694846", "0.5151711", "0.5091198", "0.5085503", "0.5074048", "0.5069187", "0.5063415", "0.50576264", "0.5021445", "0.50159806", "0.5006675", "0.4991931", "0.49876162", "0.49839446", "0.49812287", "0.49771714", "0.4976073", "0.49660462", "0.4920108", "0.49189234", "0.49144468", "0.49057344", "0.49013743", "0.48851863", "0.48794577", "0.4874787", "0.48725206", "0.48649722", "0.4862617", "0.4843661", "0.48422158", "0.4836905", "0.48342216", "0.48272362", "0.48242885", "0.4803854", "0.47859198", "0.4783136", "0.4780947", "0.4774142", "0.47732624", "0.47719684", "0.4768472", "0.47672066", "0.4758461", "0.4758057", "0.47574976", "0.47571838", "0.47537583", "0.47533932", "0.47532788", "0.4748903", "0.4747321", "0.47383043", "0.4731048", "0.471816", "0.4705787", "0.4704996", "0.47047663", "0.4702001", "0.46952632", "0.46863413", "0.46777782", "0.46723992", "0.46680152", "0.46679518", "0.4666979", "0.46620768", "0.46544614", "0.46516767", "0.4649256", "0.46477085", "0.46462062", "0.46402812", "0.4629526", "0.4627981", "0.46268702", "0.46234658", "0.46234334", "0.4620099", "0.46190557", "0.46186224", "0.46152994", "0.46149826", "0.46146262", "0.46111342", "0.46104008", "0.46082622", "0.4607455", "0.4606569", "0.46043572", "0.4603674", "0.4598378", "0.45931715", "0.4591333" ]
0.6585092
0
Slurp all the individual QA files, night by night. Loops on nights, generating QANight objects along the way
def slurp_nights(self, make_frameqa=False, remove=True, restrict_nights=None, write_nights=False, **kwargs): log = get_logger() # Remake? if make_frameqa: self.make_frameqa(**kwargs) # Reset log.info("Resetting QA_Night objects") self.qa_nights = [] # Loop on nights for night in self.mexp_dict.keys(): if restrict_nights is not None: if night not in restrict_nights: continue qaNight = QA_Night(night, specprod_dir=self.specprod_dir, qaprod_dir=self.qaprod_dir) qaNight.slurp(remove=remove) # Save nights self.qa_nights.append(qaNight) # Write? if write_nights: qaNight.write_qa_exposures()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_quasar(folder, set_type, doc_size):\n print(\"def process_quasar(folder, set_type, doc_size) ...\")\n\n # create counter for enumeration of batch-files\n counter = 0\n\n # Question File and Path\n question_file = set_type + \"_questions.json\"\n question_file_path = Path(\"/\".join([folder, \"questions\", question_file]))\n\n # Contexts File and Path\n context_file = set_type + \"_contexts.json\"\n context_file_path = Path(\"/\".join([folder, \"contexts\", doc_size, context_file]))\n\n with open(question_file_path, \"r\") as qf, open(context_file_path, \"r\") as cf:\n question_id_list = list()\n data_dict = dict()\n batches_data = list()\n\n # Parse each line separate to avoid memory issues\n for line in qf:\n parsed_question = json.loads(line)\n question_id = parsed_question[\"uid\"]\n question_id_list.append(question_id)\n data_dict[question_id] = {\"answer\": parsed_question[\"answer\"]}\n data_dict[question_id].update({\"question\": parsed_question[\"question\"]})\n\n # in order to create batches with the size of 30 and to avoid Memory Errors\n if len(data_dict) == 30:\n contexts_counter = 0\n for line2 in cf:\n parsed_answer = json.loads(line2)\n # Answer ID should have a corresponding question ID\n answer_id = parsed_answer[\"uid\"]\n if answer_id in question_id_list:\n contexts_counter += 1\n # List of contexts with retrieval scores, contexts are sorted from highest to lowest score\n answer_contexts = parsed_answer[\"contexts\"]\n # remove scores of contexts\n cleaned_answer_contexts = [ls_elem[1] for ls_elem in answer_contexts]\n data_dict[answer_id].update({\"contexts\": cleaned_answer_contexts})\n if contexts_counter == 30:\n contexts_counter = 0\n break\n\n # add information where answer in context is\n answers_list, questions_list, contexts_list = add_end_idx(data_dict)\n\n # create the batch-encodings\n batches_data.append(create_encodings(answers_list, questions_list, contexts_list))\n data_dict.clear()\n question_id_list.clear()\n # if len(batches_data) % 1000 == 0:\n\n print(\"\\n length batches_data \" + str(len(batches_data)) + \" \" + str(counter))\n\n if len(batches_data) == 2000:\n counter += 1\n save_batch_files(\"/local/anasbori/bert_odqa/ODQA_Bert_Project/batch_output\", batches_data,\n counter)\n\n batches_data.clear()\n\n counter += 1\n save_batch_files(Path(\"/local/anasbori/bert_odqa/ODQA_Bert_Project/batch_output\"), batches_data, counter)", "def test_reader(qn_filepath, answers_dirpath):\n qns = get_questions(qn_filepath)\n for qn in qns:\n if qn.qid == 100:\n q = qn\n break\n assert q\n docs = get_documents(answers_dirpath, q.qid)\n print docs\n print docs[0].content", "def _collect_quizzes():\n data_path = join(dirname(abspath(__file__)), 'data')\n for _, _, filenames in os.walk(data_path):\n for filename in filenames:\n if filename.endswith('.yml'):\n quiz_type = filename.replace('.yml', '').capitalize()\n QUIZ_DICT[quiz_type] = []\n with open(join(data_path, filename), encoding='utf-8') as f:\n data = yaml.load(f)\n for class_name, settings in data.items():\n Q = type(class_name, (Quiz, ), settings)\n QUIZ_DICT[quiz_type].append(Q)\n QUIZ_DICT_FLAT[class_name] = Q", "def __init__(self, specprod_dir=None, **kwargs):\n if specprod_dir is None:\n specprod_dir = specprod_root()\n self.specprod_dir = specprod_dir\n # Init\n QA_MultiExp.__init__(self, specprod_dir=specprod_dir, **kwargs)\n # Load up exposures for the full production\n nights = get_nights(specprod_dir=self.specprod_dir)\n for night in nights:\n self.mexp_dict[night] = {}\n for 
exposure in get_exposures(night, specprod_dir = self.specprod_dir):\n # Object only??\n frames_dict = get_files(filetype = str('frame'), night = night,\n expid = exposure, specprod_dir = self.specprod_dir)\n self.mexp_dict[night][exposure] = frames_dict\n # Output file names\n self.qaexp_outroot = self.qaprod_dir+'/'+self.prod_name+'_qa'\n # Nights list\n self.qa_nights = []", "def OBQAFacts():\n download_dataset(Collection.ALLEN_AI_OBQA, check_shallow_integrity)\n facts_file = os.path.join(\n OBQA_CACHE_DIR, \"OpenBookQA-V1-Sep2018\",\n \"Data\", \"Main\", \"openbook.txt\"\n )\n with open(facts_file, \"rt\") as f:\n for line in f:\n fact = line.strip(string.whitespace + \"\\\"\")\n if len(fact) > 0:\n yield fact", "def __call__(self, path):\n\n # Iterates through a directory of raw sources and builds staging databases\n databases = self.process(path)\n\n # Output database file\n qafile = os.path.join(path, \"questions.db\")\n\n # Build consolidated SQLite questions database\n db2qa = DB2QA()\n db2qa(databases, qafile)", "def main():\n now = time.strftime('%Y%m%d%H%M%S')\n\n # info = get_info(now)\n # info_filename = 'info_' + now + '.csv'\n # info.to_csv(os.path.join('..', '..', 'data', 'raw', info_filename), index=False)\n\n questions = get_questions(now)\n\n # don't talk about all this detail in the talk", "def wackydocs():\n for i, fil in enumerate(WACKYFILES):\n print >> sys.stderr, \"Reading wackypedia file %s %s...\" % (fil, common.str.percent(i+1, len(WACKYFILES)))\n print >> sys.stderr, stats()\n for j, doc in enumerate(wackydocs_in_file(fil)):\n if j % 10000 == 0:\n print >> sys.stderr, \"Reading wackypedia file %s %s, document #%d...\" % (fil, common.str.percent(i+1, len(WACKYFILES)), j)\n print >> sys.stderr, stats()\n yield doc", "def main():\n for tourney in tourneys:\n read_tourney(tourney)\n with open(\"obj/tournaments.pkl\", \"wb\") as f:\n pickle.dump(tournaments, f, pickle.HIGHEST_PROTOCOL)\n with open(\"obj/participants.pkl\", \"wb\") as f:\n pickle.dump(raw_participants, f, pickle.HIGHEST_PROTOCOL)\n with open(\"obj/matches.pkl\", \"wb\") as f:\n pickle.dump(all_matches, f, pickle.HIGHEST_PROTOCOL)", "def build_data(self):\n from desiutil.io import combine_dicts\n # Loop on exposures\n odict = {}\n for qanight in self.qa_nights:\n for qaexp in qanight.qa_exps:\n # Get the exposure dict\n idict = write_qa_exposure('foo', qaexp, ret_dict=True)\n odict = combine_dicts(odict, idict)\n # Finish\n self.data = odict", "def load_fhwa_records():\n print('--- Importing FHWA DFLTD v.2 records ---')\n for i in tqdm(range(len(tbl_project))):\n prj_id = tbl_project['lng_KeyProject'][i]\n\n expl_in_project = tbl_exploration[\n tbl_exploration.lng_KeyProject == prj_id].index\n for i_exp in expl_in_project:\n expl_id = tbl_exploration['txt_KeyExplorationName'][i_exp]\n\n piles_in_project = tbl_deepfoundation[\n tbl_deepfoundation.lng_KeyProject == prj_id].index\n for i_pile in piles_in_project:\n pile_id = tbl_deepfoundation['lng_KeyDeepFoundation'][i_pile]\n\n tests_for_pile = tbl_loadtest[\n (tbl_loadtest.lng_KeyProject == prj_id) &\n (tbl_loadtest.lng_KeyDeepFoundation == pile_id)\n ].index\n for i_lt in tests_for_pile:\n test_id = tbl_loadtest['lng_KeyLoadTest'][i_lt]\n\n # -- Adding Project Data -------------------------------- #\n if len(piles_in_project) > 1 and len(expl_in_project) < 2:\n wrn = 'Expanded from a project with multiple piles '\\\n 'and/or retests'\n prj = add_loc_proj(i, wrn)\n elif len(piles_in_project) < 2 and len(expl_in_project) > 1:\n wrn = 'Expanded from a 
project with multiple '\\\n 'explorations'\n prj = add_loc_proj(i, wrn)\n elif len(piles_in_project) > 1 and len(expl_in_project) > 1:\n wrn = 'Expanded from a project with multiple '\\\n 'explorations and multiple piles/retests'\n prj = add_loc_proj(i, wrn)\n else:\n prj = add_loc_proj(i)\n db.session.add(prj)\n\n # -- Adding Exploration Data ---------------------------- #\n exploration = add_expl_data(i_exp, expl_id, prj)\n db.session.add(exploration)\n\n # -- Adding Layer Data ---------------------------------- #\n add_layer_data(prj_id, expl_id, exploration)\n\n # -- Adding Pile Data ----------------------------------- #\n pile = add_pile_data(i_pile, prj_id, pile_id, prj)\n db.session.add(pile)\n\n # -- Adding Load Test Data ------------------------------ #\n load_test = add_load_test_data(i_lt, pile)\n db.session.add(load_test)\n\n # -- Adding Static Test Data ---------------------------- #\n add_static_test_data(prj_id, pile_id, test_id, load_test)\n\n # -- Adding Interpreted Data ---------------------------- #\n add_interp_data(prj_id, pile_id, test_id, load_test)\n\n db.session.commit()", "def extractquestions(xml_dir, output_path):\n # submissions : the form data submitted from the twentyquestions\n # HITs as a list of dictionaries mapping the question identifiers\n # to the free text, i.e.:\n #\n # [{'gameRoomJson': game_room_json_string}, ...]\n #\n submissions = _utils.extract_xml_dir(xml_dir)\n\n # extract the rows from the game room jsons\n row_strs = set()\n for submission in submissions:\n data = json.loads(submission['gameRoomJson'])\n\n # generate all the subject-question-answer triples created\n # during the game.\n subject = data['game']['round']['subject']\n for questionAndAnswer in data['game']['round']['questionAndAnswers']:\n # use an OrderedDict so the keys appear in the right order\n # in the JSON.\n row = collections.OrderedDict([\n ('subject', subject),\n ('question', questionAndAnswer['question']['questionText']),\n ('answer', questionAndAnswer['answer']['answerValue'])\n ])\n row_strs.add(json.dumps(row))\n\n # write out the data\n with click.open_file(output_path, 'w') as output_file:\n output_file.write('\\n'.join(sorted(row_strs)))", "def organise_qa_output(metadata, base_dir, write_tag):\n filenames = metadata['FITSImageFilename']\n for i, fits_file in enumerate(filenames):\n kat_target = katpoint.Target(metadata['KatpointTargets'][i])\n\n # Move QA report and create metadata\n pb_filebase = os.path.splitext(fits_file)[0] + '_PB'\n qa_report = pb_filebase + '_continuum_validation_snr5.0_int'\n pb_dir = _productdir(metadata, base_dir, i, '_PB', write_tag)\n\n qa_dir = _productdir(metadata, base_dir, i, '_QA', write_tag)\n os.mkdir(qa_dir)\n os.rename(os.path.join(pb_dir, qa_report), qa_dir)\n make_report_metadata(metadata, qa_dir)\n\n # Move RMS image and create metadata\n rms_dir = _productdir(metadata, base_dir, i, '_RMS', write_tag)\n os.mkdir(rms_dir)\n rms_image = pb_filebase + '_aegean_rms'\n mean_pb_rms = _calc_rms(os.path.join(pb_dir, rms_image + FITS_EXT))\n\n make_image_metadata(metadata, '_PB', pb_dir, i,\n 'Continuum Image PB corrected',\n 'Continuum image PB corrected',\n mean_pb_rms)\n\n os.rename(os.path.join(pb_dir, rms_image + FITS_EXT),\n os.path.join(rms_dir, rms_image + FITS_EXT))\n _add_missing_axes(os.path.join(rms_dir, rms_image + FITS_EXT))\n _caption_pngs(rms_dir, rms_image, kat_target, 'RMS PB Corrected')\n make_image_metadata(metadata, '_PB_aegean_rms', rms_dir, i,\n 'Continuum PB Corrected RMS Image',\n 'Continuum PB Corrected 
RMS image',\n mean_pb_rms)\n\n # Move MEAN image and create metadata\n bkg_dir = _productdir(metadata, base_dir, i, '_BKG', write_tag)\n os.mkdir(bkg_dir)\n bkg_image = pb_filebase + '_aegean_bkg'\n os.rename(os.path.join(pb_dir, bkg_image + FITS_EXT),\n os.path.join(bkg_dir, bkg_image + FITS_EXT))\n _add_missing_axes(os.path.join(bkg_dir, bkg_image + FITS_EXT))\n _caption_pngs(bkg_dir, bkg_image, kat_target, 'MEAN PB Corrected')\n make_image_metadata(metadata, '_PB_aegean_bkg', bkg_dir, i,\n 'Continuum PB Corrected Mean Image',\n 'Continuum PB Corrected Mean image',\n mean_pb_rms)\n\n # Remove .writing tag\n dir_list = [pb_dir, qa_dir, rms_dir, bkg_dir]\n for product_dir in dir_list:\n os.rename(product_dir, os.path.splitext(product_dir)[0])", "def main():\r\n\r\n directory = 'D:\\\\Profession\\\\Intern\\\\Assignments\\\\Codes\\\\Assignement Codes\\\\Part 2\\\\data_dumps'\r\n path = os.path.join(directory, 'dump_3')\r\n if not (os.path.exists(path)):\r\n os.mkdir(path)\r\n\r\n for date in range(1, 31):\r\n # date-month-year\r\n # file_name1 = path + '\\\\' + str(date) + '-8-2020' + '_file1.txt'\r\n\r\n # year-month-date\r\n # file_name1 = path + '\\\\' + '2020-08-' + str(date) + '_file3.txt'\r\n\r\n # month_year_date\r\n file_name1 = path + '\\\\' + 'Aug_2020_' + str(date) + '_file5.txt'\r\n\r\n # date-month-year\r\n # file_name2 = path + '\\\\' + str(date) + '-8-2020' + '_file2.txt'\r\n\r\n # year-month-date\r\n # file_name2 = path + '\\\\' + '2020-08-' + str(date) + '_file4.txt'\r\n\r\n # month_year_date\r\n file_name2 = path + '\\\\' + 'Aug_2020_' + str(date) + '_file6.txt'\r\n\r\n rows = []\r\n for row in range(100):\r\n string = 'asddfgfhgkhjghkweoriuywoipywbnxvnmznvnmbatr'\r\n rows.append(string)\r\n with open(file_name1, 'w') as f1, open(file_name2, 'w') as f2:\r\n f1.writelines(rows)\r\n f2.writelines(rows)", "def qa_test():\r\n # Reads Code and Runs Code Metrics\r\n with open(\"BrainDataVisualiser.py\",\"r\") as file:\r\n code = file.read()\r\n with open(\"QA_LOGS.txt\",\"a\") as file:\r\n # Timestamp and append metric results to log\r\n file.write(datetime.date.today().strftime(\"%b-%d-%Y\")+\"\\n\\t\")\r\n file.write(\"General Analysis\\n\\t\\t\")\r\n file.write(str(analyze(code))+\"\\n\\t\")\r\n file.write(\"Cyclomatic Complexity\\n\")\r\n for i in cc_visit(code):\r\n file.write(\"\\t\\t\"+cc_rank(i.complexity)+\" \"+str(i)+\"\\n\")", "def run(self):\n if not os.path.exists(self.output_folder):\n os.makedirs(self.output_folder)\n for entry in glob.glob(os.path.join(self.data_folder, self.data_expression)):\n f = open(entry)\n text = json.loads(f.read())\n f.close()\n self.create_page_objects(text)", "def main():\n courses = set()\n browser = create_browser()\n for k, v in COURSE_INDICES.items():\n browser.get(v)\n wait_for_load(browser)\n\n quarters = extract_quarters(browser)\n i = 0\n try:\n while i < len(quarters.options):\n load_quarter(browser, quarters, quarters.options[i])\n courses |= extract_courses(browser, k)\n i += 1\n except selenium.common.exceptions.WebDriverException as e:\n # Every so often the web application fails to load course descriptions,\n # resulting in stale or invisible node references. 
When these occur,\n # refresh the page and try extracting courses from the quarter again.\n logging.warning(\n 'An error occurred extracting courses from %s Seattle College\\n%s',\n titlecase.titlecase(k), e)\n quarters = extract_quarters(browser)\n\n output = io.StringIO()\n export_courses(courses, output)\n print(output.getvalue())\n output.close()\n\n browser.quit()", "def main():\n # %%\n CFG.profiles_yamls_path.mkdir(parents=True, exist_ok=True)\n fpaths = list( _Config.raw_profiles_path.glob('*.html') )\n print( f'{len(fpaths)} htmls found' )\n # %%\n fpath = CFG.raw_profiles_path / 'luis-mario-urrea-murillo.html'\n # %%\n fpath = CFG.raw_profiles_path / 'cristian-david-montoya-saldarriaga-09638514a.html'\n # %%\n fpaths = [ CFG.raw_profiles_path / 'ricardo-alarcon-44079b105.html' ]\n # %%\n fpaths = [ Path('/home/teo/_data/talent/linkedin_raw_profiles/israellaguan.html')]\n # %%\n dics = {}\n # %%\n\n for i, fpath in enumerate(fpaths):\n if fpath in dics:\n continue\n\n with fpath.open('rt') as f_in:\n html = f_in.read()\n\n print( f'\\n***{i+1}/{len(fpaths)} {fpath.name}:')\n dic = extract_one( html, fpath )\n dic['linkedin_url'] = f\"https://www.linkedin.com/in/{fpath.name.split('.')[0]}\"\n dic['scraped_at'] = dt.datetime.fromtimestamp( fpath.stat().st_ctime )\n # pprint(dic['work_stats'])\n dics[fpath] = dic\n\n dics_arr = list(dics.values())\n # %%\n del dics\n # %%\n\n with (CFG.profiles_yamls_path / 'all_profiles.json').open('wt') as f_out:\n json.dump( dics_arr, f_out, cls=DateTimeEncoder, indent=4 )\n # %%\n with (CFG.profiles_yamls_path / 'all_profiles.yaml').open('wt') as f_out:\n yaml.safe_dump( dics_arr, f_out )\n # %%\n df = produce_summary_table( dics_arr )\n df.to_excel( CFG.raw_profiles_path.parent / 'mined_ruby_candidates_sample.xlsx',\n index=False)\n # %%", "def test_AFQ_data():\n _, bids_path, _ = get_temp_hardi()\n\n for mapping in [SynMap(use_prealign=False), AffMap()]:\n myafq = api.AFQ(\n bids_path=bids_path,\n dmriprep='vistasoft',\n mapping=mapping)\n npt.assert_equal(nib.load(myafq.b0[\"01\"]).shape,\n nib.load(myafq.dwi_file[\"01\"]).shape[:3])\n npt.assert_equal(nib.load(myafq.b0[\"01\"]).shape,\n nib.load(myafq.dti_params[\"01\"]).shape[:3])\n myafq.rois\n shutil.rmtree(op.join(\n bids_path,\n 'derivatives/afq'))", "def release_qa():\n lines = StringIO.StringIO(local('find . 
-name \"*.py\"', capture=True))\n for line in lines.readlines():\n print \"PYLINT CHECK\"\n print \"-----------------------\"\n pyfile = os.path.normpath(line).replace(\"\\n\",\"\").replace(\"\\r\",\"\")\n \n reportfilename = pyfile.replace(\"./\", \"\").replace(\"/\", \"_\").replace(\".py\", \".txt\")\n reportpath = os.path.join(\"qa\", \"pylint\", reportfilename)\n\n options = {\"pyfile\":pyfile, \"reportpath\": reportpath}\n command = \"pylint %(pyfile)s > %(reportpath)s\" % options \n _subexec(command) \n\n print \"PEP8 CHECK\"\n print \"-----------------------\"\n reportpath = os.path.join(\"qa\", \"pep8\", reportfilename)\n options['reportpath'] = reportpath\n command = \"pep8 %(pyfile)s > %(reportpath)s\" % options\n _subexec(command)", "def __next__(self):\n # ++++ Get Next Four Lines ++++\n elemList = []\n for i in range(4):\n line = self._file.readline()\n self._currentLineNumber += 1 ## increment file position\n if line:\n elemList.append(line.strip('\\n'))\n else: \n elemList.append(None)\n \n # ++++ Check Lines For Expected Form ++++\n trues = [bool(x) for x in elemList].count(True)\n nones = elemList.count(None)\n # -- Check for acceptable end of file --\n if nones == 4:\n raise StopIteration\n # -- Make sure we got 4 full lines of data --\n assert trues == 4,\\\n \"** ERROR: It looks like I encountered a premature EOF or empty line.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n # -- Make sure we are in the correct \"register\" --\n assert elemList[0].startswith(self._hdSyms[0]),\\\n \"** ERROR: The 1st line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[0],self._currentLineNumber) \n assert elemList[2].startswith(self._hdSyms[1]),\\\n \"** ERROR: The 3rd line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[1],self._currentLineNumber) \n # -- Make sure the seq line and qual line have equal lengths --\n assert len(elemList[1]) == len(elemList[3]), \"** ERROR: The length of Sequence data and Quality data of the last record aren't equal.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber) \n \n # ++++ Return fatsQ data as tuple ++++\n return tuple(elemList)", "def load_questions(self, verbose=True):\n for question in self.question_list:\n question.load_question(self.data)", "def write_to_databse(fileName):\n f = open(fileName)\n queries = eval(open(fileName).read())\n for q in queries:\n site.write(q)\n print \"Quries are saved:)\"", "def make_qa_report(metadata, base_dir, write_tag):\n # Change directory as QA code writes output directly to the running directory\n work_dir = os.getcwd()\n\n filenames = metadata['FITSImageFilename']\n for i, fits_file in enumerate(filenames):\n pb_dir = _productdir(metadata, base_dir, i, '_PB', write_tag)\n pb_filebase = os.path.splitext(fits_file)[0] + '_PB'\n\n log.info('Write QA report output')\n os.chdir(pb_dir)\n pb_fits = os.path.join(pb_dir, pb_filebase + FITS_EXT)\n command = '/home/kat/valid/Radio_continuum_validation -I {} --telescope MeerKAT -F'\\\n ' /home/kat/valid/filter_config_MeerKAT.txt -r'.format(pb_fits)\n sysarg = shlex.split(command)\n with log_qa(log):\n rcv.main(sysarg[0], sysarg[1:])\n os.chdir(work_dir)", "def load_data(self, inroot=None):\n self.data = {}\n # Load\n 
for night in self.mexp_dict.keys():\n qaNight = QA_Night(night, specprod_dir=self.specprod_dir, qaprod_dir=self.qaprod_dir)\n qaNight.load_data()\n #\n self.data[night] = qaNight.data[night]", "def readQrels(fileName):\n ln = 0\n res = []\n\n with open(fileName) as f:\n for line in tqdm(f, desc='loading qrels (by line)', leave=False):\n ln += 1\n line = line.strip()\n if not line:\n continue\n try:\n e = parseQrelEntry(line)\n res.append(e)\n except:\n raise Exception('Error parsing QRELs in line: %d' % ln)\n\n return res", "def next(self):\n # ++++ Get Next Four Lines ++++\n elemList = []\n for i in range(4):\n line = self._file.readline()\n self._currentLineNumber += 1 ## increment file position\n if line:\n elemList.append(line.strip('\\n'))\n else:\n elemList.append(None)\n \n # ++++ Check Lines For Expected Form ++++\n trues = [bool(x) for x in elemList].count(True)\n nones = elemList.count(None)\n # -- Check for acceptable end of file --\n if nones == 4:\n raise StopIteration\n # -- Make sure we got 4 full lines of data --\n assert trues == 4,\\\n \"** ERROR: It looks like I encountered a premature EOF or empty line.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n # -- Make sure we are in the correct \"register\" --\n assert elemList[0].startswith(self._hdSyms[0]),\\\n \"** ERROR: The 1st line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[0],self._currentLineNumber)\n assert elemList[2].startswith(self._hdSyms[1]),\\\n \"** ERROR: The 3rd line in fastq element does not start with '%s'.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._hdSyms[1],self._currentLineNumber)\n # -- Make sure the seq line and qual line have equal lengths --\n assert len(elemList[1]) == len(elemList[3]), \"** ERROR: The length of Sequence data and Quality data of the last record aren't equal.\\n\\\n Please check FastQ file near line number %s (plus or minus ~4 lines) and try again**\" % (self._currentLineNumber)\n \n # ++++ Return fatsQ data as tuple ++++\n return tuple(elemList)", "def load_ditz_issues(where_they_live):\n\n if not os.path.isdir(where_they_live):\n raise ValueError(\"Sorry, %s must be a directory.\")\n\n for issue_file in \\\n glob(os.path.join(where_they_live, 'issue-*.yaml')):\n\n yield yaml.load(open(issue_file))", "def _generate_examples(self, filepath):\n logger.info(\"generating examples from = %s\", filepath)\n with open(filepath, encoding=\"utf-8\") as f:\n qasper = json.load(f)\n for id_ in qasper:\n qasper[id_][\"id\"] = id_\n yield id_, qasper[id_]", "def main():\n exit_if_already_started()\n while True:\n for timeframe in ['all', 'month', 'week']:\n subreddits = load_list('subs.txt')\n while subreddits:\n # Grab all images/comments from sub, remove from list\n parse_subreddit(subreddits.pop(0), timeframe)", "def add_all_exercises(exam_date, path_all, path_collection):\n type_list = [x for x in os.listdir(path_collection) if '.DS_Store' not in x]\n print(type_list)\n for i in range(len(type_list)):\n print('Type: ' + type_list[i])\n os.mkdir(path_all + '/' + type_list[i])\n path_type = path_collection + '/' + type_list[i]\n nb_ex_type = len(os.listdir(path_type)) # indexing file da 0\n for j in range(nb_ex_type):\n chosen_type_yaml = path_type + '/' + type_list[i] + str(j) + '.yaml'\n if j+1>=9:\n path_ex = path_all + '/' + type_list[i] + 
'/istanza_' + str(j+1)\n else:\n path_ex = path_all + '/' + type_list[i] + '/istanza_0' + str(j+1)\n print(path_ex)\n os.mkdir(path_ex)\n mode1.create_exercise(exam_date, str(j+1), path_ex, chosen_type_yaml)\n #mode2.create_exercise(str(i+1), path_ex, chosen_type_yaml)\n #mode3.create_exercise(str(i+1), path_ex, chosen_type_yaml)\n print('Exercise ' + str(j+1) + ' added')\n return", "def dump_qa(self):\n #- QA level outputs\n #qa_outfile = {}\n qa_outfig = {}\n for PA in self.palist:\n for QA in self.qalist[PA]:\n #qa_outfile[QA] = self.io_qa(QA)[0]\n qa_outfig[QA] = self.io_qa(QA)[1]\n \n #- make path if needed\n path = os.path.normpath(os.path.dirname(qa_outfig[QA]))\n if not os.path.exists(path):\n os.makedirs(path)\n\n return (qa_outfig)", "def extract_json_to_files(input_dir,output_dir):\n files={}\n files['train']='train-v1.1.json'\n files['dev']='dev-v1.1.json'\n\n for file in files:\n filename=os.path.join(input_dir,files[file])\n with open(filename,'r',encoding='utf-8') as data_file:\n examples = []\n dataset=json.load(data_file)\n count_total=total_exs(dataset)\n count_mapping_problem=0\n count_token_problem=0\n count_ansspan_problem=0\n count_examples=0\n for article_id in tqdm(range(len(dataset['data'])), desc=\"Preprocessing {}\".format(file)):\n article_paragraph=dataset['data'][article_id]['paragraphs']\n for paragraph_id in range(len(article_paragraph)):\n context=article_paragraph[paragraph_id]['context']\n context=context.replace(\"''\",'\"').replace(\"``\",'\"')\n context = context.replace('\\u3000', ' ').replace('\\u202f',' ').replace('\\u2009', ' ')#.replace(\"'\",\"'\")\n context=context.replace('\\-',' ')\n context_tokens=tokenize_sequence(context)\n context=context.lower()\n qas=article_paragraph[paragraph_id]['qas']\n charloc2wordloc=get_char_word_loc_mapping(context, context_tokens)\n if charloc2wordloc is None:\n count_mapping_problem+=len(qas)\n continue\n for qa in qas:\n question=qa['question'].lower()\n question_tokens=tokenize_sequence(question)\n\n ans_text=qa['answers'][0]['text'].lower()\n ans_text=ans_text.replace('\\u3000', ' ').replace('\\u202f', ' ').replace('\\u2009', ' ')\n ans_start_loc=qa['answers'][0]['answer_start']\n if qa['id'] in ['5706baed2eaba6190074aca5','57269c73708984140094cbb5','57269c73708984140094cbb7','572a11661d04691400779721','572a11661d04691400779722','572a11661d04691400779723','572a11661d04691400779724','572a11661d04691400779725','572a2cfc1d0469140077981b','572a3a453f37b319004787e9','572a84d3f75d5e190021fb3c']:\n ans_start_loc+=1\n if qa['id'] in ['572a5df77a1753140016aedf','572a5df77a1753140016aee0','572a84d3f75d5e190021fb38','572a84d3f75d5e190021fb39','572a84d3f75d5e190021fb3a','572a84d3f75d5e190021fb3b','572a85df111d821400f38bad','572a85df111d821400f38bae','572a85df111d821400f38baf','572a85df111d821400f38bb0']:\n ans_start_loc+=2\n if qa['id'] in ['572a5df77a1753140016aee1','572a5df77a1753140016aee2']:\n ans_start_loc+=3\n if qa['id'] in ['57286bf84b864d19001649d6','57286bf84b864d19001649d5']:\n ans_start_loc-=1\n if qa['id'] in ['5726bee5f1498d1400e8e9f3','5726bee5f1498d1400e8e9f4']:\n ans_start_loc-=2\n ans_end_loc=ans_start_loc+len(ans_text)\n\n if context[ans_start_loc:ans_end_loc]!=ans_text:\n count_ansspan_problem+=1\n continue\n ans_start_wordloc = charloc2wordloc[ans_start_loc][1] # answer start word loc\n ans_end_wordloc = charloc2wordloc[ans_end_loc-1][1] # answer end word loc\n assert ans_start_wordloc <= ans_end_wordloc\n\n ans_tokens = context_tokens[ans_start_wordloc:ans_end_wordloc + 1]\n if 
\"\".join(ans_tokens) != \"\".join(ans_text.split()):\n count_token_problem += 1\n #print(ans_text)\n #print(ans_tokens)\n continue # skip this question/answer pair\n examples.append((' '.join(context_tokens),' '.join(question_tokens),' '.join(ans_tokens),' '.join([str(ans_start_wordloc),str(ans_end_wordloc)])))\n print(\"Number of (context, question, answer) triples discarded due to char -> token mapping problems: \", count_mapping_problem)\n print(\"Number of (context, question, answer) triples discarded because character-based answer span is unaligned with tokenization: \",count_token_problem)\n print(\"Number of (context, question, answer) triples discarded due character span alignment problems (usually Unicode problems): \",count_ansspan_problem)\n print(\"Processed %i examples of total %i\\n\" % (len(examples), len(examples)+count_mapping_problem+count_token_problem+count_ansspan_problem))\n indices = list(range(len(examples)))\n np.random.shuffle(indices)\n with open(os.path.join(output_dir,file+'.context'),'w',encoding='utf-8') as context_file, \\\n open(os.path.join(output_dir,file+'.question'),'w',encoding='utf-8') as question_file, \\\n open(os.path.join(output_dir,file+'.answer'),'w',encoding='utf-8') as answer_file, \\\n open(os.path.join(output_dir,file+'.span'),'w',encoding='utf-8') as span_file:\n for i in indices:\n (context,question,answer,span)=examples[i]\n context_file.write(context+'\\n')\n question_file.write(question+'\\n')\n answer_file.write(answer+'\\n')\n span_file.write(span+'\\n')", "def main():\n\n # Run all the requirements for part A\n ##############################\n # Question 3\n # runs naive A*\n question_3()\n\n ##############################\n # Question 5\n # runs online A*\n question_5()\n\n ##############################\n # Question 7\n # runs online A* on fine grid\n question_7()\n\n\n # Run all the requirements for part B\n ##############################\n # Question 9\n question_9()\n\n ##############################\n # Question 10\n question_10()\n\n ##############################\n # Question 1\n question_11()", "def main():\n LESSONS_PATH = os.path.join(LESSON_LOCATOR_DATA, LESSON_SETS[0])\n ORIGINAL_LESSONS_PATH = os.path.join(LESSONS_PATH, \"original\")\n ANNOTATED_LESSONS_PATH = os.path.join(LESSONS_PATH, \"annotated\")\n\n if not os.path.exists(ANNOTATED_LESSONS_PATH):\n os.mkdir(ANNOTATED_LESSONS_PATH)\n\n print(\"Scanning original lessons in %s...\" % ORIGINAL_LESSONS_PATH)\n\n for item in os.listdir(ORIGINAL_LESSONS_PATH):\n if item == \".DS_Store\": continue\n\n print(\" found: %s\" % item)\n\n item_path = os.path.join(ORIGINAL_LESSONS_PATH, item)\n\n lesson_number = None\n lesson_description = None\n mobj = re.search(r'^AY\\s+(\\d+)\\s*-\\s*(.+)\\.txt$', item)\n if mobj:\n lesson_number = mobj.group(1)\n lesson_description = mobj.group(2)\n\n print(\" number: %s\" % lesson_number)\n print(\" description: %s\" % lesson_description)\n\n lesson = dict()\n lesson['number'] = lesson_number\n lesson['description'] = lesson_description\n\n fh = open(item_path)\n lesson_raw_text = fh.read()\n fh.close()\n lesson_text = re.split(r'\\n', lesson_raw_text)\n# lesson_raw_text_reencoded = lesson_raw_text.decode('mac-roman').encode('utf-8')\n# lesson_text = re.split(r'\\n', lesson_raw_text_reencoded)\n\n lesson['text'] = lesson_text\n lesson['parsed'] = parseLesson(lesson_text)\n\n if lesson['parsed']['end_of_lesson'] is None:\n print(\" lesson has no 'end of lesson' marker\")\n\n lesson_json = json.dumps(lesson, indent=4)\n annotated_lesson_path 
= os.path.join(ANNOTATED_LESSONS_PATH, \"ay_%04d.json\" % int(lesson_number))\n fh = open(annotated_lesson_path, \"w\")\n fh.write(lesson_json)\n fh.close()\n\n else:\n print(\"ERROR: File name not understood: %s\" % item)\n\n return 0", "def _read_analogies(self):\n questions = []\n questions_skipped = 0\n with open(self._options.eval_data, \"rb\") as analogy_f:\n for line in analogy_f:\n if line.startswith(\":\"): # Skip comments.\n continue\n words = line.strip().lower().split(\" \")\n # print words\n ids = [self._cate2id.get(w.strip()) for w in words]\n # print ids\n if None in ids or len(ids) != 4:\n questions_skipped += 1\n else:\n questions.append(np.array(ids))\n print(\"Eval analogy file: \", self._options.eval_data)\n print(\"Questions: \", len(questions))\n print(\"Skipped: \", questions_skipped)\n questions = np.array(questions, dtype=np.int32)\n self._analogy_questions = questions\n self._target_field = np.array(\n list(set(questions[:, 3])), dtype=np.int32)\n np.random.shuffle(self._analogy_questions)", "def parse2016(filename, qdict, cdict):\n \n tree = ET.parse(filename)\n root = tree.getroot()\n\n for child in root:\n # Each child represents a new (original question, related question) pair\n orgq_id = child.attrib[\"ORGQ_ID\"]\n relq_id = child[2].attrib[\"THREAD_SEQUENCE\"]\n orgq_comment = []\n relq_comment = []\n # get orgq_comment, relq_comment\n orgq_subject = child[0].text if child[0].text != None else \"\"\n orgq_body = child[1].text if child[1].text != None else \"\"\n DUPLICATE = True if \"SubtaskA_Skip_Because_Same_As_RelQuestion_ID\" in child[2].attrib else False \n for rel in child[2]:\n if rel.tag == \"RelQuestion\":\n relq_subject = rel[0].text if rel[0].text != None else \"\"\n relq_body = rel[1].text if rel[1].text != None else \"\"\n elif rel.tag == \"RelComment\":\n c_text = rel[0].text\n orgq_c_label = rel.attrib[\"RELC_RELEVANCE2ORGQ\"]\n orgq_comment.append((c_text, orgq_c_label))\n relq_c_label = rel.attrib[\"RELC_RELEVANCE2RELQ\"]\n relq_comment.append((c_text, relq_c_label))\n\n if DUPLICATE is False:\n qdict[relq_id] = (relq_subject, relq_body)\n cdict[relq_id] = relq_comment\n \n if (orgq_id in qdict) != (orgq_id in cdict):\n print(\"WARNING qdict inconsistent with cdict\")\n elif orgq_id not in qdict:\n qdict[orgq_id] = (orgq_subject, orgq_body)\n cdict[orgq_id] = relq_comment\n else:\n cdict[orgq_id] = cdict[orgq_id] + orgq_comment\n \n return qdict, cdict", "def __init__(self, examdb, number_of_questions, intended_learning_outcome_used, course_code,\n course_version, exam_date, allow_same_tags=False, existing_questions=None):\n try:\n assert (isinstance(number_of_questions, int))\n self.numQuest = number_of_questions\n self.ILOUsed = list(intended_learning_outcome_used)\n\n assert (isinstance(course_code, str))\n self.course_code = course_code\n\n assert (isinstance(course_version, float))\n self.course_version = course_version\n\n assert (isinstance(exam_date, date))\n self.exam_date = exam_date\n\n assert (isinstance(allow_same_tags, bool))\n self.allow_same_tags = allow_same_tags\n\n except AssertionError as err:\n print(\"Generate Questions By Goal init: \" + str(err))\n return\n\n self.ExamDB = examdb\n self._exam_id = {\n 'exam_id': '',\n 'question_ids': [],\n 'declaration_id': [],\n 'bibliography_id': []\n }\n\n self._objects = {'Declarations': [],\n 'Questions': [],\n }\n self._days = 365 # Number of days that a question is \"quarantined\".\n\n if existing_questions:\n for _qid in existing_questions:\n 
self._exam_id['question_ids'].append(_qid)\n self._add_question_to_exam(_qid)\n self.numQuest -= len(existing_questions)\n\n if self.numQuest > 0: # If there are more questions to add, run generator algorithm\n self._gen_questions_by_goals()", "def _generate_examples(self, files):\n idx = 0\n for filename in files:\n with open(filename) as file:\n for line in file:\n yield idx, {\"text\": line}\n idx += 1", "def parse_facs_files():\n\n #Load parser settings\n parser_settings = getattr(settings,'FACS_PARSER_SETTINGS')\n\n files_to_parse = [parser_settings['facs_source_directory']+f for f in os.listdir(parser_settings['facs_source_directory']) if '.exp' in f]\n\n for filename in files_to_parse: \n\n #Compute MD5 hash\n facs_file = file(filename,'rbU')\n md5hash = hashlib.md5(facs_file.read()).hexdigest()\n facs_file.close()\n \n #Skip file if previously parsed.\n if FacsFile.objects.filter(original_filename=filename,md5hash=md5hash):\n print 'Skipping ', filename\n continue\n\n #Open file, remove null bytes and prepare csv reader\n facs_file = file(filename, 'rU')\n csv_reader = csv.reader((x.replace('\\0', '') for x in facs_file),dialect=csv.excel_tab)\n\n #Reader header\n csv_header = csv_reader.next()\n facs_file_results = []\n\n #Parse the file\n for csv_row in csv_reader:\n if csv_row[0]:\n facs_file_results.append(dict(zip(csv_header,csv_row)))\n\n #Close the file\n facs_file.close()\n\n #Save the information to database and archive file\n random_ints = ''.join([str(random.randint(0,9)) for n in range(10)])\n archive_filename = parser_settings['facs_archive_directory'] + filename.split('/')[-1][:-4].split('_')[0] + '_' + random_ints + '.exp'\n shutil.move(filename, archive_filename)\n\n facs_file = FacsFile(\n original_filename = filename,\n md5hash = md5hash,\n archive_filename = archive_filename,\n )\n facs_file.save()\n\n #Remove empty elements\n for result in facs_file_results:\n for key, data in result.items():\n if data == '.' 
or not(data):\n del result[key]\n\n #Cache test code and interface mappings\n test_codes = []\n for testcode_mapping in TestCodeMapping.objects.filter(interface_name=parser_settings['testcode_interface_name']):\n test_code = testcode_mapping.code\n code = test_code.code\n code_mapping = testcode_mapping.code_mapping\n\n test_codes.append((code, code_mapping, test_code))\n\n #Add results to database\n for result in facs_file_results:\n\n #Parse result date\n result_date = dateutil.parser.parse(result[parser_settings['result_datetime']])\n result_error_code = getattr(result, parser_settings['error_codes'], '')\n result_identifier = result[parser_settings['sample_identifier']]\n result_cytometer = result[parser_settings['cytometer_serial']]\n\n #Create the dictionnary of result items.\n new_result_item_dict = {}\n for test_code, facs_file_column, test_code_object in test_codes:\n new_result_item_dict[test_code] = ResultItem(\n test_code = test_code_object,\n result_item_value = result[facs_file_column],\n error_code = result_error_code,\n result_item_datetime = result_date,\n )\n\n #Search for possible duplicate result\n is_duplicate = False\n for possible_duplicate in FacsResult.objects.filter(result_identifier=result_identifier):\n if possible_duplicate.get_resultitem_dict() == new_result_item_dict:\n is_duplicate = True\n break\n\n #Save result and result item to data if it is not a duplicate\n if not is_duplicate:\n \n new_result = FacsResult(\n result_identifier=result_identifier,\n result_datetime=result_date,\n origin_facs_file=facs_file,\n cytometer_serial_number=result_cytometer,\n )\n \n new_result.save()\n \n #Add the reference to the result for each item and save it to database.\n for item in new_result_item_dict.values():\n item.result = new_result\n item.save()\n\n new_result.link_to_requisition()", "def generate_files(self):\n\t\tapply_stemmer, xml_file, query_file, expected_file = self.read_config_file()\n\t\tself.generate_query_file(query_file, xml_file, apply_stemmer)\n\t\tself.generate_expected_file(expected_file, xml_file)\n\t\tlogging.info('FINALIZADO: MÓDULO PROCESSADOR DE CONSULTAS')", "def query(self, n_jobs=1) -> str:\n\n def get_one_answer(file):\n return json.dumps(ask_endpoint(file, os.path.join(self.url, \"take_exam\")))\n\n # send each file to the endpoint\n query_start_time = time.time()\n answers = Parallel(n_jobs=n_jobs)(delayed(get_one_answer)(file) for file in tqdm(self.filelist))\n query_end_time = time.time()\n query_time = query_end_time - query_start_time\n\n # put all answers to the dataframe\n answers = pd.DataFrame(answers, columns=[\"prediction\"])\n answers[\"prediction\"] = answers[\"prediction\"].apply(lambda x: json.loads(x))\n answers[\"path\"] = self.filelist\n\n # create report folder\n os.makedirs(self._report_path, exist_ok=False)\n # save raw answers\n answers.to_csv(os.path.join(self._report_path, \"raw_answers.csv\"), index=False)\n # parse answers\n parsed_answers = pd.DataFrame(columns=[\"path\",\n \"id\",\n \"prediction\"])\n for _, row in answers.iterrows():\n for k, v in row[\"prediction\"][\"answers\"].items():\n parsed_answers.loc[len(parsed_answers)] = [row[\"path\"], int(k), v]\n # save parsed answers\n parsed_answers = parsed_answers.sort_values(by=[\"path\", \"id\"]).reset_index(drop=True)\n parsed_answers.to_csv(os.path.join(self._report_path, \"parsed_answers.csv\"), index=False)\n # save statistics\n stats = {\n \"readiness_time\": self._readiness_time,\n \"query_total_files\": len(self.filelist),\n \"query_total_time\": 
query_time,\n \"query_n_jobs\": n_jobs,\n \"query_mean_latency\": query_time / len(self.filelist) * n_jobs,\n \"query_rps\": len(self.filelist) / query_time\n }\n with open(os.path.join(self._report_path, \"stats.json\"), \"w\") as f:\n json.dump(stats, f)\n\n return self._report_path", "def generate_qna_report(self, past_qna):\n response = []\n\n # reverse the order so correct report order can be generated\n past_qna.reverse()\n for qna in past_qna:\n\n found_page = False\n for page in self.module:\n\n if page[\"QID\"] == qna[\"QID\"]:\n found_page = True\n\n found_answer = False\n answer_list = []\n for answer in page[\"answers\"]:\n if answer[\"AID\"] in qna[\"AID\"]:\n found_answer = True\n answer_list.append({\n \"AID\": answer[\"AID\"],\n \"prettyAID\": answer.get(\"prettyAID\"),\n \"answer\": answer[\"answer\"],\n \"description\": answer[\"description\"],\n \"resources\": answer[\"resources\"]\n })\n\n response.append({\n \"QID\": page[\"QID\"],\n \"question\": page[\"question\"],\n \"description\": page[\"description\"],\n \"resources\": page[\"resources\"],\n \"answers\": answer_list\n })\n\n if not found_answer:\n raise ValueError(\"AID: \" + qna[\"AID\"] + \"not found!\")\n\n if not found_page:\n raise ValueError(\"QID: \" + qna[\"QID\"] + \" not found!\")\n\n return response", "def _gen_questions_by_goals(self):\n\n try:\n num_questions_per_goal = int(floor(self.numQuest / len(self.ILOUsed)))\n\n # Ensure that the number of questions requested are less than unique ILO's to be used.\n assert ((self.numQuest / len(self.ILOUsed)) >= 1)\n\n except ZeroDivisionError:\n print(\"No ILO's selected, or number of question in exam is set to 0\")\n return\n\n except AssertionError:\n print(\"There aren't enough questions for the number of ILO's chosen, increase the number of questions \" \\\n + \"or reduce the number of ILO's covered in this exam\")\n return\n\n rest = self.numQuest % len(self.ILOUsed)\n\n for ilo in self.ILOUsed:\n # Retrieve all questions that belongs to ilo\n self._get_questions_for_ilo(ilo[0], num_questions_per_goal)\n\n while rest > 0:\n ilo = random.choice(self.ILOUsed)\n self._get_questions_for_ilo(ilo[0], 1)\n rest -= 1\n\n return", "def test_multiple_output_files(self):\r\n convert_fastaqual(self.fasta_file_path,\r\n multiple_output_files=True,\r\n output_directory=self.output_dir,\r\n per_file_buffer_size=23)\r\n\r\n sample_id_s = [('PC.634', expected_fasta_634_default,\r\n expected_qual_634_default),\r\n ('PC.354', expected_fasta_354_default,\r\n expected_qual_354_default),\r\n ('PC.481', expected_fasta_481_default,\r\n expected_qual_481_default)]\r\n for sample_id, expected_fasta, expected_qual in sample_id_s:\r\n actual_output_fasta_path = get_filename_with_new_ext(\r\n self.fasta_file_path,\r\n '_' + sample_id + '.fna',\r\n self.output_dir)\r\n\r\n actual_output_qual_path = get_filename_with_new_ext(\r\n self.fasta_file_path,\r\n '_' + sample_id + '.qual',\r\n self.output_dir)\r\n\r\n actual_output_fasta = open(actual_output_fasta_path)\r\n actual_output_qual = open(actual_output_qual_path)\r\n actual_fasta = actual_output_fasta.read()\r\n actual_output_fasta.close()\r\n actual_qual = actual_output_qual.read()\r\n actual_output_qual.close()\r\n self._files_to_remove.append(actual_output_fasta_path)\r\n self._files_to_remove.append(actual_output_qual_path)\r\n\r\n self.assertEquals(actual_fasta, expected_fasta)\r\n self.assertEquals(actual_qual, expected_qual)", "def run_tests():\n with open(FILENAME) as file:\n # Loads testing parameters from the yaml file.\n 
tests = yaml.safe_load(file)\n\n # create a dataframe to keep the results\n test_dict = tests['Tests']\n results = pd.DataFrame(test_dict)\n results['Last Average Score'] = \"\"\n results['No of Q-Learning episodes'] = \"\"\n\n # run experiments:\n for i, test in enumerate(test_dict):\n grid = Rooms(test[\"env_size\"], testing=True)\n learning = QLearning(grid, test[\"gamma\"], test[\"alpha\"], test[\"agent_start_pos\"])\n e_greedy = Policy(\"e-greedy\", test[\"epsilon\"], test[\"decay\"])\n greedy = Policy(policy_type=\"greedy\")\n experiment = Experiments(grid, learning, greedy, test[\"iters\"],\n test[\"agent_start_pos\"], test[\"test_no\"])\n\n for session in range(test[\"iters\"]):\n learning.run_multiple_episodes(test[\"batch_episodes\"], e_greedy)\n mean_reward = experiment.run_experiments(test[\"exp_per_batch\"])\n\n results.loc[i,'Last Average Score'] = mean_reward\n results.loc[i,'No of Q-Learning episodes'] = (session + 1) * test[\"batch_episodes\"]\n\n # save results to csv file\n filename = 'results/' + 'test_table.csv'\n results.to_csv(filename)\n\n # plot & save graphs\n experiment.generate_results(test[\"test_no\"], test)\n\n return results", "def create_raw_data():\r\n for csv_file in glob.glob(raw_loc + 'ticket_data/PRR_*'):\r\n filestring =os.path.basename(csv_file)\r\n index_start = 1\r\n j = 0\r\n start = dt.datetime.now()\r\n print('{} file started at {}'.format(filestring, start.strftime(\"%H:%M\")))\r\n df = pd.read_csv(csv_file, encoding = 'utf-8', parse_dates = ['Tick Issue Date'])\r\n df = df.rename(columns = {c: c.replace(' ', '') for c in df.columns})\r\n try:\r\n df.to_sql('raw_ticket_data', con = conn, if_exists='append')\r\n except:\r\n print('File read error')\r\n\r\n\r\n print ('{} file finished in {:03.2f} minutes '.format(filestring, (dt.datetime.now()-start).seconds / 60))", "def setup():\n if not os.path.isfile(etymology_file):\n page = re.compile(r'index.php\\?l=\\w+&p=\\d+&allowed_in_frame=0.html')\n pages = list(find_files(directory=site, pattern=page, recursive=False))\n etymology = etymologies(pages)\n dump(etymology, etymology_file)\n for affix, dictionary in affixes(etymology):\n affix_file = os.path.join('resources', '{}.json'.format(affix))\n if not os.path.isfile(affix_file):\n dump(dictionary, affix_file)", "def main(json_input, output_folder):\n with open(json_input, 'r') as f:\n data = json.loads(f.read())['data']\n\n with open(path.join(output_folder, 'qgeneration.context.txt'), 'w') as context_f, \\\n open(path.join(output_folder, 'qgeneration.context.nojson.txt'), 'w') as context_f_nojson, \\\n open(path.join(output_folder, 'qgeneration.gold.txt'), 'w') as question_f:\n for item in data:\n context_f.write(json.dumps(item['context']) + '\\n')\n context_f_nojson.write(item['context'][:1000] + '\\n')\n question_f.write(json.dumps(item['answer']) + '\\n')\n\n print('DONE')", "def _generate_examples(self, filepath, split):\r\n if self.config.name == \"trex\":\r\n paths = filepath\r\n relations_path = paths[0]\r\n paths = paths[1:]\r\n all_rels = {}\r\n with open(relations_path, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n all_rels[data[\"relation\"]] = data\r\n id_ = -1\r\n for filepath in paths:\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n pred = all_rels.get(data[\"predicate_id\"], {})\r\n for evidences in data[\"evidences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"obj_uri\": str(data[\"obj_uri\"]),\r\n \"obj_label\": 
str(data[\"obj_label\"]),\r\n \"sub_uri\": str(data[\"sub_uri\"]),\r\n \"sub_label\": str(data[\"sub_label\"]),\r\n \"predicate_id\": str(data[\"predicate_id\"]),\r\n \"sub_surface\": str(evidences[\"sub_surface\"]),\r\n \"obj_surface\": str(evidences[\"obj_surface\"]),\r\n \"masked_sentence\": str(evidences[\"masked_sentence\"]),\r\n \"template\": str(pred.get(\"template\", \"\")),\r\n \"template_negated\": str(pred.get(\"template_negated\", \"\")),\r\n \"label\": str(pred.get(\"label\", \"\")),\r\n \"description\": str(pred.get(\"description\", \"\")),\r\n \"type\": str(pred.get(\"type\", \"\")),\r\n }\r\n elif self.config.name == \"conceptnet\":\r\n id_ = -1\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n if data.get(\"negated\") is not None:\r\n for masked_sentence, negated in zip(data[\"masked_sentences\"], data[\"negated\"]):\r\n id_ += 1\r\n yield id_, {\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"sub\": str(data.get(\"sub\", \"\")),\r\n \"obj\": str(data.get(\"obj\", \"\")),\r\n \"pred\": str(data[\"pred\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"masked_sentence\": str(masked_sentence),\r\n \"negated\": str(negated),\r\n }\r\n else:\r\n for masked_sentence in data[\"masked_sentences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"sub\": str(data.get(\"sub\", \"\")),\r\n \"obj\": str(data.get(\"obj\", \"\")),\r\n \"pred\": str(data[\"pred\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"masked_sentence\": str(masked_sentence),\r\n \"negated\": str(\"\"),\r\n }\r\n elif self.config.name == \"squad\":\r\n id_ = -1\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n for masked_sentence in data[\"masked_sentences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"id\": str(data[\"id\"]),\r\n \"sub_label\": str(data[\"sub_label\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"negated\": str(data.get(\"negated\", \"\")),\r\n \"masked_sentence\": str(masked_sentence),\r\n }\r\n elif self.config.name == \"google_re\":\r\n id_ = -1\r\n paths = filepath\r\n for filepath in paths:\r\n # from https://github.com/facebookresearch/LAMA/blob/master/scripts/run_experiments.py\r\n if \"place_of_birth\" in filepath:\r\n pred = {\r\n \"relation\": \"place_of_birth\",\r\n \"template\": \"[X] was born in [Y] .\",\r\n \"template_negated\": \"[X] was not born in [Y] .\",\r\n }\r\n elif \"date_of_birth\" in filepath:\r\n pred = {\r\n \"relation\": \"date_of_birth\",\r\n \"template\": \"[X] (born [Y]).\",\r\n \"template_negated\": \"[X] (not born [Y]).\",\r\n }\r\n else:\r\n pred = {\r\n \"relation\": \"place_of_death\",\r\n \"template\": \"[X] died in [Y] .\",\r\n \"template_negated\": \"[X] did not die in [Y] .\",\r\n }\r\n with open(filepath, encoding=\"utf-8\") as f:\r\n for row in f:\r\n data = json.loads(row)\r\n for masked_sentence in data[\"masked_sentences\"]:\r\n id_ += 1\r\n yield id_, {\r\n \"pred\": str(data[\"pred\"]),\r\n \"sub\": str(data[\"sub\"]),\r\n \"obj\": str(data[\"obj\"]),\r\n \"evidences\": str(data[\"evidences\"]),\r\n \"judgments\": str(data[\"judgments\"]),\r\n \"sub_w\": str(data[\"sub_w\"]),\r\n \"sub_label\": str(data[\"sub_label\"]),\r\n \"sub_aliases\": str(data[\"sub_aliases\"]),\r\n \"obj_w\": str(data[\"obj_w\"]),\r\n \"obj_label\": str(data[\"obj_label\"]),\r\n \"obj_aliases\": str(data[\"obj_aliases\"]),\r\n \"uuid\": str(data[\"uuid\"]),\r\n \"masked_sentence\": str(masked_sentence),\r\n \"template\": str(pred[\"template\"]),\r\n 
\"template_negated\": str(pred[\"template_negated\"]),\r\n }", "def all(config_file):\n with open(config_file) as f:\n config = json.load(f)\n scenes = get_realsense_scenes(config['realsense_dir'])\n all_dfs = []\n for scene in scenes:\n scene_data = get_data_from_scene(scene)\n logger.info(\"Evaluating - %s\", scene['scene_name'])\n df = run_test_on_scene(scene_data, config)\n all_dfs.append(df)\n\n df = pd.concat(all_dfs, axis=0)\n df = df.reset_index()\n print(df)\n df.to_csv(config['save_csv'])", "def import_squad_data():\n\n squad_url = (\n \"https://rajpurkar.github.io/SQuAD-explorer/dataset/dev-v2.0.json\"\n )\n squad_file = squad_url.split(\"/\")[-1] # last part of URL\n\n urllib.request.urlretrieve(squad_url, squad_file)\n\n if not os.path.isfile(squad_file):\n sys.exit(\"Dataset %s does not exist!\" % squad_file)\n\n with open(squad_file) as squad_file_handle:\n squad_data = json.load(squad_file_handle)[\"data\"]\n\n title_list = []\n ident_list = []\n context_list = []\n question_list = []\n impossible_list = []\n answer_start_list = []\n answer_text_list = []\n\n # 'data' contains title and paragraph list\n for it_art in squad_data:\n title = it_art[\"title\"]\n\n # 'paragraphs' contains context (the copy) and Q&A sets\n for it_par in it_art[\"paragraphs\"]:\n context = it_par[\"context\"]\n\n # 'qas' contains questions and reference answers\n for it_que in it_par[\"qas\"]:\n question = it_que[\"question\"]\n impossible = it_que[\"is_impossible\"]\n ident = it_que[\"id\"]\n\n # 'answers' contains the answer text and location in 'context'\n for it_ans in it_que[\"answers\"]:\n answer_start = it_ans[\"answer_start\"]\n text = it_ans[\"text\"]\n\n # set an empty answer for an impossible question\n if impossible:\n text = \"\"\n\n # add details of this answer to the list\n title_list.append(title)\n ident_list.append(ident)\n context_list.append(context)\n question_list.append(question)\n impossible_list.append(impossible)\n answer_start_list.append(answer_start)\n answer_text_list.append(text)\n\n squad_data_final = pandas.DataFrame(\n {\n \"id\": ident_list,\n \"subject\": title_list,\n \"context\": context_list,\n \"question\": question_list,\n \"clean_question\": [clean(question) for question in question_list],\n \"impossible\": impossible_list,\n \"answer_start\": answer_start_list,\n \"answer\": answer_text_list,\n }\n )\n\n return squad_data_final.drop_duplicates(keep=\"first\")", "def train_q(n=1000):\n for i in range(50):\n p1_strategy = strategies.QStrategy('X')\n p2_strategy = strategies.QStrategy('O')\n p1 = player.Player('X', p1_strategy)\n p2 = player.Player('O', p2_strategy)\n board = tictactoe.Board()\n game = rl_game.Game(p1, p2, board)\n game.play_many(n)\n p1.strategy.save_q()\n p2.strategy.save_q()", "def run_all(self):\n\n self.run_mash() ###Run MASH analysis\n self.filter_query() ###Filter fasta sequences out based on p value\n self.build_index(self.filtered_out_path) ###Build index for off-target analysis\n os.remove(self.filtered_out_path) ###Clean up intermediate fasta file\n self.format_gRNA(self.path1) ###Format everything in the right order\n self.run_OTF() ###Run off-target analysis\n self.output_parse() ###Parse output values and update table", "def full_load_db_from_file(batch_size=10000):\n\n q_set = QuestionSet(load=True)\n with open('.config/config.json', 'r') as f:\n config = json.load(f)\n config = config['pg']\n\n conn = psycopg2.connect(\n host=config['host'],\n database=config['db'],\n user=config['user'],\n password=config['password'],\n 
)\n\n i, values = 0, []\n for q in q_set.questions_ordered:\n values.append((\n q.id,\n q.question,\n q.options,\n q.answer,\n q.category_id,\n ))\n i += 1\n\n cur = conn.cursor()\n cur.execute('TRUNCATE TABLE questions')\n query = \"\"\"\n INSERT INTO questions (id, question, options, answer, category_id)\n VALUES {}\n \"\"\"\n\n j = 0\n log.info(\"Writing {} questions to DB...\".format(i))\n for chunk in chunks(values, batch_size):\n log.info('Batch {}...'.format(j + 1))\n j += 1\n\n args = ','.join(cur.mogrify(\"(%s, %s, %s, %s, %s)\", v).decode(\"utf-8\") for v in chunk)\n cur.execute(query.format(args))\n conn.commit()\n\n log.info(\"Data transfer complete.\")\n cur.close()", "def bowtie(self,options=\"--local -p 3\",indexes_folder=\"/mnt/sculpin/data5/Shelly/bin/bowtie2/INDEXES/tair10.cDNA\",genome_basename=\"tair10.cDNA\"):\n if not os.path.isdir(indexes_folder):\n print(\"Could not find your INDEXES Folder: %s\" % indexes_folder)\n\n while True:\n\n indexes_folder = raw_input(\"What is the path (abs or relative) to the Bowtie2 INDEXES: \")\n indexes_folder = os.path.abspath(os.path.expanduser(indexes_folder))\n\n if os.path.isdir(indexes_folder) and\\\n len([x for x in os.listdir(indexes_folder) if genome_basename in x]) > 0:\n\n print(\"Looks like that will work!\")\n break\n\n elif os.path.isdir(indexes_folder):\n print(\"I couldn't find a genome with a basename %s in %s\" %(genome_basename,indexes_folder))\n print(\"Try another folder\")\n\n else:\n print(\"Looks like that folder doesn't exist!\")\n\n\n # Bowtie to Yeast and Tair10\n for genome in [genome_basename]:\n # More specific for options for each genome\n if genome == \"yeast\":\n options += \" \"\n\n # Bowtie R1\n indexes = os.path.join(indexes_folder,genome)\n\n print(\"Bowtie-ing R1 reads to %s\" % genome)\n commandR1 = \" \".join([\"bowtie2\",options,indexes,\",\".join(self.R1),\"1> bowtie.R1.%s.sam 2> bowtie.R1.%s.stats\" % (genome,genome)])\n call(commandR1,shell=True)\n\n # Bowtie R2\n print(\"Bowtie-ing R2 reads %s\" % genome)\n commandR2 = \" \".join([\"bowtie2\",options,indexes,\",\".join(self.R2),\"1> bowtie.R2.%s.sam 2> bowtie.R2.%s.stats\" % (genome,genome)])\n call(commandR2,shell=True)\n\n # # Loading Bowtied Yeast ReadIDs into memory\n # yeast_bowtie_output = [x for x in os.listdir(os.getcwd()) if \"yeast\" in x and \"sam\" in x]\n # readIDs_to_remove = set()\n\n # for f in yeast_bowtie_output:\n # print(\"\\tLoading %f into Memory\" % f)\n # with open(f,\"r\") as input_file:\n # for line in input_file:\n # row = line.strip().split()\n\n # readID = row[0]\n # alignment = row[2]\n\n # if alignment != \"*\":\n # readIDs_to_remove.add(readID)\n\n # # Using these ReadID's parse the Tair10 sam files and remove readIDs\n # # that also bowtied to Yeast\n # print(\"Removing Yeast ReadIDs from Tair10 sam files\")\n\n # tair_bowtie_output = [x for x in os.listdir(os.getcwd()) if \".sam\" in x and \"tair\"]\n\n # for tair in tair_bowtie_output:\n # tair = os.path.join(\"../known_positives/alignments/\",tair)\n\n # if \"R1\" in tair:\n # output_file = open(\"bowtie.R1.no.yeast.sam\",\"w\")\n # elif \"R2\" in tair:\n # output_file = open(\"remove.R2.no.yeast.sam\",\"w\")\n\n # with open(tair,\"r\") as t_file:\n # for line in t_file:\n # row = line.strip().split()\n\n # readID = row[0]\n # alignment = row[2]\n\n # if readID not in yeast_readIDs and alignment != \"*\":\n # output_file.write(line)\n\n # output_file.close()", "def read_files(self):\n for f in self.filenames:\n 
self.games.extend(pgn.loads(open(f).read()))", "def run_test(self):\n\n # populate *_ps sets\n self.enter_project_file()\n\n # populate *_dir sets\n self.enter_directories()\n\n # The files in the directories makes up the largest possible set of files\n self.result_files = self.result_files_dir\n self.design_files = self.design_files_dir\n self.design_space_files = self.design_space_files_dir\n self.test_bench_files = self.test_bench_files_dir\n\n # populate *_ms sets\n self.enter_meta_results_file()\n\n # populate *_OK sets\n self.check_analysis_status()\n\n df = {'design_files_dir' : list(self.design_files_dir),'design_files_pr' : list(self.design_files_pr),\n 'design_files_ms' : list(self.design_files_ms), 'design_files_OK' : list(self.design_files_OK)}\n\n ds = {'design_space_files_dir' : list(self.design_space_files_dir),\n 'design_space_files_pr' : list(self.design_space_files_pr)}\n\n rs = {'result_files_dir' : list(self.result_files_dir), 'result_files_ms' : list(self.result_files_ms),\n 'result_files_OK' : list(self.result_files_OK)}\n\n tb = {'test_bench_files_dir' : list(self.test_bench_files_dir),\n 'test_bench_files_ms' : list(self.test_bench_files_ms)}\n\n srl = SummaryReportsLinks(self.result_files_dir)\n\n lf = {'files_linked_from_sum_reps' : srl.get_files(),\n 'folders_linked_from_sum_reps' : srl.get_folders()}\n\n # 'test_bench_files_pr' : list(self.test_bench_files_pr),\n \n json_test = {'design_files' : df, 'design_space_files' : ds, 'result_files' : rs,\n 'test_bench_files' : tb, 'stat_files' : self.stat_files,\n 'files_linked_from_sum_reps' : lf}\n\n with open('test_run.json','wb') as f_out:\n json.dump(json_test, f_out, indent=4)", "def read_files(dataroot, is_test=False, preprocess=False, aug_dataset_path=None):\n\tall_filenames = sorted(glob.glob(dataroot), key=alphanum_key)\n\tsamples_raw = []\n\tpreprocessed = 0\n\tskipped = 0\n\n\tfor fname in all_filenames:\n\t\twith open(fname, 'r') as fp:\n\t\t\ttry:\n\t\t\t\tproblem_data = json.load(fp)\n\t\t\t\t#print(fname)\n\t\t\texcept Exception as e:\n\t\t\t\tprint(f\"Error loading JSON from {fname}\", e)\n\t\t\t\traise e\n\n\t\tproblem = MATHProblem(\n\t\t\tfname,\n\t\t\tproblem_data['problem'],\n\t\t\tproblem_data['type'],\n\t\t\tproblem_data['level'],\n\t\t\texplanation=problem_data['explanation'] if not is_test else None,\n\t\t\tsolution=problem_data['solution'] if not is_test else None,\n\t\t\tequation=problem_data['equation'] if not is_test else None,\n\t\t\tjson_data=problem_data,\n\t\t\tpreprocess=preprocess,\n\t\t)\n\n\t\tif problem.no_expression:\n\t\t\tskipped += 1\n\t\tsamples_raw.append(problem)\n\n\tif not is_test and aug_dataset_path:\n\t\tprint('Adding another dataset to the samples...')\n\t\twith open(aug_dataset_path) as fp:\n\t\t\tdata = json.load(fp)\n\t\t\tfor data_item in data:\n\t\t\t\tproblem = MATHProblem(\n\t\t\t\t\t'augmented',\n\t\t\t\t\tdata_item['sQuestion'],\n\t\t\t\t\t'MWP',\n\t\t\t\t\t'NA',\n\t\t\t\t\tsolution=str(data_item['lSolutions'][0]),\n\t\t\t\t\tequation=data_item['new_equation'],\n\t\t\t\t)\n\n\t\t\t\tif problem.no_expression:\n\t\t\t\t\tskipped += 1\n\t\t\t\tsamples_raw.append(problem)\t\n\n\tprint('There are no expressions for {} in {} items'.format(skipped, len(samples_raw)))\n\treturn samples_raw", "def main(raw_filepath, interim_filepath, processed_filepath):\n raw_filepath = Path(raw_filepath)\n interim_filepath = Path(interim_filepath)\n processed_filepath = Path(processed_filepath)\n\n logger = logging.getLogger(__name__)\n logger.info('making final data set from raw 
data')\n\n years = ['2010', '2011', '2012', '2013', '2014']\n\n #############################################################\n ################ Life Expectancy Outcome ####################\n #############################################################\n\n le_birth = pd.read_csv(raw_filepath / 'US_A.csv',\n usecols=['Tract ID', 'e(0)'],\n dtype={'Tract ID': \"object\"}) \\\n .rename(columns={'Tract ID': 't10_cen_uid_u_2010'}) \\\n .set_index('t10_cen_uid_u_2010')\n\n le_other = pd.read_csv(raw_filepath / 'US_B.csv',\n usecols=['Tract ID', 'Age Group', 'e(x)'],\n dtype={'Tract ID': \"object\"}) \\\n .rename(columns={'Tract ID': 't10_cen_uid_u_2010'}) \\\n .set_index(['t10_cen_uid_u_2010', 'Age Group']) \\\n .sort_index() \\\n .loc[(slice(None), ['15-24', '35-44', '55-64']), :] \\\n .unstack() \\\n .reindex(le_birth.index) # use the same tracts for all experiments\n\n le_other.columns = ['e(20)', 'e(40)', 'e(60)']\n\n # le_birth.to_csv(processed_filepath / 'y_00.csv', header=True)\n # le_other['e(20)'].to_csv(processed_filepath / 'y_20.csv', header=True)\n # le_other['e(40)'].to_csv(processed_filepath / 'y_40.csv', header=True)\n # le_other['e(60)'].to_csv(processed_filepath / 'y_60.csv', header=True)\n\n\n ##############################################################\n ################## Priority Dataset ##########################\n ##############################################################\n\n with open(raw_filepath / 'T10_Priority_Wide_Interpolated.csv', 'r') as f:\n cols = f.readline().strip().split(',')\n\n proj_cols = [x for x in cols if x[-4:] in years]# and\n # get all the priority NETS columns for later\n net_cols = ['t10_cen_uid_u_2010'] + [x[:11] + '_d_' + x[14:] for x in cols if '_net_' in x]\n\n data_X = pd.read_csv(raw_filepath / 'T10_Priority_Wide_Interpolated.csv', usecols=proj_cols,\n dtype={'t10_cen_uid_u_2010': \"object\"}) \\\n .set_index('t10_cen_uid_u_2010')\n\n # Create % younger than 25 (this method is far less than ideal)\n ag25up = data_X.filter(regex='.*(_pop_c_|ag25up).*')\n ag25up_coltuples = [(x[:-4], x[-4:]) for x in ag25up.columns]\n ag25up.columns = pd.MultiIndex.from_tuples(ag25up_coltuples)\n ag25up_long = ag25up.stack()\n ag25dwn_p = ((ag25up_long['t10_ldb_pop_c_'] - ag25up_long['t10_ldb_ag25up_c_'])\n / ag25up_long['t10_ldb_pop_c_']).unstack()\n ag25dwn_p.columns = ['t10_ldb_ag25dwn_p_' + x for x in ag25dwn_p.columns]\n\n # Create % older than 65\n ag65up = data_X.filter(regex='.*(_pop_c_|a60up).*')\n ag65up_coltuples = [(x[:-4], x[-4:]) for x in ag65up.columns]\n ag65up.columns = pd.MultiIndex.from_tuples(ag65up_coltuples)\n ag65up_long = ag65up.stack()\n ag65up_p = (ag65up_long['t10_ldb_a60up_c_'] / ag65up_long['t10_ldb_pop_c_']) \\\n .unstack()\n ag65up_p.columns = ['t10_ldb_ag60up_p_' + x for x in ag65up_p.columns]\n\n # Add our new measure\n data_X = pd.concat([data_X, ag25dwn_p, ag65up_p], axis=1)\n\n # Get rid of all count variables, including nets\n no_count_cols = [x for x in data_X.columns if '_c_' not in x]\n data_X = data_X[no_count_cols]\n\n\n drop_cols = ['t10_gis_area_l_2010',\n 'm10_cen_uid_u_2010',\n 'm10_cen_memi_x_2010',\n 'c10_cen_uid_u_2010',\n 'z10_cen_uid_u_2010']\n\n data_X = data_X.drop(columns=drop_cols) \\\n .reindex(le_birth.index)\n\n data_X.columns = pd.Index([(x[:-5], int(x[-4:])) for x in data_X.columns])\n\n X_priority = data_X.groupby(axis=1, level=0).mean()\n X_priority.to_csv(interim_filepath / 'X_priority.csv')\n\n ###########################################################\n #################### NETS Dataset 
#########################\n ###########################################################\n\n X_nets_allyrs = pd.read_csv(raw_filepath / 'recvd_t10_vars_v8_20190607.csv', usecols=net_cols,\n dtype={'t10_cen_uid_u_2010': \"object\"}) \\\n .set_index('t10_cen_uid_u_2010') \\\n .reindex(le_birth.index)\n\n X_nets_allyrs.columns = pd.Index([(x[:-5], int(x[-4:])) for x in X_nets_allyrs.columns])\n X_nets = X_nets_allyrs.groupby(axis=1, level=0).mean()\n X_nets.to_csv(interim_filepath / 'X_nets.csv')\n\n # Split predictive data by Variable Set\n X_all = pd.concat([X_priority, X_nets], axis=1) \\\n .dropna(how='any')\n\n final_index = le_birth.index.intersection(X_all.index)\n X_all = X_all.reindex(final_index)\n le_birth = le_birth.reindex(final_index)\n le_other = le_other.reindex(final_index)\n\n le_birth.to_csv(processed_filepath / 'y_00.csv', header=True)\n le_other['e(20)'].to_csv(processed_filepath / 'y_20.csv', header=True)\n le_other['e(40)'].to_csv(processed_filepath / 'y_40.csv', header=True)\n le_other['e(60)'].to_csv(processed_filepath / 'y_60.csv', header=True)\n\n # Var Set 1\n p1_features = ['t10_ldb_hinci_m',\n 't10_ldb_pop_d',\n 't10_ldb_nhblk_p',\n 't10_ldb_hisp_p',\n 't10_ldb_col_p']\n X_p1 = X_all[p1_features]\n X_p1.to_csv(processed_filepath / 'X_varGroup1.csv')\n\n # Var Set 2\n p2_features = [\n \"t10_ldb_hinci_m\",\n \"t10_ldb_pop_d\",\n \"t10_ldb_ag25dwn_p\",\n \"t10_ldb_ag60up_p\",\n \"t10_ldb_nhblk_p\",\n \"t10_ldb_hisp_p\",\n \"t10_ldb_col_p\",\n \"t10_ldb_lep_p\",\n \"t10_ldb_mrenti_m\",\n \"t10_ldb_multi_p\",\n \"t10_ldb_nhwht_p\",\n \"t10_ldb_asian_p\",\n \"t10_ldb_fb_p\",\n \"t10_ldb_hs_p\",\n \"t10_ldb_unemp_p\",\n \"t10_ldb_npov_p\",\n \"t10_ldb_vac_p\",\n \"t10_ldb_own_p\",\n \"t10_ldb_mhmvali_m\"\n ]\n X_p2 = X_all[p2_features]\n X_p2.to_csv(processed_filepath / 'X_varGroup2.csv')\n\n # Var Set 3\n X_p3 = X_nets.reindex(final_index)\n X_p3.to_csv(processed_filepath / 'X_varGroup3.csv')\n\n # Var Set 4\n X_p4 = X_all\n X_p4.to_csv(processed_filepath / 'X_varGroup4.csv')", "def main():\n\n # Create a directory to store copies of all the relevant HTML files (those\n # will be used in testing).\n print('Setting up backup dir if needed ...')\n create_dir(BACKUP_DIR)\n\n # Make backup of the IMDB top 250 movies page\n print('Access top 250 page, making backup ...')\n top_250_html = simple_get(TOP_250_URL)\n top_250_dom = BeautifulSoup(top_250_html, \"lxml\")\n\n make_backup(os.path.join(BACKUP_DIR, 'index.html'), top_250_html)\n\n # extract the top 250 movies\n print('Scraping top 250 page ...')\n url_strings = scrape_top_250(top_250_dom)\n\n # grab all relevant information from the 250 movie web pages\n rows = []\n for i, url in enumerate(url_strings): # Enumerate, a great Python trick!\n print('Scraping movie %d ...' 
% i)\n\n # Grab web page\n movie_html = simple_get(url)\n\n # Extract relevant information for each movie\n movie_dom = BeautifulSoup(movie_html, \"lxml\")\n rows.append(scrape_movie_page(movie_dom))\n\n # Save one of the IMDB's movie pages (for testing)\n if i == 83:\n html_file = os.path.join(BACKUP_DIR, 'movie-%03d.html' % i)\n make_backup(html_file, movie_html)\n\n # Save a CSV file with the relevant information for the top 250 movies.\n print('Saving CSV ...')\n save_csv(os.path.join(SCRIPT_DIR, 'top250movies.csv'), rows)", "def run_scrapping():\n date = datetime.now().strftime(\"%Y-%m-%d\")\n size = 100\n r = list(range(size))\n random.shuffle(r)\n for i in r:\n scrap_page(url_page.format(i), date)\n print(str(i) + \" / \" + str(size))", "def save_questionless_xqa(data: Dict, question_file: str, gold_file: str):\n with open(question_file, \"w\") as f_question:\n with open(gold_file, \"w\") as f_gold:\n for item in data.values():\n current_json = item.get_xqa_json()\n current_json[\"question\"] = \"\"\n json.dump(current_json, f_question, ensure_ascii=False)\n f_question.write(\"\\n\")\n json.dump(item.get_answer_json(), f_gold, ensure_ascii=False)\n f_gold.write(\"\\n\")", "def load_xqa_wrapper(path: str, part: str):\n if part == \"all\":\n parts = [\"dev\", \"test\"]\n data = {}\n if os.path.exists(join(path, \"train_doc.json\")):\n parts.append(\"train\")\n for part in parts:\n question_data = join(path, f\"{part}_doc.json\")\n gold_data = join(path, f\"{parts}.txt\")\n data = {**data, **load_xqa(question_data, gold_data)}\n else:\n question_data = join(path, f\"{part}_doc.json\")\n gold_data = join(path, f\"{part}.txt\")\n data = load_xqa(question_data, gold_data)\n logging.info(f\"Data loaded of size {len(data)}\")\n return data", "def poretools_fastq():\n dirs = os.listdir(my_dir)\n for folder in dirs:\n path_to_folder = os.path.join(my_dir, folder)\n subprocess.check_output(\"poretools fastq --type fwd {}//*.fast5 > {}_poretools.fq\"\n .format(path_to_folder, path_to_folder), shell=True)\n print(\"Finished folder {}\".format(folder))\n print(\"Finished extractions of FASTQs.\")", "def loadallskills(self):\r\n for skill in os.listdir( os.path.join( es.getAddonPath( info.basename ), \"skills\" )):\r\n es.load(\"%s/skills/%s\" % (info.basename, skill))", "def sudokus(qns=\"test/qns.txt\", ans=\"test/ans.txt\"):\n with open(qns) as fobj_qns, open(ans) as fobj_ans:\n for qn, an in zip(fobj_qns, fobj_ans):\n yield (qn.strip(), an.strip())", "def test_loads_all_example_data(self):\n for file_name in EXAMPLE_CARTS:\n with open(f\"examples/{file_name}\", \"r\") as f:\n cart = json.load(f)\n\n Item.Schema(many=True).load(cart)", "def scrape(self):\n\n self.jobs_load()\n self.new_jobs = []\n\n for bot in self.bot_squad:\n self.new_jobs += bot.scrape_all_pages()\n\n self.statistics(self.new_jobs)\n print('SCRAPE COMPLETE. 
NOTE: Resulting job list still in RAM')\n print('We observed %d new jobs' % len(self.new_jobs))", "def run_queries(q, file): \n data = csv(cd(file)) # modified to point to Data dir.\n seen = set(col(0, data))\n \n for q in reversed(q):\n for t in twitter(q):\n if t.id not in seen:\n data.append((\n t.id,\n t.author,\n t.language,\n t.text,\n t.date,\n t.likes,\n ))\n seen.add(t.id)\n\n data.save()", "def save_xqa(data: Dict, question_file: str, gold_file: str):\n with open(question_file, \"w\") as f_question:\n with open(gold_file, \"w\") as f_gold:\n for item in data.values():\n json.dump(item.get_xqa_json(), f_question, ensure_ascii=False)\n f_question.write(\"\\n\")\n json.dump(item.get_answer_json(), f_gold, ensure_ascii=False)\n f_gold.write(\"\\n\")", "def process_datasets(self):\n\n with open(self.mappings, \"r+\") as json_file:\n emsl_to_jgi = json.load(json_file)\n emsl_to_jgi_copy = copy.deepcopy(emsl_to_jgi)\n\n contaminant_file_loc = emsl_to_jgi[\"contaminant_file_loc\"]\n # run for each dataset\n for dataset_id, values in emsl_to_jgi.items():\n if dataset_id not in [\n \"contaminant_file_loc\",\n \"analysis_activity_file_loc\",\n \"data_objects_file_loc\",\n \"STUDY\",\n \"tools_used\",\n ]:\n raw_file_loc = values[\"raw_file_loc\"]\n self.dataset_name = values[\"dataset_name\"]\n # dataset search against a fasta file\n for genome_directory, locations in values[\n \"genome_directory\"\n ].items():\n # clear object to prepare next job\n ANALYSIS_JOBS_OBJECT.clear()\n\n # create log_dir\n self.save_job_results = os.path.join(\n self.result_loc, dataset_id, genome_directory\n )\n self.log_collected_at = os.path.join(\n os.path.abspath(self.save_job_results), \"analysis_jobs_logs\"\n )\n if not os.path.exists(self.log_collected_at):\n os.makedirs(self.log_collected_at)\n\n files = [locations[\"faa_file_loc\"], contaminant_file_loc]\n contaminated_faa_file_loc = self.contaminate_fasta(files)\n\n self.register_job_in_emsl_to_jgi(\n dataset_id,\n genome_directory,\n \"contaminated_faa_file_loc\",\n contaminated_faa_file_loc,\n emsl_to_jgi_copy,\n )\n # convert .faa to .txt\n faa_txt_file = self.convert_faa2txt(\n dataset_id, contaminated_faa_file_loc\n )\n self.register_job_in_emsl_to_jgi(\n dataset_id,\n genome_directory,\n \"txt_faa_file_loc\",\n faa_txt_file,\n emsl_to_jgi_copy,\n )\n\n # log & run job\n self.run_n_log_job(\n dataset_id,\n genome_directory,\n contaminated_faa_file_loc,\n raw_file_loc,\n emsl_to_jgi_copy,\n )\n\n # merge analysis\n resultant_file = self.merge_analysis_jobs(\n dataset_id, genome_directory\n )\n self.register_job_in_emsl_to_jgi(\n dataset_id,\n genome_directory,\n \"resultant_file_loc\",\n resultant_file,\n emsl_to_jgi_copy,\n )\n\n # capture the job metadata object\n logger.info(\"Jobrun\", extra=LOGGED_ANALYSIS_JOB)\n\n # update emsl_to_jgi.json\n json_file.seek(0) # move back to BOF.\n json_file.truncate()\n json_file.write(json.dumps(emsl_to_jgi_copy, default=str, indent=4))\n pass", "def getQuestions(self, CSVfile, week): #create the list of questions for this week\n questions = []\n RawData = csv.reader(open(self.CSVfile, newline=''))\n qLine = RawData.__next__()\n for question in qLine[0:-1]:\n questions.append(Question(question, \n qLine.index(question),\n CSVfile,\n week))\n self.questions = questions", "def Load_trials(files=[], trials=[]):\n\n # adds each file to files list\n\n while True:\n new_file = Add_file(files)\n if new_file:\n files.append(new_file)\n else:\n break\n\n for file in files:\n try:\n ff = open(file)\n 
failed_to_read_counter = 0\n ff.readline() # skips the title line\n line_read_counter = 0\n while True:\n line_read_counter += 1\n try:\n line = ff.readline()\n except:\n failed_to_read_counter += 1\n continue\n # breaks at last line\n if not line:\n break\n else:\n # splits by tabs\n try:\n fields = line.split(\"\\t\")\n rank = int(fields[0])\n except:\n continue\n # instances a new trial for each line and includes in list of trials\n trial = (\n Trial.add_trial(rank, fields[1], fields[2], fields[3], fields[4], fields[5], fields[6], fields[7],\n file))\n ff.close()\n finally:\n a=0\n return trials", "def test_multiple_output_files(self):\r\n convert_fastq(self.fasta_file_path, self.qual_file_path,\r\n multiple_output_files=True,\r\n output_directory=self.output_dir,\r\n per_file_buffer_size=23)\r\n\r\n sample_ids = [('PC.634', expected_fastq_634_default),\r\n ('PC.354', expected_fastq_354_default),\r\n ('PC.481', expected_fastq_481_default)]\r\n for sample_id, expected_output in sample_ids:\r\n actual_output_file_path = get_filename_with_new_ext(\r\n self.fasta_file_path,\r\n '_' + sample_id + '.fastq',\r\n self.output_dir)\r\n\r\n actual_output_file = open(actual_output_file_path)\r\n actual_output = actual_output_file.read()\r\n actual_output_file.close()\r\n self._files_to_remove.append(actual_output_file_path)\r\n\r\n self.assertEquals(actual_output, expected_output)", "def test3_advanced_info(self):\n\t\tprint \"\\nTEST 3: Extracting detailed entities info from each ontology in %s folder.\\n=================\" % DATA_FOLDER\n\n\t\tfor f in os.listdir(DATA_FOLDER):\n\t\t\tif not f.startswith('.'):\n\t\t\t\tprint \"Loading... >\", f\n\n\t\t\t\t# divert output to a file temporarily \n\t\t\t\tsaveout = sys.stdout \n\t\t\t\tfsock = open('out.log', 'w') \n\t\t\t\tsys.stdout = fsock \n\t\t\t\t\n\t\t\t\to = ontospy.Ontology(DATA_FOLDER + f)\n\t\t\t\tprintEntitiesInformation(o)\t\t\t\t\n\t\t\t\t\n\t\t\t\tsys.stdout = saveout\n\t\t\t\tfsock.close()\n\t\t\t\tprint \"Success.\"", "def generate_report_directories_and_files(self):\n target = r'X:\\ANALYST WORK FILES\\Peter\\Rover\\reports\\ '\n mushroom_target = r'X:\\ANALYST WORK FILES\\Peter\\Rover\\reports\\mushroom_reports\\ '\n for key, value in self.finished_reports_dictionary.items():\n if self.basic_reports_dictionary == \"MUSH\":\n try:\n jobnumber = str(key)\n filename = mushroom_target[:-1] + jobnumber[0:6] + '\\\\' + jobnumber + '_raw.tex'\n filename = filename.replace('/', '-')\n with self.safe_open_w(filename) as f:\n f.write(value)\n except OSError:\n pass\n else:\n try:\n jobnumber = str(key)\n filename = target[:-1] + jobnumber[0:6] + '\\\\' + jobnumber + '_raw.tex'\n filename = filename.replace('/', '-')\n with self.safe_open_w(filename) as f:\n f.write(value)\n except OSError:\n pass\n if self.basic_reports_dictionary == \"MUSH\":\n pass\n else:\n for key, value in self.basic_reports_dictionary.items():\n try:\n jobnumber = str(key)\n filename = target[:-1] + jobnumber + '\\\\' + jobnumber + '.txt'\n filename = filename.replace('/', '-')\n with self.safe_open_w(filename) as f:\n for item in value:\n f.write(item[0])\n f.write(item[1].to_string())\n f.write('\\n\\n')\n except OSError:\n pass", "def _extract_fastqs_from_fast5(self):\n\t\tfor id, h5path in fastq_paths[self.version].iteritems():\n\t\t\ttry:\n\t\t\t\ttable = self.hdf5file[h5path % self.group]\n\t\t\t\tfq = formats.Fastq(table['Fastq'][()])\n\t\t\t\tfq.name += \" \" + self.filename\n\t\t\t\tself.fastqs[id] = fq\n\t\t\texcept Exception, e:\n\t\t\t\tpass", "def 
setUp(self):\r\n\r\n qiime_dir = get_qiime_project_dir()\r\n\r\n self.key = 'qiime_test'\r\n self.project_id = 'qiime_test'\r\n self.sample_id = 'qiime_sample1'\r\n self.params = [('key', self.key), ('sample', self.sample_id),\r\n ('project', self.project_id)]\r\n test_dir = path.dirname(path.abspath(__file__))\r\n self.seq_file = path.join(test_dir, 'test_support_files',\r\n 'qiime_tutorial_split_lib_seqs_subset.fna')\r\n self.output_dir = mkdtemp()\r\n self.sample_file = [('file', 'qiime_test.fna', fasta_example)]\r\n self._paths_to_clean_up = []\r\n self._dirs_to_clean_up = []\r\n\r\n # make the webfile directory\r\n try:\r\n mkdir(self.output_dir)\r\n except OSError:\r\n pass\r\n\r\n # define directory to clean up\r\n self._dirs_to_clean_up = [self.output_dir]", "def _run_tests(self):\n for pyunit_testcase in self.cfg.testcases:\n yield self._run_testsuite(pyunit_testcase)", "def main(args):\n\tif not os.path.isdir(args.dir):\n\t\tprint \"The specified folder is not a directory.\"\n\t\tsys.exit(1)\n\tNUMBER_OF_FILES = len(os.listdir(args.dir))\n\tif args.num_of_files:\n\t\tNUMBER_OF_FILES = args.num_of_files\n\tprint \"Parsing\", NUMBER_OF_FILES, \"files\"\n\tsql = None\n\tif not args.stdout:\n\t\tsql = sqlite3.connect(args.database)\n\t\tsql.execute(\"\"\"PRAGMA foreign_keys = ON;\"\"\")\n\t\tsql.execute(\"\"\"CREATE TABLE airdates(\n\t\t\tgame INTEGER PRIMARY KEY,\n\t\t\tairdate TEXT\n\t\t);\"\"\")\n\t\tsql.execute(\"\"\"CREATE TABLE documents(\n\t\t\tid INTEGER PRIMARY KEY AUTOINCREMENT,\n\t\t\tclue TEXT,\n\t\t\tanswer TEXT\n\t\t);\"\"\")\n\t\tsql.execute(\"\"\"CREATE TABLE categories(\n\t\t\tid INTEGER PRIMARY KEY AUTOINCREMENT,\n\t\t\tcategory TEXT UNIQUE\n\t\t);\"\"\")\n\t\tsql.execute(\"\"\"CREATE TABLE clues(\n\t\t\tid INTEGER PRIMARY KEY AUTOINCREMENT,\n\t\t\tgame INTEGER,\n\t\t\tround INTEGER,\n\t\t\tvalue INTEGER,\n\t\t\tFOREIGN KEY(id) REFERENCES documents(id),\n\t\t\tFOREIGN KEY(game) REFERENCES airdates(game)\n\t\t);\"\"\")\n\t\tsql.execute(\"\"\"CREATE TABLE classifications(\n\t\t\tclue_id INTEGER,\n\t\t\tcategory_id INTEGER,\n\t\t\tFOREIGN KEY(clue_id) REFERENCES clues(id),\n\t\t\tFOREIGN KEY(category_id) REFERENCES categories(id)\n\t\t);\"\"\")\n\tfor i, file_name in enumerate(glob(os.path.join(args.dir, \"*.html\")), 1):\n\t\twith open(os.path.abspath(file_name)) as f:\n\t\t\tparse_game(f, sql, i)\n\tif not args.stdout:\n\t\tsql.commit()\n\tprint \"All done\"", "def main():\n\n args = get_args()\n random.seed(args.seed)\n wod = []\n\n for name, low, high in read_csv(args.file):\n reps = random.randint(low, high)\n if args.easy:\n reps = int(reps / 2)\n wod.append((name, reps))\n\n wod = random.sample(wod, k=args.num_exercises)\n print(tabulate(wod, headers=('Exercise', 'Reps')))", "def main():\n glob_pattern = \"{root}/{child}/*.xml\".format(root=MANCHESTER_ROOT, child=TARGET_CHILD)\n corpus_files = glob(glob_pattern)\n for filename in corpus_files:\n print(filename)\n to_csv(filtered_parent_freq_count([filename], 2))", "def start_test(n_questions, questions_file):\r\n\tfile = open(\"questions/\"+questions_file, \"r\")\r\n\tc = 0\r\n\tcorrect = []\r\n\twrong = []\r\n\r\n\tfor i in range(3325):\r\n\t\tline = file.readline()\r\n\t\t\r\n\t\tif \"Answer: \" in line:\r\n\t\t\tc += 1\r\n\t\t\tif len(str(line[8:]).strip()) > 1:\r\n\t\t\t\tprint(\"\\nMultiple Choice\")\r\n\t\t\telse:\r\n\t\t\t\tprint(\"\\nSingle Choice\")\r\n\t\t\tansw = str(input(\"\\nAnswer: \")).upper()\r\n\t\t\tif answ == 
str(line[8:]).strip():\r\n\t\t\t\tcorrect.append(c)\r\n\t\t\telse:\r\n\t\t\t\twrong.append(c)\r\n\t\t\t\tprint(\"Correct answer: \", str(line[8:]))\r\n\t\t\tprint(\"\\n\",\"-\"*10, \"Correct: \", len(correct), \"-\"*10, \"\\n\", \"-\"*10, \"Wrong: \", len(wrong), \"-\"*10 )\r\n\t\t\tcontinue\r\n\r\n\t\tif c == n_questions:\r\n\t\t\tbreak\r\n\t\tprint(line)\r\n\r\n\tdict_result = {\"Corrects\":correct,\r\n\t\t\t\t\t\"Wrongs\":wrong,\r\n\t\t\t\t\t\"n_questions\":n_questions}\r\n\r\n\tfile.close()\r\n\r\n\treturn dict_result", "def test_2_rnaseq(install_test_files, data_dir):\n with make_workdir() as workdir:\n cl = [\"bcbio_nextgen.py\",\n get_post_process_yaml(data_dir, workdir),\n os.path.join(data_dir, os.pardir, \"110907_ERP000591\"),\n os.path.join(data_dir, \"run_info-rnaseq.yaml\")]\n subprocess.check_call(cl)", "def sample_apr(self):\n FLAGS.full_wiki = True\n FLAGS.apr_dir = 'Directory Name'\n apr = apr_lib.ApproximatePageRank()\n seeds = [\n 'Q7755', 'Q878070', 'Q428148', 'Q679847', 'Q2609670', 'Q174834',\n 'Q188628'\n ]\n unique_facts = apr.get_facts(\n seeds, topk=200, alpha=0.9, seed_weighting=True)\n facts = sorted(unique_facts, key=lambda tup: tup[1][1], reverse=True)\n nl_facts = ' . '.join([\n str(x[0][0][1]) + ' ' + str(x[1][0][1]) + ' ' + str(x[0][1][1])\n for x in facts\n ])\n tf.logging.info('Extracted facts: %s', nl_facts)", "def test_ProcessDirectory(temp_dir: pathlib.Path):\n generator = make_dataset.RandomDatasetGenerator(\n start_date_seconds_since_epoch=time.mktime(\n time.strptime(\"1/1/2018\", \"%m/%d/%Y\")\n ),\n categories={\n \"Rainy Day\": [\"Savings\", \"Pension\"],\n \"Everyday Expenses\": [\"Groceries\", \"Clothes\"],\n },\n )\n dir = (\n temp_dir\n / \"ynab\"\n / \"Personal Finances~B0DA25C7.ynab4\"\n / \"data1~8E111055\"\n / \"12345D63-B6C2-CD11-6666-C7D8733E20AB\"\n )\n dir.mkdir(parents=True)\n generator.SampleFile(dir / \"Budget.yfull\", 100)\n\n # One SeriesCollection is generated for each input file.\n series_collection = ynab.ProcessInbox(temp_dir)\n\n # Two Series are produce for each file.\n assert len(series_collection.series) == 2\n\n # Sort series by name since the order of series isn't guaranteed.\n budget_series, transactions_series = sorted(\n series_collection.series, key=lambda s: s.name\n )\n\n # The series name is a CamelCase version of 'Personal Finances' with suffix.\n assert transactions_series.name == \"PersonalFinancesTransactions\"\n assert budget_series.name == \"PersonalFinancesBudget\"\n\n assert transactions_series.unit == \"pound_sterling_pence\"\n assert budget_series.unit == \"pound_sterling_pence\"\n assert transactions_series.family == \"Finances\"\n assert budget_series.family == \"Finances\"\n\n # num measurements = num transactions.\n assert len(transactions_series.measurement) == 100\n # num measurements = num categories * num months.\n assert len(budget_series.measurement) == 4 * 12\n\n for measurement in transactions_series.measurement:\n assert measurement.source == \"YNAB\"\n assert measurement.group\n\n for measurement in budget_series.measurement:\n assert measurement.source == \"YNAB\"\n assert measurement.group", "def generate_examples(file_name):\n for line in read_file(file_name):\n yield json.loads(line)", "def analysis_document(self, workers_num, stock_queues):\n\t\twhile not stock_queues.empty():\n\t\t\tfail_flag = False\n\t\t\tstock_name = stock_queues.get()\n\t\t\t#if not stock_name[0:1] in ['R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z']:\n\t\t\t#\tcontinue\n\t\t\t#if not 
self.analysis_statement(stock_name):\n\t\t\t#\tcontinue\n\n\t\t\tsav_stock_csv_path = '{}.csv'.format(os.path.join(self.stock_folder_path, stock_name))\n\t\t\tsav_option_csv_path = '{}.csv'.format(os.path.join(self.option_folder_path, stock_name))\n\t\t\tsav_option_com_order_csv_path = '{}.csv'.format(os.path.join(self.option_com_order_folder_path, stock_name))\n\t\t\tif not os.path.exists(self.option_folder_path):\n\t\t\t\tos.mkdir(self.option_folder_path)\n\t\t\tif not os.path.exists(self.option_com_order_folder_path):\n\t\t\t\tos.mkdir(self.option_com_order_folder_path)\n\t\t\tif not os.path.exists(self.techidx_folder_path):\n\t\t\t\tos.mkdir(self.techidx_folder_path)\n\t\t\tif not os.path.exists(self.pickle_folder_path):\n\t\t\t\tos.mkdir(self.pickle_folder_path)\n\t\t\t\t\n\t\t\tdf = self.crawl_price(stock_name)\n\t\t\tif len(df) < self.min_days:\n\t\t\t\tcontinue\n\n\t\t\tresult_all = self.get_supporting_point(stock_name, sav_stock_csv_path)\n\t\t\t#continue\n\t\t\t#self.output_report(stock_name, sav_option_csv_path, sav_option_com_order_csv_path, result_all)\n\t\t\t#print (sav_stock_csv_path, sav_option_csv_path, sav_option_com_order_csv_path)\n\t\t\ttech_idx_path = 'techidx/{}.csv'.format(stock_name)\n\n\t\t\toptions_contract_file_path = 'options/{}.csv'.format(stock_name)\n\t\t\tsav_stock_csv_path = '{}.csv'.format(os.path.join(self.stock_folder_path, stock_name))\n\t\t\toptions_file_path = '{}.csv'.format(os.path.join(self.option_folder_path, stock_name))\n\t\t\toptions_com_order_csv_path = '{}.csv'.format(os.path.join(self.option_com_order_folder_path, stock_name))\n\t\t\tcombin_contract_list_all, state_flag = self.output_report(stock_name, options_file_path, options_com_order_csv_path, result_all)\n\t\t\tif not state_flag:\n\t\t\t\t#print ('continue')\n\t\t\t\tcontinue\n\t\t\tself.get_techidx_result(stock_name)\n\t\t\tbest_combin_contract_all = self.back_testing(tech_idx_path, options_contract_file_path, combin_contract_list_all)\n\t\t\tprint (best_combin_contract_all)\n\n\t\t\tprint ('worker number {}, stock_name is {}'.format(workers_num, stock_name))\n\t\t\t#stock_queues.put(stock_name)\n\n#\t\t\tfor date in best_combin_contract_all.keys():\n\n\n\t\t\tbest_combin_contract_all_json = json.dumps(best_combin_contract_all)\n\t\t\t#print (len(best_combin_contract_all) != 0, len(best_combin_contract_all))\n\t\t\tif len(best_combin_contract_all) != 0:\n\t\t\t\twith open(options_com_order_csv_path, 'w') as f_w:\n\t\t\t\t\tf_w.write(best_combin_contract_all_json)", "def test_building_torrent_piece_queue_multifile():\n \n torrent_md = TorrentMD(TORRENT_PATH_MULTI)\n assert hasattr(torrent_md, \"torrent_dict\")\n assert hasattr(torrent_md, \"torrent_bytes\")\n assert torrent_md.piece_count == len(torrent_md.piece_hashes)\n hs_dict, hs_payload = torrent_md.get_handshake()\n assert len(hs_dict[\"peer_id\"]) == 20\n assert \"info_hash\" in hs_dict\n assert hs_dict[\"info_hash\"] in hs_payload\n for hash in torrent_md.piece_hashes:\n assert len(hash) == 20\n \n fw = FileWriter(torrent_md)\n pm = PieceManager(torrent_md, fw)\n assert len(pm._q) == torrent_md.piece_count", "def check_qiita_studies(dir_base):\n # collect study names\n studies = [d for d in next(os.walk(dir_base))[1] if not d.startswith('.')]\n\n for study in studies:\n file_sampleinfo = \"%s/%s/qiita%s_sampleinfo.txt\" % (dir_base,\n study,\n study)\n\n # check that sample info file exists\n if not os.path.exists(file_sampleinfo):\n raise ValueError('Missing sample info file for study %s!' 
% study)\n\n # checks that sample info file matches study by comparing column\n # qiita_study_id\n metadata = pd.read_csv(file_sampleinfo,\n sep='\\t', dtype=str, index_col=0)\n if metadata['qiita_study_id'].unique() != [study]:\n raise ValueError('Wrong sample info file for study %s!' % study)\n\n preps = [d\n for d in next(os.walk(dir_base + '/' + study))[1]\n if not d.startswith('.')]\n for prep in preps:\n # get all existing biom files\n dir_prep = dir_base + '/' + study + '/' + prep\n files_biom = [d\n for d in next(os.walk(dir_prep))[2]\n if d.endswith('.biom')]\n fraglen = set(map(lambda x: x.split('_')[-2].split('n')[0],\n files_biom))\n if len(fraglen) > 1:\n raise ValueError(('found biom files with differing '\n 'sequence lengths: \"%s\"') %\n '\", \"'.join(files_biom))\n fraglen = list(fraglen)[0]\n for _type in ['closedref', 'deblurrefhit']:\n file_biom = \"%s/%s/%s/qiita%s_%s_%snt_%s.biom\" % (\n dir_base, study, prep, study, prep, fraglen, _type)\n\n # check that biom file exists\n if not os.path.exists(file_biom):\n raise ValueError(\n 'Missing biom \"%s\" file \"%s\" for %s in study %s, %s!' %\n (_type, file_biom, _type, study, prep))\n\n # check biom contents\n counts = biom2pandas(file_biom)\n obs_alphabet = set([str(c).upper()\n for idx in counts.index\n for c in set(idx)])\n description = None\n if _type == 'closedref':\n exp_alphabet = set(map(str, range(0, 10)))\n description = 'numeric'\n else:\n exp_alphabet = set(string.ascii_uppercase)\n description = 'nucleotides'\n if len(obs_alphabet - exp_alphabet) > 0:\n raise ValueError(('Not all feature IDs are purely %s in '\n 'study %s, %s: \"%s') % (\n description, study, prep, file_biom))\n if metadata.loc[counts.columns, :].shape[0] <\\\n len(counts.columns):\n raise ValueError((\"Not all samples of %s of study %s are \"\n \"in the metadata file!\") % (prep, study))\n return True", "def generate():\n table_name = 'flightscrapequeue'\n routes = [\n {\"origin\": \"SIN\", \"destination\": \"DPS\"},\n {\"origin\": \"SIN\", \"destination\": \"BKK\"},\n {\"origin\": \"SIN\", \"destination\": \"DMK\"},\n {\"origin\": \"SIN\", \"destination\": \"PER\"},\n {\"origin\": \"SIN\", \"destination\": \"PEN\"},\n {\"origin\": \"SIN\", \"destination\": \"HKT\"},\n {\"origin\": \"DPS\", \"destination\": \"SIN\"},\n {\"origin\": \"BKK\", \"destination\": \"SIN\"},\n {\"origin\": \"DMK\", \"destination\": \"SIN\"},\n {\"origin\": \"PER\", \"destination\": \"SIN\"},\n {\"origin\": \"PEN\", \"destination\": \"SIN\"},\n {\"origin\": \"HKT\", \"destination\": \"SIN\"}\n ]\n logger.info(\"Generating scraping queue items for next 365 days ...\")\n batch_items = []\n today = datetime.today()\n crawl_date = today.strftime(\"%Y-%m-%d\")\n total_items_count = 0\n for count, site in enumerate([\"airasia\", \"jetstar\"]):\n for route in routes:\n logger.info(\n \"{0}: {1} to {2}\".format(\n site.title(), route['origin'], route['destination']\n )\n )\n for i in range(1, 366):\n if i > 90 and i <= 180:\n # 3 to 6 months - scrape every 3 days\n if i % 3 != 1:\n continue\n elif i > 180 and i <= 270:\n # 6 to 9 months - scrape every 5 days\n if i % 5 != 1:\n continue\n elif i > 270:\n # over 9 months - scrape every 7 days\n if i % 7 != 1:\n continue\n each_date = today + timedelta(days=i)\n departure_date = each_date.strftime(\"%Y-%m-%d\")\n uuid = '_'.join([\n crawl_date, site, route['origin'], route['destination'],\n departure_date\n ])\n item = {\n 'uuid': uuid,\n 'processing_status': 'pending',\n 'origin': route['origin'],\n 'destination': 
route['destination'],\n 'crawl_date': crawl_date,\n 'departure_date': departure_date,\n 'num_adult': '1',\n 'num_child': '0',\n 'num_infant': '0',\n 'site': site\n }\n if len(batch_items) == 10:\n batch_write(table_name=table_name, items=batch_items)\n time.sleep(1)\n batch_items = []\n batch_items.append(item)\n total_items_count += 1\n time.sleep(1)\n logger.info('ok')\n if batch_items:\n batch_write(table_name=table_name, items=batch_items)\n logger.info(\"Total items = {}\".format(total_items_count))\n # prepare_email(table_name)", "def _generate_training_files(self):\r\n tmp_dir = get_qiime_temp_dir()\r\n training_set = RdpTrainingSet()\r\n reference_seqs_file = open(self.Params['reference_sequences_fp'], 'U')\r\n id_to_taxonomy_file = open(self.Params['id_to_taxonomy_fp'], 'U')\r\n\r\n for seq_id, seq in parse_fasta(reference_seqs_file):\r\n training_set.add_sequence(seq_id, seq)\r\n\r\n for line in id_to_taxonomy_file:\r\n seq_id, lineage_str = map(strip, line.split('\\t'))\r\n training_set.add_lineage(seq_id, lineage_str)\r\n\r\n training_set.dereplicate_taxa()\r\n\r\n rdp_taxonomy_file = NamedTemporaryFile(\r\n prefix='RdpTaxonAssigner_taxonomy_', suffix='.txt', dir=tmp_dir)\r\n rdp_taxonomy_file.write(training_set.get_rdp_taxonomy())\r\n rdp_taxonomy_file.seek(0)\r\n\r\n rdp_training_seqs_file = NamedTemporaryFile(\r\n prefix='RdpTaxonAssigner_training_seqs_', suffix='.fasta',\r\n dir=tmp_dir)\r\n for rdp_id, seq in training_set.get_training_seqs():\r\n rdp_training_seqs_file.write('>%s\\n%s\\n' % (rdp_id, seq))\r\n rdp_training_seqs_file.seek(0)\r\n\r\n self._training_set = training_set\r\n\r\n return rdp_taxonomy_file, rdp_training_seqs_file", "def _wiki_dump_to_many_dumps( env_dict ):\n wiki_file = env_dict[\"wiki\"][\"big_xml\"]\n if not os.path.exists(wiki_file):\n logger.warning(u\"Wiki [%s] does not exists!\", wiki_file)\n return\n\n chunk_size = env_dict[\"wiki\"][\"wikis_file_buffer\"]\n buffer_size = chunk_size\n file_limit = env_dict[\"wiki\"][\"wikis_file_limit\"]\n\n pos = 0\n buf_leftover = \"\"\n\n def should_end( b ):\n if b == \"\":\n raise IOError(\"end reached\")\n\n wiki_file_out_templ = wiki_file + u\".part%s.xml\"\n\n with open(wiki_file, 'rb') as f_wiki:\n buf = f_wiki.read(chunk_size)\n to_find = \">\"\n first_page = buf.find(to_find)\n header = buf[:first_page + len(to_find)]\n footer = \"\\n</mediawiki>\"\n\n page_end = \"</page>\"\n first_time = True\n try:\n with open(wiki_file, 'rb', buffer_size) as f_wiki:\n while buf != \"\":\n read = 0\n pos += 1\n wiki_file_out = unicode(wiki_file_out_templ % pos)\n with open(wiki_file_out, 'wb+') as f_out:\n logger.info(\"Working on [%s]\", wiki_file_out)\n if not first_time:\n f_out.write(header)\n else:\n first_time = False\n while read < file_limit:\n buf = buf_leftover + f_wiki.read(chunk_size)\n buf_leftover = \"\"\n should_end(buf)\n read += len(buf)\n f_out.write(buf)\n # find page\n buf = f_wiki.read(chunk_size)\n if buf != \"\":\n page_end_pos = buf.find(page_end)\n assert page_end_pos >= 0, \"something fishy happened\"\n page_end_pos += len(page_end)\n f_out.write(buf[:page_end_pos])\n buf_leftover = buf[page_end_pos:]\n f_out.write(footer)\n except IOError:\n pass", "def test_everything(self):\n\n qs = FBO(\n path=TEST_FILES_ROOT,\n ).exclude(\n name__glob='*~',\n ).exclude(\n name__glob='*.meta',\n )\n\n self.assertEqual(\n 7,\n qs.count(),\n )\n self.assertEqual(\n {\n 'index.md',\n 'subdir/index.md',\n 'test1.md',\n 'test2.md',\n 'test1.rst',\n 'test2.rst',\n 'test3.rst',\n },\n {o.name for o in 
qs},\n )", "def main():\n\n file_name_base = \"./lab-record/result/fairness/\"\n scenarios = ['lan', 'wan1', 'wan2']\n scenario = scenarios[2]\n\n algorithms = [\"bbr\", \"scalable\", \"bic\", \"highspeed\", \"htcp\", \"hybla\",\n \"illinois\", \"vegas\", \"yeah\"]\n names = [\"BBR\", \"Scalable\", \"BIC\", \"High Speed\",\n \"H-TCP\", \"Hybla\", \"Illinois\", \"Vegas\", \"YeAH\"]\n\n test_types = [\"vs_reno\", \"vs_cubic\", \"vs_itself\"]\n\n fsize = 36\n \n index_reno = []\n index_cubic = []\n index_itself = []\n\n data = []\n \n print 'Loadint statistics for ' + file_name_base + '/' + scenario\n\n for algorithm in algorithms:\n for test in test_types:\n path_base = file_name_base + \"/\" + scenario + \"/\" + test + \"/\" + \\\n algorithm + \"/\"\n if test == \"vs_itself\":\n exp_name = names[algorithms.index(algorithm)] + \"_1\"\n con_name = names[algorithms.index(algorithm)] + \"_2\"\n print path_base + exp_name\n print path_base + con_name\n exp_filename = \"/\" + algorithm + \"_1.log\"\n con_filename = \"/\" + algorithm + \"_2.log\"\n process(path_base, exp_filename, con_filename, index_itself)\n if test == \"vs_reno\":\n exp_name = names[algorithms.index(algorithm)]\n con_name = \"Reno\"\n print path_base + exp_name\n print path_base + con_name\n exp_filename = \"/\" + algorithm + \".log\"\n con_filename = \"/reno.log\"\n process(path_base, exp_filename, con_filename, index_reno)\n if test == \"vs_cubic\":\n con_name = \"CUBIC\"\n exp_name = names[algorithms.index(algorithm)]\n print path_base + exp_name\n print path_base + con_name\n exp_filename = \"/\" + algorithm + \".log\"\n con_filename = \"/cubic.log\"\n process(path_base, exp_filename, con_filename, index_cubic)\n\n size = 9\n x = numpy.arange(size)\n\n total_width, n = 1.2, 2.5\n width = 1.0 / n\n x = x - (total_width - width) / 2\n\n for i in range(0, len(x)):\n x[i] += 0.5 * i\n\n # Exp\n fig = plt.figure()\n\n # Con\n con_reno = plt.bar(x + 0 * width - 1.2,\n index_reno,\n width=width,\n label='Against Reno',\n alpha=0.5,\n color=\"darkorange\")\n\n con_cubic = plt.bar(x + 1 * width - 1.2,\n index_cubic,\n width=width,\n label='Against CUBIC',\n alpha=0.5,\n color=\"lawngreen\")\n\n con_itself = plt.bar(x + 2 * width - 1.2,\n index_itself,\n width=width,\n label='Against Another Same CCA',\n alpha=0.5,\n color=\"dodgerblue\")\n\n # Index\n plt.xticks(x + 1.5 * width - 1.2, [\"BBR\", \"Scalable\", \"BIC\", \"High Speed\",\n \"H-TCP\", \"Hybla\", \"Illinois\", \"Vegas\",\n \"YeAH\"],\n fontsize=fsize,\n rotation=\"45\")\n plt.ylabel(\"Jain`s Fairness Index\", fontsize=fsize)\n plt.yticks(fontsize=fsize)\n plt.ylim(0.5, 1.1)\n\n ax = plt.subplot(111)\n ax.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,\n ncol=3, mode=\"expand\", borderaxespad=0., fontsize=fsize)\n\n plt.subplots_adjust(left=0.07, right=0.98, top=0.9, bottom=0.2)\n\n plt.show()", "def test_scraper(self):\n\n for entry in tests:\n command = ['./mozdownload/scraper.py',\n '--base_url=%s' % self.wdir,\n '--destination=%s' % self.temp_dir]\n p = processhandler.ProcessHandler(command + entry['options'])\n p.run()\n p.wait()\n dir_content = os.listdir(self.temp_dir)\n self.assertTrue(entry['fname'] in dir_content)\n\n mozfile.remove(os.path.join(self.temp_dir, entry['fname']))", "def AllindividualRuns():\n #800 nm\n RunData(getFiles(mintime=(15, 40, 07), maxtime=(15, 45, 14), folder='data/29Jul/'), out='I800nm')\n RunData(getFiles(mintime=(15, 12, 20), maxtime=(15, 24, 16), folder='data/31Jul/'), out='I800nm5k')\n RunData(getFiles(mintime=(15, 28, 40), 
maxtime=(15, 39, 21), folder='data/31Jul/'), out='I800nm10k')\n RunData(getFiles(mintime=(15, 43, 24), maxtime=(15, 51, 47), folder='data/31Jul/'), out='I800nm20k')\n RunData(getFiles(mintime=(15, 56, 11), maxtime=(16, 02, 58), folder='data/31Jul/'), out='I800nm30k')\n RunData(getFiles(mintime=(16, 12, 39), maxtime=(16, 18, 25), folder='data/31Jul/'), out='I800nm38k')\n RunData(getFiles(mintime=(16, 21, 52), maxtime=(16, 26, 16), folder='data/31Jul/'), out='I800nm50k')\n RunData(getFiles(mintime=(16, 32, 02), maxtime=(16, 35, 23), folder='data/31Jul/'), out='I800nm54k')\n #700 nm\n RunData(getFiles(mintime=(17, 20, 17), maxtime=(17, 33, 17), folder='data/30Jul/'), out='I700nm5k')\n RunData(getFiles(mintime=(17, 37, 35), maxtime=(17, 46, 51), folder='data/30Jul/'), out='I700nm9k')\n RunData(getFiles(mintime=(17, 48, 35), maxtime=(17, 56, 03), folder='data/30Jul/'), out='I700nm52k')\n RunData(getFiles(mintime=(17, 58, 18), maxtime=(17, 59, 31), folder='data/30Jul/'), out='I700nm32k')\n #600 nm\n RunData(getFiles(mintime=(15, 22, 00), maxtime=(15, 36, 32), folder='data/30Jul/'), out='I600nm5k')\n RunData(getFiles(mintime=(15, 39, 58), maxtime=(15, 47, 58), folder='data/30Jul/'), out='I600nm54k')\n RunData(getFiles(mintime=(15, 52, 07), maxtime=(16, 06, 32), folder='data/30Jul/'), out='I600nm10k')\n #890 nm\n RunData(getFiles(mintime=(13, 37, 37), maxtime=(13, 50, 58), folder='data/01Aug/'), out='I890nm5k')\n RunData(getFiles(mintime=(14, 00, 58), maxtime=(14, 11, 54), folder='data/01Aug/'), out='I890nm10k')\n RunData(getFiles(mintime=(14, 17, 57), maxtime=(14, 25, 49), folder='data/01Aug/'), out='I890nm30k')\n RunData(getFiles(mintime=(14, 30, 03), maxtime=(14, 34, 37), folder='data/01Aug/'), out='I890nm50k')", "def test_play_quiz(self):\n for i in range(5):\n res = self.client().post('/api/quizzes', json=self.quiz_request)\n res_body = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertTrue(res_body['success'])\n\n if res_body['question']:\n self.quiz_request['previous_questions'].append(res_body['question']['id'])", "def make_database(num_files=10):\n for i in range(num_files):\n print('\\n\\n\\nCreating set', str(i), '\\n\\n\\n')\n s_file = 'set' + str(i) + '.hdf5' \n play_dominoes(save_file=s_file)" ]
[ "0.609987", "0.59429026", "0.5871865", "0.55248433", "0.54379", "0.5391129", "0.5366999", "0.53585476", "0.53522", "0.53338474", "0.5293552", "0.5277471", "0.5270402", "0.5206287", "0.52051455", "0.51889557", "0.5166118", "0.51528084", "0.51408404", "0.51355785", "0.5130651", "0.51295424", "0.5125932", "0.51227254", "0.5120197", "0.5109019", "0.51081896", "0.50928885", "0.5080837", "0.5072457", "0.50724167", "0.50707793", "0.5058771", "0.5056753", "0.5052458", "0.5050982", "0.5050474", "0.5048623", "0.5031994", "0.502455", "0.5019459", "0.5017759", "0.49998012", "0.4978795", "0.4978744", "0.49730065", "0.49662438", "0.4964435", "0.49575013", "0.49573565", "0.49535227", "0.4951989", "0.49455693", "0.4942238", "0.49386498", "0.4933643", "0.4930502", "0.4923299", "0.49183694", "0.4907035", "0.49036184", "0.4899207", "0.48940063", "0.48825267", "0.4880527", "0.48795864", "0.48794374", "0.4879177", "0.4871497", "0.4870032", "0.48690888", "0.4866719", "0.48632622", "0.4853237", "0.48463812", "0.484548", "0.48326367", "0.48271337", "0.48219797", "0.48110473", "0.4809443", "0.48018095", "0.4799377", "0.47979677", "0.4796618", "0.4783825", "0.4780353", "0.47726664", "0.47705916", "0.477024", "0.4769064", "0.47658858", "0.47629222", "0.47617996", "0.47558874", "0.47462597", "0.47447658", "0.47372362", "0.4736209", "0.4734345" ]
0.5576337
3
Test case for add_or_update_case
def test_add_or_update_case(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_update_case(self):\n pass", "def test_update_one(self):\n pass", "def test_update_record(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_update_scenario(self):\n pass", "def test_add_or_update_state_for_none_state_key(self):\n def test_update_value(name, value):\n return f'{name}-{value}'\n\n state_manager = ActorStateManager(self._fake_actor)\n has_value, val = _run(state_manager.try_get_state('state1'))\n self.assertTrue(has_value)\n self.assertEqual('value1', val)\n\n val = _run(state_manager.add_or_update_state('state1', 'value1', test_update_value))\n self.assertEqual('state1-value1', val)", "def test_add_or_update_state_for_state_in_storage(self):\n def test_update_value(name, value):\n return f'{name}-{value}'\n\n state_manager = ActorStateManager(self._fake_actor)\n state_change_tracker = state_manager._get_contextual_state_tracker()\n val = _run(state_manager.add_or_update_state('state1', 'value1', test_update_value))\n self.assertEqual('state1-value1', val)\n state = state_change_tracker['state1']\n self.assertEqual(StateChangeKind.update, state.change_kind)", "def test_update_update_has_a_value(self):\n self.Person.drop_collection()\n\n author = self.Person.objects.create(name=\"Test User\")\n\n with pytest.raises(OperationError):\n self.Person.objects(pk=author.pk).update({})\n\n with pytest.raises(OperationError):\n self.Person.objects(pk=author.pk).update_one({})", "def test_update(self):\n # this is tested graphically, as it is UI\n pass", "def test_add_or_update_state_for_new_state(self):\n def test_update_value(name, value):\n return f'{name}-{value}'\n\n state_manager = ActorStateManager(self._fake_actor)\n state_change_tracker = state_manager._get_contextual_state_tracker()\n val = _run(state_manager.add_or_update_state('state1', 'value1', test_update_value))\n self.assertEqual('value1', val)\n state = state_change_tracker['state1']\n self.assertEqual(StateChangeKind.add, state.change_kind)", "def test_update_collection(self):\n pass", "def test_update_with_no_matches(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 15}, name=\"Mark\")\n assert n_updated == 0\n\n items = list(test_store.get_by())\n assert len(items) == 3\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_update(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 15}, name=\"Candy\")\n assert n_updated == 1\n items = list(test_store.get_by())\n\n candy.age = 15\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_put_db_fail(self):\n test_data = {\n 'first_name': 'new_first_name',\n 'last_name': 'new_last_name'\n }\n with mock.patch('user_profile.models.UserProfile.update') as update:\n update.return_value = False\n response = self.client.put(self.url, json.dumps(test_data), content_type='application/json')\n self.assertEquals(response.status_code, 400)", "def test_update_occurrence(self):\n pass", "def test_update_or_create_with_zero(self):\n\n d = {'name': 'winboat', 'some_number': 0}\n e = twsu.update_or_create(self.DBTestCls1, d)\n self.session.flush()\n eq_(e.some_number, 0)", "def test_update9(self):\n pass", "def test_add_item_at_using_put(self):\n pass", "def test_update_rule(self):\n pass", "def test_user_update_request(self):\n pass", "def test_update(app):\n\n assert False", "def test_update_twice_same_result():\n starting_db = create_db(STARTING_DB_INPUT)\n actual: dict = 
o_obj.update_object_in_db(\n starting_db,\n \"some_uid\",\n INP\n )\n actual2: dict = o_obj.update_object_in_db(\n starting_db,\n \"some_uid\",\n INP\n )\n assert actual == EXPECTED == actual2", "def test_update_on_unique_field_raises(test_store):\n\n with pytest.raises(NotImplementedError):\n test_store.update(fields={\"name\": \"Andy\"})", "def test_update(self):\n self.my_task.key = self.task_storage.add(self.my_task)\n\n self.my_task.title = 'foo'\n key = self.task_storage.update(self.my_task)\n new_task = self.task_storage.find(key)\n\n self.assertEqual(self.my_task, new_task)", "def add_or_update(self, answer):\n if self.exists(answer):\n self.update(answer)\n else:\n self.add(answer)", "def test_update_no_match(self):\n self.my_task.key = self.task_storage.add(self.my_task)\n\n self.task_storage.delete(self.my_task.key)\n\n self.my_task.title = 'foo'\n\n self.key = self.task_storage.update(self.my_task)\n\n self.assertIsNone(self.key)", "def test_simple_patches(self):\n payload = json.dumps([\n {\"op\": \"add\", \"path\": \"/name\", \"value\": \"New name\"},\n {\"op\": \"copy\", \"from\": \"/email\", \"path\": \"/username\"},\n {\"op\": \"replace\", \"path\": \"/subscriber\", \"value\": True}\n ])\n result = patch_item(self.valid_users[0], payload)\n user = Users.query.filter_by(UserID=self.valid_users[0]).first_or_404()\n self.assertEqual(\"New name\", result[\"name\"])\n self.assertEqual(\"[email protected]\", result[\"username\"])\n self.assertEqual(True, result[\"subscriber\"])\n self.assertEqual(\"New name\", user.Name)\n self.assertEqual(\"[email protected]\", user.Username)\n self.assertEqual(True, user.Subscriber)\n self.assertNotEqual(None, user.Updated) # Should update automatically", "def test_update_user(self):\n pass", "def test_app_can_update_a_list(self):\n self.ne=json.dumps({\"newName\":\"pants\"})\n list_update=self.client.put('/shoppinglists/trou',\n data=self.ne,\n headers={\n 'Content-Type':'application/json',\n 'x-access-token':self.tok})\n self.assertIn(\"list doesnt exist\",str(list_update.data)) \n self.assertEqual(list_update.status_code,200)", "def testAddAndDatabaseUpdates(self):\n self.users.TESTAPI_resetFixture()\n self.assertEqual(len(models.UsersModel.objects.all()), 0)\n self.users.add(\"count\", \"necula\")\n self.assertEqual(len(models.UsersModel.objects.all()), 1)\n self.users.add(\"george\", \"necula\")\n self.assertEqual(len(models.UsersModel.objects.all()), 2)", "def test_update_both(cards_db):\n i = cards_db.add_card(Card(\"foo\", owner=\"me\"))\n cards_db.update_card(i, Card(summary=\"bar\", owner=\"not me\"))\n\n mod = cards_db.get_card(i)\n assert mod == Card(\"bar\", owner=\"not me\", state=\"todo\")", "def test_update_multiple(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 14}, age=12)\n assert n_updated == 2\n items = list(test_store.get_by())\n\n andy.age = pandy.age = 14\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_update_all(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 13})\n assert n_updated == 3\n items = list(test_store.get_by())\n\n andy.age = pandy.age = 13\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_less_common_patches(self):\n payload = json.dumps([\n {\"op\": \"test\", \"path\": \"/level\", \"value\": 3},\n {\"op\": \"move\", \"from\": \"/email\", \"path\": \"/username\"},\n {\"op\": \"remove\", \"path\": \"/created\"}\n ])\n result = patch_item(self.valid_users[1], payload)\n 
user = Users.query.filter_by(UserID=self.valid_users[1]).first_or_404()\n self.assertEqual(\"[email protected]\", result[\"username\"])\n self.assertEqual(None, result[\"created\"])\n self.assertEqual(\"[email protected]\", user.Username)\n self.assertEqual(None, user.Created)\n self.assertNotEqual(None, user.Updated) # Should update automatically", "def test_007_update_user(self, mock_db_query, mock_db_add, mock_db_commit):\n mock_db_query.get.side_effect = [\n seller1,\n seller1.address\n ]\n\n standard_dict_update = standard_dict\n standard_dict_update['identity'] = 1\n standard_dict_update['first_name'] = \"Sally\"\n response = self.app.put('/v1/users/' + str(seller1.identity), data=json.dumps(standard_dict_update),\n headers={'accept': 'application/json', 'content-type': 'application/json'})\n\n print(response.get_data().decode())\n\n self.assertEqual(response.status_code, 200)\n # Check we call the correct two database methods\n self.assertTrue(mock_db_add.called)\n self.assertTrue(mock_db_commit.called)", "def test_update(self):\n doc_fields = document_fields.DocumentFields({\n 'foo@': 'bar',\n })\n self.assertEquals('bar', doc_fields['foo'])\n doc_fields.update({\n 'foo@': 'bbq',\n })\n self.assertEquals('bbq', doc_fields['foo'])", "def test_update_with_overwrite(self):\n u = stellr.UpdateCommand(TEST_HTTP)\n u.add_documents({'a': 1}, overwrite=False)\n self.assertEqual(\n u.body, '{\"add\": {\"doc\": {\"a\": 1}, \"overwrite\": false}}')", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "async def test_update(self):\n await self.collection.create({'id': 'foo', 'value': 'bar'})\n updated = await self.resource.update('foo', {'value': 'new'})\n self.assertEqual({'id': 'foo', 'value': 'new'}, updated)", "def test_add_or_update_state_for_removed_state(self):\n def test_update_value(name, value):\n return f'{name}-{value}'\n\n state_manager = ActorStateManager(self._fake_actor)\n _run(state_manager.remove_state('state1'))\n\n val = _run(state_manager.add_or_update_state('state1', 'value1', test_update_value))\n self.assertEqual('value1', val)", "def test_add_or_update_state_without_update_value_factory(self):\n state_manager = ActorStateManager(self._fake_actor)\n with self.assertRaises(AttributeError):\n _run(state_manager.add_or_update_state('state1', 'value1', None))", "def test_update(client):\n rv = update(client, 'Michael')\n assert json.loads(rv.data.decode())['code'] == 0\n assert json.loads(rv.data.decode())['owner'] == 'Michael'", "def test_handle_removals_add_only_if_not_in_remove(self):\n self.assertFalse(False)", "def test_update(self):\n\n res_create = self.metadata.create_or_update(data=self.create)\n\n updated = self.create.dict(exclude_unset=True)\n updated[\"owner\"] = self.owner\n updated_entity = CreateDashboardRequest(**updated)\n\n res = self.metadata.create_or_update(data=updated_entity)\n\n # Same ID, updated algorithm\n self.assertEqual(res.service.id, updated_entity.service.id)\n self.assertEqual(res_create.id, res.id)\n self.assertEqual(res.owner.id, self.user.id)", "def test_client_update(self):\n pass", "def test_update_no_note(self):\n self.my_task.notes = None\n self.my_task.key = self.task_storage.add(self.my_task)\n\n self.my_task.title = 'foo'\n key = self.task_storage.update(self.my_task)\n new_task = self.task_storage.find(key)\n\n 
self.assertEqual(self.my_task, new_task)", "def test_changedata(self):\n p = model.Person(firstname=\"Tobias\", lastname=\"Thelen\",\n email=\"[email protected]\", hobbies=[\"singen\",\"springen\",\"fröhlichsein\"])\n id = p.store()\n\n p = model.Person(id=id)\n p['firstname'] = \"Walter\"\n p.store()\n\n p2 = model.Person(id=id)\n self.assertEqual(p2.firstname, \"Walter\")\n self.assertEqual(p2.lastname, \"Thelen\")", "def test_update(self):\n c = city.City(name=\"Paris\")\n p1 = city.Citizen(name=\"Peter\")\n c.add(p1, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n cw = wrapper.add(c)\n session.commit()\n\n p2 = city.Citizen(name=\"Georg\")\n cw.add(p2, rel=city.hasInhabitant)\n cw.name = \"Freiburg\"\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)", "def test_create_or_update__update(collection, user_id, media_item, repo):\n insert = collection.insert_one(\n media_item\n | {\n \"userId\": user_id,\n \"filename\": \"other-filename.jpg\",\n }\n )\n repo.create_or_update(media_item)\n\n document = collection.find_one({\"id\": media_item[\"id\"]})\n assert document is not None\n assert document[\"userId\"] == user_id\n assert document[\"filename\"] == media_item[\"filename\"]", "def test_noop_model_update(self):\r\n m0 = TestUpdateModel.create(count=5, text='monkey')\r\n\r\n with patch.object(ConnectionPool, 'execute') as execute:\r\n m0.update()\r\n assert execute.call_count == 0\r\n\r\n with patch.object(ConnectionPool, 'execute') as execute:\r\n m0.update(count=5)\r\n assert execute.call_count == 0", "def test_update(self):\n\n user = CustomUser.objects.get(email=\"[email protected]\")\n user.update(first_name=\"UpdatedName\", second_name=\"UpdatedSecondName\")\n\n self.assertEqual(user.first_name, \"UpdatedName\")\n self.assertEqual(user.second_name, \"UpdatedSecondName\")", "def test_update_privileges_fails(self):\n user = User.create(name='foo', email='[email protected]', user_type='user',\n owned_teams=[], owned_organizations=[])\n user.put()\n\n # You get a 200, but the changes you requested don't happen.\n response = self.testapp.put_json(\n '/api/users/{}'.format(user.uid),\n {'user_type': 'super_admin', 'owned_teams': ['Team_foo'],\n 'owned_organizations': ['Organization_foo']},\n headers=self.login_headers(user),\n )\n user_dict = json.loads(response.body)\n self.assertEqual(user.user_type, user_dict['user_type'])\n self.assertEqual(user.owned_teams, user_dict['owned_teams'])\n self.assertEqual(user.owned_organizations,\n user_dict['owned_organizations'])\n\n # Also not changed in the db.\n fetched_user = User.get_by_id(user.uid)\n self.assertEqual(user.user_type, fetched_user.user_type)\n self.assertEqual(user.owned_teams, fetched_user.owned_teams)\n self.assertEqual(user.owned_organizations,\n fetched_user.owned_organizations)", "def test_update_nonexist(self):\n promotion = PromotionFactory()\n promotion.id = '1cak41-nonexist'\n try:\n promotion.update()\n except KeyError:\n self.assertRaises(KeyError)", "def test_update_state1(self):\n pass", "def test_noop_model_update(self):\n m0 = TestUpdateModel.create(count=5, text='monkey')\n\n with patch.object(self.session, 'execute') as execute:\n m0.update()\n assert execute.call_count == 0\n\n with patch.object(self.session, 'execute') as execute:\n m0.update(count=5)\n assert execute.call_count == 0", "def test_update_state2(self):\n pass", "def update_existing_key_fail(self, data, new_data):\n message = 'exists'\n rv = self.add_success(data)\n assert 
not in_response(rv, message)\n rv = self.add_success(new_data)\n assert not in_response(rv, message)\n rv = self.update_fail(data, message)\n assert self.verify_object(new_data)\n return rv", "def test_client_partial_update(self):\n pass", "def test_update(self, init_db, audit):\n params = {\n \"resource_type\": \"Category\",\n \"action\": \"Updated\",\n \"activity\": \"changed name\"\n }\n audit.update(**params)\n assert audit.resource_type == params['resource_type']\n assert audit.action == params['action']\n assert audit.activity == params['activity']", "def test_update_item_using_post(self):\n pass", "def test_update(self):\n u = stellr.UpdateCommand(TEST_HTTP, commit_within=60000)\n self.assertEqual(u.host, 'http://localhost:8983')\n self.assertEqual(u._handler, ('/solr/update/json?'\n 'wt=json&commitWithin=60000'))\n\n a = SimpleObject(DOCUMENTS[0][0], DOCUMENTS[0][1], DOCUMENTS[0][2])\n u.add_documents(a)\n\n b = dict()\n for i, field in enumerate(FIELDS):\n b[field] = DOCUMENTS[1][i]\n u.add_documents(b)\n\n self.assertEqual(len(u._commands), 2)\n for i, comm in enumerate(u._commands):\n self.assertEqual(comm[0], 'add')\n self.assertTrue('doc' in comm[1])\n for field, value in comm[1]['doc'].iteritems():\n field_ord = FIELDS.index(field)\n self.assertEqual(DOCUMENTS[i][field_ord], value)", "def test_update(self):\n doctor = DoctorFactory.create(id=21)\n data = {'name': 'Joe'}\n self.assertNotEqual(doctor.name, data['name'])\n\n response = self.unath_client.put(reverse('doctor-detail', args=[21]), data=data)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n response = self.client.put(reverse('doctor-detail', args=[21]), data=data)\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)", "def test_duo_application_update(self):\n pass", "async def test_update_not_implemented(self):\n with self.assertRaises(NotImplementedError):\n await self.collection.update('x', {})", "def test_success_case(self):\r\n tm = TestModel.create(count=8, text='123456789')\r\n tm2 = tm.update(count=9)\r\n\r\n tm3 = TestModel.get(tm.vid)\r\n assert tm2.count == 9\r\n assert tm3.count == 9", "def test_deep_update_illegal_update(self):\n # Update with an illegal type\n for update_with in [42, None, [42], \"bar\"]:\n with self.assertRaisesRegex(\n SaltInvocationError,\n r\"Cannot update {} with a {}.\" \"\".format(type({}), type(update_with)),\n ):\n dictupdate.update_dict_key_value({}, \"foo\", update_with)\n # Again, but now using OrderedDicts\n for update_with in [42, None, [42], \"bar\"]:\n with self.assertRaisesRegex(\n SaltInvocationError,\n r\"Cannot update {} with a {}.\"\n \"\".format(type(OrderedDict()), type(update_with)),\n ):\n dictupdate.update_dict_key_value(\n {}, \"foo\", update_with, ordered_dict=True\n )", "def test_update(self):\n user = self.custodian_1_user\n user_client = self.custodian_1_client\n urls = [reverse('api:user-detail', kwargs={'pk': user.pk})]\n new_first_name = \"New First Name\"\n data = {\n \"first_name\": new_first_name,\n }\n access = {\n \"forbidden\": [self.anonymous_client, self.readonly_client, self.custodian_2_client],\n \"allowed\": [self.admin_client, user_client]\n }\n\n for client in access['forbidden']:\n for url in urls:\n self.assertIn(\n client.patch(url, data, format='json').status_code,\n [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]\n )\n\n for client in access['allowed']:\n for url in urls:\n new_first_name += '1'\n data['first_name'] = new_first_name\n self.assertEqual(\n client.patch(url, data, 
format='json').status_code,\n status.HTTP_200_OK\n )\n user.refresh_from_db()\n self.assertEqual(user.first_name, new_first_name)", "def test_collection_put(testapp, execute_counter):\n initial = {\n 'title': \"Testing\",\n 'type': \"object\", # include a non-required field\n 'description': \"This is the initial insert\",\n }\n item_url = testapp.post_json('/embedding-tests', initial).location\n\n with execute_counter.expect(1):\n item = testapp.get(item_url).json\n\n for key in initial:\n assert item[key] == initial[key]\n\n update = {\n 'title': \"New Testing\",\n 'type': \"object\",\n 'description': \"This is the updated insert\",\n }\n testapp.put_json(item_url, update, status=200)\n\n res = testapp.get('/' + item['uuid']).follow().json\n\n for key in update:\n assert res[key] == update[key]", "def test_update_goal(self):\n pass", "def test_insert_or_update_query(self):\n\n row = (\n 'source',\n 'signal',\n 'time_type',\n 'geo_type',\n 'time_value',\n 'geo_value',\n 'value',\n 'stderr',\n 'sample_size',\n )\n mock_connector = MagicMock()\n database = Database()\n database.connect(connector_impl=mock_connector)\n\n database.insert_or_update(*row)\n\n connection = mock_connector.connect()\n cursor = connection.cursor()\n self.assertTrue(cursor.execute.called)\n\n sql, args = cursor.execute.call_args[0]\n self.assertEqual(args, row)\n\n sql = sql.lower()\n self.assertIn('insert into', sql)\n self.assertIn('`covidcast`', sql)\n self.assertIn('unix_timestamp', sql)\n self.assertIn('on duplicate key update', sql)", "def test_update_item(self, default_ms):\r\n self.initdb(default_ms)\r\n course = self.store.get_course(self.course_locations[self.XML_COURSEID1].course_key)\r\n # if following raised, then the test is really a noop, change it\r\n self.assertFalse(course.show_calculator, \"Default changed making test meaningless\")\r\n course.show_calculator = True\r\n with self.assertRaises(AttributeError): # ensure it doesn't allow writing\r\n self.store.update_item(course, None)\r\n # now do it for a r/w db\r\n course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)\r\n # if following raised, then the test is really a noop, change it\r\n self.assertFalse(course.show_calculator, \"Default changed making test meaningless\")\r\n course.show_calculator = True\r\n self.store.update_item(course, None)\r\n course = self.store.get_course(self.course_locations[self.MONGO_COURSEID].course_key)\r\n self.assertTrue(course.show_calculator)", "def case_update_record(self, refresh_db_before):\n original_name = \"tag3\"\n update_name = \"tag33\"\n\n original_tag = TagOp.add(original_name)\n\n updated_tag_local = TagOp.update(original_tag, update_name)\n\n self.assertEqual(updated_tag_local.name, update_name)\n\n updated_tag_get = TagOp.get(name=update_name)\n\n self.assertTrue(len(updated_tag_get) is 1)\n self.assertEqual(updated_tag_get[0].name, update_name)\n self.assertEqual(updated_tag_get[0].id, original_tag.id)", "def test_list_inplace_update(self):\r\n vm = List.value_manager(None, None, [1,2,3])\r\n assert not vm.changed\r\n vm.value.append(4)\r\n assert vm.changed", "def test_user_update_permissions(self):\n userPK = User.objects.get(username='c2e1').pk\n url = reverse('User-detail', kwargs={'pk': userPK})\n data = {'username': 'company1NewTest'}\n response = self.client.put(url, data, format='json')\n #This is 404 instead of 403 because there is no way to view a company\n #that you arent an employee of.\n self.assertEqual(response.status_code, 
status.HTTP_404_NOT_FOUND)\n self.assertNotEqual(User.objects.get(pk=userPK).username,\n 'company1NewTest')\n data = {'username': 'company1NewTest2'}\n response = self.client.patch(url, data, format='json')\n #This is 404 instead of 403 because there is no way to view a company\n #that you arent an employee of.\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n self.assertNotEqual(User.objects.get(pk=userPK).username,\n 'company1NewTest2')", "def test_update_users_Returns_false_for_invalid_id(\n self, mock_modify_users_file, mock_read_users_file\n ):\n mock_read_users_file.return_value = self.users_data\n mock_modify_users_file.return_value = True \n crud = CRUD()\n self.assertFalse(crud.update_users(\"10\",\"name\",\"Maxime\"))", "def test_update_activity(self):\n pass", "def test_update_book_details(self):\n\n first_book_list = BookList()\n first_book = Book()\n\n first_book.create_book({\n \"title\": \"First Man\",\n \"author\": \"James R. Hansen\",\n \"year\": 2005,\n \"publisher_name\": \"Simon & Schuster\",\n \"publication_date\": \"01/01/2018\",\n \"num_copies\": 1\n })\n\n first_book_list.add_book(first_book)\n\n new_book_details = {\n \"title\": \"First Man\",\n \"author\": \"James Hansen\",\n \"year\": 2018,\n \"publisher_name\": \"Simon & Schuster\",\n \"publication_date\": \"01/01/2018\",\n \"num_copies\": 5\n }\n\n assert first_book_list.update_book_details(new_book_details) == True\n assert first_book_list.find_book(\"First Man\") == True\n\n for book in first_book_list.show_all():\n assert book.get(\"title\") == \"First Man\"\n assert book.set(\"title\", \"First Man: The Life of Neil A. Armstrong\") == True\n\n assert first_book_list.find_book(\"First Man: The Life of Neil A. Armstrong\") == True", "def test_customer_update(self):\n # first performe create\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then performe update\n data = { \n \"name\": \"Changed the name\",\n \"email\": self.customer_data[\"email\"],\n \"phone\": self.customer_data[\"phone\"]\n }\n self._update_model(\"customer\", id, data, [\"name\"])\n self.assertIsNotNone(id)", "def test_update_inventory(self):\n pass", "def taco_test_put_new(self):\n body = '{ \"id\": 400, \"name\": \"item_new\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_update_system(self):\n pass", "def test_full_update(self):\n self.assertEqual(Product.objects.count(), 2)\n self.assertEqual(self.product_1.name, 'Nike Vapor')\n self.assertEqual(self.product_1.sku, '44444444')\n self.assertEqual(self.product_1.category, self.category_1)\n self.assertEqual(self.product_1.description, 'Some product description')\n self.assertEqual(self.product_1.price, 129.99)\n self.assertEqual(self.product_1.featured, False)\n\n payload = {\n 'name': 'Updated name',\n 'category': self.category_2.id,\n 'sku': '11111111',\n 'description': 'New product description',\n 'price': 39.99,\n 'featured': True\n }\n\n headers = {\n 'HTTP_AUTHORIZATION': 'Token ' + str(self.token_admin)\n }\n response = self.client.put(\n '/api/products/{}/'.format(self.product_1.id),\n data=payload, content_type='application/json', **headers)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response['Content-Type'], 'application/json')\n self.assertEqual(Product.objects.count(), 2)\n\n product = Product.objects.get(id=self.product_1.id)\n 
self.assertEqual(product.name, 'Updated name')\n self.assertEqual(product.sku, '11111111')\n self.assertEqual(product.category, self.category_2)\n self.assertEqual(product.description, 'New product description')\n self.assertEqual(float(product.price), 39.99)\n self.assertEqual(product.featured, True)", "def testAdd2(self):\n self.assertEquals(models.SUCCESS, self.users.add(\"userC\", \"password\"))\n self.assertEquals(models.SUCCESS, self.users.add(\"userD\", \"password\"))", "def test_full_update_recipe(self):\n recipe = sample_recipe(user=self.user)\n recipe.tag.add(sample_tag(user=self.user))\n payload = {\n 'title':'chicken noodles',\n 'time_minutes':50,\n 'price':12.67,\n }\n url = detail_url(recipe.id)\n self.client.put(url,payload)\n\n recipe.refresh_from_db()\n self.assertEqual(recipe.title,payload['title'])\n self.assertEqual(recipe.time_minutes,payload['time_minutes'])\n self.assertEqual(float(recipe.price),payload['price'])\n tags = recipe.tag.all()\n self.assertEqual(len(tags),0)\n self.assertEqual(recipe.user,self.user)", "def test_update_state3(self):\n pass", "def test_attempt_to_add_uid_key_causes_error():\n starting_db = create_db(STARTING_DB_INPUT)\n starting_db.put_item(\n Item={\n \"uid\": \"I can TOTALLY update someone else's object\"\n }\n )\n with pytest.raises(ValueError):\n o_obj.update_object_in_db(\n starting_db,\n \"some_uid\",\n json.dumps({\n \"uid\": \"I can TOTALLY update someone else's object\"\n })\n )", "def test_update_state4(self):\n pass", "def test_update_no_commit(self):\n album = Album(artist='Artist', album='Album', album_type='ep',\n totaltracks=1, totalseconds=120)\n pk = album.insert(self.app.db, self.app.curs)\n self.assertNotEqual(pk, None)\n self.assertNotEqual(pk, 0)\n self.assertEqual(self.get_album_count(), 1)\n album_row = self.get_album_by_id(pk)\n self.assertEqual(album_row['alartist'], 'Artist')\n self.assertEqual(album_row['alalbum'], 'Album')\n self.assertEqual(album_row['altype'], 'ep')\n self.assertEqual(album_row['totaltracks'], 1)\n self.assertEqual(album_row['totalseconds'], 120)\n\n # Now update the object and save out, and test.\n album.artist = 'Artist 2'\n album.album = 'Album 2'\n album.album_type = 'live'\n album.totaltracks = 2\n album.totalseconds = 240\n album.update(self.app.db, self.app.curs, commit=False)\n self.assertEqual(self.get_album_count(), 1)\n album_row = self.get_album_by_id(pk)\n self.assertEqual(album_row['alartist'], 'Artist 2')\n self.assertEqual(album_row['alalbum'], 'Album 2')\n self.assertEqual(album_row['altype'], 'live')\n self.assertEqual(album_row['totaltracks'], 2)\n self.assertEqual(album_row['totalseconds'], 240)\n self.app.db.rollback()\n self.assertEqual(self.get_album_count(), 1)\n album_row = self.get_album_by_id(pk)\n self.assertEqual(album_row['alartist'], 'Artist')\n self.assertEqual(album_row['alalbum'], 'Album')\n self.assertEqual(album_row['altype'], 'ep')\n self.assertEqual(album_row['totaltracks'], 1)\n self.assertEqual(album_row['totalseconds'], 120)", "def test_otoroshi_controllers_adminapi_tcp_service_api_controller_update_entity_action(self):\n pass", "async def test_updates_no_user(database,valid_data):\n #reset the database and add values with ids [0,10]\n test_valid_insert(database,valid_data)\n\n for _id in range(100,150):\n try:\n await database.update(_id=_id,user_id=_id)\n assert False\n except:\n assert True\n await database.close_pool()", "def test_add_duplicate(self, api):\n self.builder.add_user(api.get_user())\n resp = api.add_user(api.get_user())\n 
self.builder.del_user(api.get_user())\n assert resp.status_code == 304", "def test_updating_record_with_kwargs(self, test_domain):\n identifier = uuid4()\n person = test_domain.repository_for(Person)._dao.create(\n id=identifier, first_name=\"Johnny\", last_name=\"John\", age=2\n )\n\n test_domain.repository_for(Person)._dao.update(person, age=10)\n u_person = test_domain.repository_for(Person)._dao.get(identifier)\n assert u_person is not None\n assert u_person.age == 10", "def test_add(self):\n self.assertEqual(3, foo.add(1, 2))\n self.assertNotEqual(3, foo.add(2, 2))", "def test_request_do_update(test_dao, test_configuration):\r\n DUT = dtcFunction(test_dao, test_configuration, test=True)\r\n DUT.request_do_select_all(revision_id=1)\r\n\r\n assert not DUT.request_do_update(1)", "def api_can_update(self):\n person1 = User(name=\"test person1\",\n bio=\"test person1\",\n contact_info=\"test person\")\n person2 = User(name=\"test person2\",\n bio=\"test person2\",\n contact_info=\"test person\")\n person1.save()\n person2.save()\n # update_person = self.client.put(\n # reverse('details', kwargs={'pk': person1.id}),\n # person2, format='json'\n # )\n self.assertEqual(self.client.get('/api/guru'), 200)", "def testInequalityDifferentModifications(self):\n first = delta.ModifyOp(\"cn=john,dc=example,dc=com\", [delta.Add(\"description\")])\n\n second = delta.ModifyOp(\n \"cn=john,dc=example,dc=com\", [delta.Delete(\"description\")]\n )\n\n self.assertNotEqual(first, second)", "def test_vault_update_vault_item(self):\n pass", "def test_update_jwp(self):\n v1, = set_resources_and_sync([make_video(media_id='1234')])\n jwp1 = jwpmodels.Video.objects.get(key=v1.key)\n self.assertEqual(jwp1.updated, v1['updated'])\n\n v1['updated'] += 20\n v1, = set_resources_and_sync([v1])\n jwp1 = jwpmodels.Video.objects.get(key=v1.key)\n self.assertEqual(jwp1.updated, v1['updated'])" ]
[ "0.79237324", "0.7314126", "0.7272837", "0.72685695", "0.72685695", "0.72685695", "0.712033", "0.69949913", "0.6982817", "0.69368035", "0.69113", "0.68479264", "0.68319446", "0.6824832", "0.6721132", "0.6690393", "0.6686279", "0.66511667", "0.6622248", "0.6599744", "0.6579878", "0.6571779", "0.6547084", "0.6512169", "0.6478061", "0.6466246", "0.64359325", "0.6411041", "0.63484573", "0.63103485", "0.6308383", "0.628732", "0.6268244", "0.6264796", "0.6258671", "0.6251083", "0.6227754", "0.62043804", "0.6203913", "0.6193558", "0.61878324", "0.61874944", "0.6186949", "0.6175689", "0.6168581", "0.6166178", "0.6154626", "0.61433166", "0.6132265", "0.6121185", "0.61176634", "0.6114559", "0.61096436", "0.6108814", "0.61028546", "0.6096815", "0.6096594", "0.6091824", "0.6090503", "0.60879093", "0.60702735", "0.6064617", "0.6062022", "0.605338", "0.60528827", "0.6049695", "0.60469127", "0.60413706", "0.6036552", "0.60301447", "0.6029099", "0.6026818", "0.60198396", "0.6017708", "0.60176396", "0.6013674", "0.6009889", "0.60095906", "0.60095716", "0.600568", "0.6005275", "0.60052276", "0.60036343", "0.5998893", "0.5996754", "0.5990715", "0.59898114", "0.59821045", "0.59786165", "0.59775794", "0.5975366", "0.59704065", "0.5965631", "0.5961888", "0.59558105", "0.5955347", "0.59550416", "0.59506345", "0.59485537", "0.5945791" ]
0.90939504
0
Test case for delete_case
def test_delete_case(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_delete_run(self):\n pass", "def test_delete(self):\n pass", "def test_delete1(self):\n pass", "def test_delete_record(self):\n pass", "def test_CovidCase_delete(self):\n # setting up by creating and saving the the database\n del_Covid = self.create_CovidCase()\n del_Covid.save()\n del_id = del_Covid.id\n # we are going to delete by calling the delete function\n del_deleted = CovidCase.objects.get(id=del_id)\n del_deleted.delete()\n\n self.assertNotIn(del_Covid, CovidCase.objects.all())", "def test_delete7(self):\n pass", "def test_delete_records(self):\n pass", "def test_delete_goal(self):\n pass", "def test_delete_item_using_delete(self):\n pass", "def test_delete_occurrence(self):\n pass", "def test_delete_rule(self):\n pass", "def test_delete_activity(self):\n pass", "def test_delete(self):\r\n course = CourseFactory.create(org='edX', course='999')\r\n with self.assertRaises(ValueError):\r\n tabs.primitive_delete(course, 0)\r\n with self.assertRaises(ValueError):\r\n tabs.primitive_delete(course, 1)\r\n with self.assertRaises(IndexError):\r\n tabs.primitive_delete(course, 6)\r\n tabs.primitive_delete(course, 2)\r\n self.assertFalse({u'type': u'textbooks'} in course.tabs)\r\n # Check that discussion has shifted up\r\n self.assertEquals(course.tabs[2], {'type': 'discussion', 'name': 'Discussion'})", "def test_delete_risk_profile_using_delete(self):\n pass", "def test_delete_boat(self):\n pass", "def delete():", "def test_delete(self):\n SampleTemplate.create(self.metadata, self.new_study)\n SampleTemplate.delete(2)\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.required_sample_info WHERE study_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.study_sample_columns WHERE study_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n with self.assertRaises(QiitaDBExecutionError):\n self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.sample_2\")", "def test_delete(self):\n scenario = factories.Scenario(config='', status=Scenario.Status.INACTIVE)\n scenario.delete()\n self.assertEqual(scenario.status, Scenario.Status.INACTIVE)", "def test_delete_identity(self):\n pass", "def test_delete(self):\n self.assertTrue(self.run_function(\"group.add\", [self._group]))\n\n # correct functionality\n self.assertTrue(self.run_function(\"group.delete\", [self._group]))\n\n # group does not exist\n self.assertFalse(self.run_function(\"group.delete\", [self._no_group]))", "def test_delete(self) -> None:\n\n expected = False\n actual = self.helper.set_name(self.test_name).exists()\n\n self.assertEqual(expected, actual)\n\n self.helper.set_value(\"Hello, World!\")\n\n expected = True\n actual = self.helper.exists()\n\n self.assertEqual(expected, actual)\n\n self.helper.delete()\n\n expected = False\n actual = self.helper.exists()\n\n self.assertEqual(expected, actual)", "def test_deleting_a_segment(self):\n pass", "def test_delete(self):\n # add a task\n self.add(title=\"Sample task doing\", description=\"for sample\", state=\"doing\")\n task = Task.query.filter_by(title='Sample task doing').first()\n\n # delete\n self.delete(id=task.id)\n task = Task.query.filter_by(title='Sample task doing').first()\n self.assertIsNone(task)", "def test_delete(self):\n person = Person('test_person_b')\n person.delete()\n with database() as db:\n results = db.query(\"SELECT * FROM persons WHERE person_name = 'test_person_b'\")\n self.assertEqual(results, [])", "def test_delete_category(self):\n pass", "def 
test_delete_category(self):\n self.add_success(self.test_data['pants'])\n self.delete_success('pants')", "def test_delete_cases_with_dry_run(cli_runner, base_context, base_store: Store, helpers, caplog):\n # GIVEN a database with a case\n case_obj = helpers.add_case(base_store)\n case_id = case_obj.internal_id\n sample = helpers.add_sample(base_store)\n helpers.add_relationship(store=base_store, case=case_obj, sample=sample)\n\n # WHEN deleting a case\n caplog.set_level(logging.DEBUG)\n cli_runner.invoke(\n delete_cases,\n [\"--sample-identifier\", \"name\", sample.name, \"--dry-run\"],\n obj=base_context,\n )\n\n # THEN it should not have been deleted\n assert \"Cases (that will NOT be deleted due to --dry-run):\" in caplog.text\n assert case_id in caplog.text", "def test_delete_activity_template(self):\n pass", "def delete(self):\n ...", "def test_delete_cloud(self):\n pass", "def test_delete_device(self):\n pass", "def test_delete_device(self):\n pass", "def test_delete_recipe_category(self):\n self.signup('Bo', 'Theo', '[email protected]', 'Bo1995', 'Bo1995')\n self.login('[email protected]', 'Bo1995')\n self.dashboard()\n self.category('JunkFood')\n self.dashboard()\n self.recipe_dashboard()\n self.create_recipe('cakes', 'blah, blah, blah....mix ingredient, heat')\n self.edit_recipe('edited cakes', 'edited blah blah blah spoon , heat')\n rv = self.del_recipe()\n self.assertIn(b'deleted successfully', rv.data)", "def test_delete_failure(self):\r\n problem_url_name = 'H1P1'\r\n location = InstructorTaskModuleTestCase.problem_location(problem_url_name)\r\n self.define_option_problem(problem_url_name)\r\n self.submit_student_answer('u1', problem_url_name, [OPTION_1, OPTION_1])\r\n\r\n expected_message = \"bad things happened\"\r\n with patch('courseware.models.StudentModule.delete') as mock_delete:\r\n mock_delete.side_effect = ZeroDivisionError(expected_message)\r\n instructor_task = self.delete_problem_state('instructor', location)\r\n self._assert_task_failure(instructor_task.id, 'delete_problem_state', problem_url_name, expected_message)", "def test_delete_group(self):\n pass", "def test_delete_group(self):\n pass", "def test_delete_collection(self):\n pass", "def test_delete_user(self):\n pass", "def test_delete_user(self):\n pass", "def test_delete(self):\n self.request.access.allow_delete = [\"everyone\"]\n pkg = make_package(factory=DynamoPackage)\n self._save_pkgs(pkg)\n self.db.delete(pkg)\n count = self.engine.scan(DynamoPackage).count()\n self.assertEqual(count, 0)\n count = self.engine.scan(PackageSummary).count()\n self.assertEqual(count, 0)\n self.storage.delete.assert_called_with(pkg)", "def do_delete(self, arg):\n \treturn False", "def test_delete_note(self):\n pass", "def test_duo_application_delete(self):\n pass", "def test_delete(self):\n self.request.access.allow_delete = [\"everyone\"]\n pkg = make_package(factory=SQLPackage)\n self.sql.add(pkg)\n transaction.commit()\n self.sql.add(pkg)\n self.db.delete(pkg)\n count = self.sql.query(SQLPackage).count()\n self.assertEqual(count, 0)\n self.storage.delete.assert_called_with(pkg)", "def test_client_risk_assessment_delete(self):\n pass", "def test_delete_alert_by_id(self):\n pass", "def test_remove(self):\n pass", "def test_delete_client(self):\n pass", "def test_007_delete(self):\n HEADING()\n db = self.db\n\n db.connect()\n print (\"AAA\")\n before_count = len(db)\n print (\"CCC\", len(db))\n job = db.insert(\"deleteme\")\n print (\"DDD\", len(db))\n\n job = db.delete_jobs(\"job_name\", \"deleteme\")\n print 
(\"EEE\")\n after_count = len(db)\n print (\"FFF\", len(db))\n assert(before_count - after_count == 0)", "def delete(self, *args, **kwargs):\n return 0", "def test_handle_delete(self):\n team = Team(\"BRS\", \"brs\", \"web\")\n team.github_team_id = \"12345\"\n test_user = User(\"userid\")\n test_user.github_id = \"1234\"\n team.add_team_lead(\"1234\")\n self.db.retrieve.return_value = test_user\n self.db.query.return_value = [team]\n self.assertTupleEqual(self.testcommand.handle(\"team delete brs\", user),\n (f\"Team brs deleted\", 200))\n self.db.delete.assert_called_once_with(Team, \"12345\")\n self.gh.org_delete_team.assert_called_once_with(int(\"12345\"))", "def test_duo_account_delete(self):\n pass", "def test_client_nationlity_delete(self):\n pass", "def test_client_verification_document_delete(self):\n pass", "def test_user_id_delete(self):\n pass", "def test_delete_findings(upload, test_id):\n check_delete()\n upload.test_delete_findings(test_id)", "def test_delete_edge_case_with_write_concern_0_return_None(self):\n p1 = self.Person(name=\"User Z\", age=20).save()\n del_result = p1.delete(w=0)\n assert del_result is None", "def test_delete_category(self):\n self.signup('Bo', 'Theo', '[email protected]', 'Bo1995', 'Bo1995')\n self.login('[email protected]', 'Bo1995')\n self.dashboard()\n self.category('JunkFood')\n self.dashboard()\n rv = self.del_category()\n self.assertIn(b'successfully deleted category', rv.data)", "def test_delete_team(self):\n pass", "def test_datatransformationsetups_id_delete(self):\n pass", "def test_sequence_delete(self):\n self.t(\"1,2 delete\", input=\"y\\ny\\n\")\n code, out, err = self.t(\"_get 1.status 2.status\")\n self.assertEqual(\"deleted deleted\\n\", out)", "def test_variablepresentations_id_delete(self):\n pass", "def test_delete_groups(self):\n pass", "def test_delete_device_by_id(self):\n pass", "def test_delete_device_by_id1(self):\n pass", "def test_delete(self):\n self.request.access.allow_delete = [\"everyone\"]\n pkg = make_package()\n key = self.db.redis_key(pkg.filename)\n self.redis[key] = \"foobar\"\n self.db.delete(pkg)\n val = self.redis.get(key)\n self.assertIsNone(val)\n count = self.redis.scard(self.db.redis_set)\n self.assertEqual(count, 0)\n self.storage.delete.assert_called_with(pkg)", "def test_00_cascade(self):\n cat = self.cat\n\n # get the id's of all objects that should be deleted.\n uid = cat.uaccess.id\n orid = self.scratching.id\n arid = self.scratching.raccess.id\n ogid = self.felines.id\n agid = self.felines.gaccess.id\n gpid = UserGroupPrivilege.objects.get(user=cat).id\n rpid = UserResourcePrivilege.objects.get(user=cat).id\n mpid = GroupMembershipRequest.objects.get(request_from=cat).id\n\n # all objects exist before the delete\n self.assertEqual(UserAccess.objects.filter(id=uid).count(), 1)\n self.assertEqual(UserGroupPrivilege.objects.filter(id=gpid).count(), 1)\n self.assertEqual(\n UserResourcePrivilege.objects.filter(\n id=rpid).count(), 1)\n self.assertEqual(\n GroupMembershipRequest.objects.filter(\n id=mpid).count(), 1)\n self.assertEqual(ResourceAccess.objects.filter(id=arid).count(), 1)\n self.assertEqual(GroupAccess.objects.filter(id=agid).count(), 1)\n self.assertEqual(BaseResource.objects.filter(id=orid).count(), 1)\n self.assertEqual(Group.objects.filter(id=ogid).count(), 1)\n\n cat.delete()\n\n # objects tied to the user are deleted, other objects continue to exist\n self.assertEqual(UserAccess.objects.filter(id=uid).count(), 0)\n self.assertEqual(UserGroupPrivilege.objects.filter(id=gpid).count(), 
0)\n self.assertEqual(\n UserResourcePrivilege.objects.filter(\n id=rpid).count(), 0)\n self.assertEqual(\n GroupMembershipRequest.objects.filter(\n id=mpid).count(), 0)\n # deleting a user should not remove the groups that user owns\n self.assertEqual(GroupAccess.objects.filter(id=agid).count(), 1)\n self.assertEqual(Group.objects.filter(id=ogid).count(), 1)\n\n # the following tests will fail, because the resource field\n # \"creator\" is a foreign key to User with on_delete=models.CASCADE\n # and null=False. Thus removing the creator of a resource will\n # remove the resource record (and orphan many files in the process).\n\n # print('resource access count is ', ResourceAccess.objects.filter(id=arid).count())\n # print('resource count is ', BaseResource.objects.filter(id=orid).count())\n # self.assertEqual(ResourceAccess.objects.filter(id=arid).count(), 1)\n # self.assertEqual(BaseResource.objects.filter(id=orid).count(), 1)", "def test_data_source_soaps_id_delete(self):\n pass", "def test_delete_project(self):\n pass", "def test_delete_project(self):\n pass", "def test_delete(self):\n pt = PrepTemplate.create(self.metadata, self.new_raw_data,\n self.test_study, self.data_type_id)\n PrepTemplate.delete(pt.id)\n\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_template WHERE prep_template_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.common_prep_info WHERE prep_template_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n\n obs = self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_columns WHERE prep_template_id=2\")\n exp = []\n self.assertEqual(obs, exp)\n\n with self.assertRaises(QiitaDBExecutionError):\n self.conn_handler.execute_fetchall(\n \"SELECT * FROM qiita.prep_2\")", "def test_delete(self):\n\n value = self.instance.delete()\n self.client.delete_instance.assert_called_once_with('nginx')\n self.assertEqual(value, self.client.delete_instance.return_value)", "def test_meeting_delete(self):\n pass", "def test_delete__valid(self):\n testing_config.sign_in('[email protected]', 123567890)\n\n with register.app.test_request_context(self.request_path):\n actual_json = self.handler.do_delete(self.feature_id)\n self.assertEqual({'message': 'Done'}, actual_json)\n\n revised_feature = models.Feature.get_by_id(self.feature_id)\n self.assertTrue(revised_feature.deleted)", "def test_workflows_id_delete(self):\n pass", "def test_delete_device_template(self):\n pass", "def test_issue_delete_issue_reaction(self):\n pass", "def delete():\n click.echo('delete was called.')", "def test_delete(self):\n self.assertFalse(self.user1.ad_deleted)\n self.assertTrue(self.user1.active)\n url = '/api/users/{}/'.format(self.user1.ad_guid)\n data = {'Deleted': True}\n response = self.client.put(url, json.dumps(data), content_type='application/json')\n self.assertEqual(response.status_code, 202)\n user = DepartmentUser.objects.get(pk=self.user1.pk) # Refresh from db\n self.assertTrue(user.ad_deleted)\n self.assertFalse(user.active)\n self.assertTrue(user.in_sync)\n # Also delete a second object, to check for silly 'empty string' collisions.\n url = '/api/users/{}/'.format(self.user2.ad_guid)\n response = self.client.put(url, json.dumps(data), content_type='application/json')\n self.assertEqual(response.status_code, 202)", "def test_delete(self, init_db, category):\n category.delete()\n assert Category.get(category.id) == None", "def test_delete(self):\n\n\t\titem_id = 
mock_item()[0]\n\t\tmodels.delete(item_id)\n\n\t\titem = models.item(item_id)\n\t\tself.assertIsNone(item)", "def test_delete_device_user(self):\n pass", "def test_delete_function(self): \n self.new_user.save_prof()\n user2 = User_prof(username = \"mbugua\", bio = \"the world revolves\" ) \n user2.save_prof()\n \n user2.delete_prof()\n all = User_prof.objects.all()\n self.assertEqual(len(all),1)", "def test_client_document_delete(self):\n pass", "def Deletetest(self):\n # Test delete()\n result = self.runner.invoke(\n yoda.cli,\n [\"setup\", \"delete\"],\n input=\"n\\n\"\n )\n self.assertEqual(result.exit_code, 0)\n self.assertIn(\"Operation cancelled\", result.output)\n\n result = self.runner.invoke(\n yoda.cli,\n [\"setup\", \"delete\"],\n input=\"y\\n\"\n )\n self.assertEqual(result.exit_code, 0)\n self.assertIn(\"Configuration file deleted\", result.output)", "def test_delete(self):\n # login as library manager\n self.authenticate(self.user)\n\n # check there are 3 works\n self.assertEqual(Work.objects.count(), 3)\n\n self.assertNotEqual(self.work1.song_set.count(), 0)\n\n # prune works\n response = self.client.delete(self.url)\n\n # check http status\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # check the response\n self.assertDictEqual(response.data, {\"deleted_count\": 2})\n\n # check there are only 1 work remaining\n self.assertEqual(Work.objects.count(), 1)\n\n # check artists with songs remains\n self.assertEqual(Work.objects.filter(pk=self.work2.pk).count(), 0)\n self.assertEqual(Work.objects.filter(pk=self.work3.pk).count(), 0)", "def delete_case(\n case_id: str,\n db: Session = Depends(get_db),\n) -> Any:\n return crud.case.remove(db, id=case_id)", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def delete(self):\n pass", "def test_deletehardwares_item(self):\n pass", "def test_delete(self):\n uid = self._game.uid\n self._game.save(self._ds)\n self._game.delete(self._ds)\n self._game = None\n self.assertIsNone(self._ds.get_strict_controller(Game, uid))\n with self.assertRaises(ValueError):\n self._ds.get_controller(Game, uid)", "def delete(self, _id):", "def test_delete(self, init_db, audit):\n audit.delete()\n assert Audit.get(audit.id) == None", "def test_delete(self):\n self.assertEqual(['DELETE', 'FROM', 'test', ''],\n grammar._DELETE_EXPR.parseString(\"DELETE FROM test;\").asList())", "def test_vault_delete_vault_item(self):\n pass", "def test_db_delete(env_setup, env_table, db_delete_test_data, response_test_data):\n test_string = DbManager(SqLiteHelper, {\"db_path\": env_setup, \"master_table\": env_table}) \\\n .processor(db_delete_test_data.get(\"valid\"))\n assert test_string == response_test_data.get(\"valid_delete\")", "def test_delete_team_member(self):\n pass", "def test_data_object_del(self):\n pass" ]
[ "0.85045195", "0.83174175", "0.8187092", "0.81214935", "0.7901589", "0.78933346", "0.7868206", "0.7762365", "0.7726762", "0.77229226", "0.7537628", "0.74579", "0.73829216", "0.735922", "0.7357545", "0.7353939", "0.73434424", "0.7334559", "0.7333317", "0.7319243", "0.72404516", "0.7239504", "0.72335696", "0.7200652", "0.718772", "0.71629775", "0.7159546", "0.7159288", "0.71318567", "0.7113472", "0.71067", "0.71067", "0.71011424", "0.70708275", "0.7067932", "0.7067932", "0.70647705", "0.70601183", "0.70601183", "0.70506835", "0.7043535", "0.7037767", "0.7022682", "0.70178366", "0.70165765", "0.7015189", "0.7012611", "0.6995133", "0.69840217", "0.6980812", "0.6978908", "0.69775426", "0.6973418", "0.6955638", "0.6949224", "0.69332564", "0.69104236", "0.69086695", "0.69019127", "0.6901348", "0.6896028", "0.68953776", "0.68907887", "0.68855745", "0.6873987", "0.6870149", "0.6868065", "0.6860899", "0.6855267", "0.6855267", "0.68485737", "0.6847119", "0.6843307", "0.684214", "0.6837876", "0.6833162", "0.68168116", "0.6813658", "0.68026245", "0.68007106", "0.68000203", "0.67951345", "0.6793547", "0.67903996", "0.6788151", "0.67856234", "0.6769655", "0.6762277", "0.6762277", "0.6762277", "0.6762277", "0.67601174", "0.67599374", "0.6756749", "0.67540413", "0.67503774", "0.6750146", "0.6728089", "0.672773", "0.6724378" ]
0.94501704
0
Test case for get_case_by_id
def test_get_case_by_id(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_case(loqusdbapi, mocker):\n # GIVEN a loqusdb api\n case_id = 'a_case'\n # WHEN fetching a case with the adapter\n mocker.patch.object(subprocess, 'check_output')\n loqusdb_output = (b\"{'_id': 'one_case', 'case_id': 'one_case'}\\n\"\n b\"{'_id': 'a_case', 'case_id': 'a_case'}\\n\")\n subprocess.check_output.return_value = loqusdb_output\n case_obj = loqusdbapi.get_case(case_id)\n # THEN assert that the correct case id is returned\n assert case_obj['_id'] == case_id", "def get_case(\n case_id: str,\n db: Session = Depends(get_db),\n) -> Any:\n case_and_site = crud.case.get_case_with_site(db, id=case_id)\n if not case_and_site:\n return None\n (case, site) = case_and_site\n return schemas.CaseWithTaskInfo.get_case_with_task_info(case, site)", "def get_case(self, key: str):\n case = self.cases.get(key)\n if not hasattr(case, 'case_id'):\n message = \"get_case(): Case key {} does not have a case_id\"\n logmessage(message.format(key))\n else:\n logmessage(\"get_case(): \" + \"Retrieved case {}\".format(str(case)))\n return case", "def view_cases(context,case_id):\n\n adapter = context.obj['adapter']\n\n if case_id is not None:\n results = adapter.find_case({'case_id': case_id})\n\n else:\n results = adapter.find_cases({})\n\n click.echo(pprint(results))", "def test_id_only_int(self):\n td = self.create_testdata()\n res = self.filter([int(td[\"cv1\"].case.id)])\n\n self.assertEqual(res.get().name, \"CV 1\")", "def get_case(self, case_id: str) -> Union[int, None]:\n for index, case in enumerate(self.cases):\n if case.id == case_id:\n return index\n return None", "def test_get_recipe_by_id(self):\n recipe = self.request_mgr.get_recipe_by_id(35354)\n self.assertIn(\"Guinness\", recipe.get('title'))", "def test_cyclingleagues_id_get(self):\n pass", "def case_id():\n return 3000", "def get_case(self, case_id, full_case=False):\n url = self._get_api_url() + \"cases/\" + str(case_id)\n\n if (full_case):\n url = url + \"/?full_case=true\"\n\n case = self._request(url)\n return case.json()", "def test_get_comment_information_by_id():\n get_comment_information_by_id('g99c7c0')", "def test_get_campaign_by_id_passes(self):\n response = self.client.get(f\"{self.endpoint_url}{self.test_campaign.id}/\")\n response_body = response.get_json()\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response_body, {\"id\": CAMPAIGN_ID, \"name\": CAMPAIGN_NAME})", "def test_id_case_without_prefix(self):\n td = self.create_testdata()\n res = self.filter([unicode(td[\"cv2\"].case.id)])\n\n self.assertEqual(res.get().name, \"CV 2\")", "def test_solareclipses_id_get(self):\n pass", "def case(self, case_id, is_external=False):\r\n return cases.Case(self, case_id, is_external)", "def test_get_by_id(self):\n actual = chef_role.get_by_id(self.role_id)\n eq_(actual['chef_role_name'], self.role_name)", "def test_prefectures_id_get(self):\n pass", "def test_get_chain_by_id(self):\n pass", "def test_christiandoctrines_id_get(self):\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/christiandoctrines/{id}'.format(id='id_example'),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_brains_id_get(self):\n pass", "def test_drugs_id_get(self):\n pass", "def get_case(person_id, disease_id):\n\n ctable = current.s3db.disease_case\n query = (ctable.person_id == person_id) & \\\n (ctable.disease_id == disease_id) & \\\n (ctable.deleted != True)\n record = 
current.db(query).select(ctable.id,\n ctable.case_number,\n limitby = (0, 1)).first()\n return record", "def test_intercommunalitys_id_get(self):\n pass", "def test_get_by_id_wrong_type(self):\n assert ExampleUserModel.get_by_id(\"xyz\") is None", "def test_get_recipe_equipment_by_id(self):\n pass", "def get(self, case_number):\n return self._cases_service.get(case_number)", "def test_medicians_id_get(self):\n pass", "def test_getId(self):\n cases = [\n (self.test_eac + 'NE00401.xml','NE00401'),\n (self.test_eac + 'NE00101.xml','NE00101'),\n (self.test_eac + 'NE00915.xml','NE00915'),\n (self.test_eac + 'NE01001.xml','NE01001'),\n ]\n for case in cases:\n source, expected = case\n doc = EacCpf.EacCpf(source, 'http://www.example.com/metadata.xml', 'http://www.example.com/presentation.html')\n result = doc.getRecordId()\n self.assertNotEqual(doc, None)\n self.assertEquals(result, expected)", "def check_access_and_get_testcase(testcase_id):\n if not helpers.get_user_email():\n raise helpers.UnauthorizedError()\n\n if not testcase_id:\n raise helpers.EarlyExitError('No test case specified!', 404)\n\n try:\n testcase = data_handler.get_testcase_by_id(testcase_id)\n except errors.InvalidTestcaseError:\n raise helpers.EarlyExitError('Invalid test case!', 404)\n\n if not can_user_access_testcase(testcase):\n raise helpers.AccessDeniedError()\n\n return testcase", "def test_resids(self):\n cr = CaseReader(self.filename)\n last_case = cr.get_case(-1)\n self.assertIsNone(last_case.resids,\n \"Case erroneously contains resids.\")", "def get(self, _id):", "def test_beneficiaries_retrieve_withoutID_that_will_fail(self):\n print('the test function name: {}'.format(sys._getframe().f_code.co_name))\n try:\n url = reverse('beneficiary:beneficiary-entity-by-id-retrieve')\n response = self.client.get(url)\n self.assertTrue(response.status_code, 200)\n except Exception as e:\n print(\"reason: \", e)", "def test_workflows_id_get(self):\n pass", "def test_CovidCase_creation(self):\n new_Covid = self.create_CovidCase()\n\n self.assertTrue(isinstance(new_Covid, CovidCase))\n self.assertEqual(new_Covid.country_id, \"TE\")", "def cases(\n case_id,\n institute,\n reruns,\n finished,\n causatives,\n research_requested,\n rerun_monitor,\n is_research,\n status,\n within_days,\n json,\n):\n adapter = store\n\n models = []\n if case_id:\n case_obj = adapter.case(case_id=case_id)\n if case_obj:\n models.append(case_obj)\n else:\n LOG.info(\"No case with id {}\".format(case_id))\n\n else:\n models = adapter.cases(\n collaborator=institute,\n reruns=reruns,\n rerun_monitor=rerun_monitor,\n finished=finished,\n has_causatives=causatives,\n research_requested=research_requested,\n is_research=is_research,\n status=status,\n within_days=within_days,\n )\n models = [case_obj for case_obj in models]\n if len(models) == 0:\n LOG.info(\"No cases could be found\")\n\n if json:\n click.echo(json_lib.dumps(models, default=jsonconverter))\n return\n\n for model in models:\n pp(model)", "def test_variablepresentations_id_get(self):\n pass", "def test_comicscreators_id_get(self):\n pass", "def testIdReturn(self):\n self.assertEqual(\n 'uniqueId',\n self.cc.id\n )", "def check_id(self, id):", "def get(self, case_number, event_id):\n return self._connection.get(\n u\"{}/{}\".format(self._uri_prefix.format(case_number), event_id)\n )", "def test_find_by_id(session, id, has_results):\n section: MhrSection = MhrSection.find_by_id(id)\n if has_results:\n assert section\n assert section.id == 200000000\n assert section.registration_id == 
200000000\n assert section.change_registration_id == 200000000\n assert section.status_type == MhrStatusTypes.ACTIVE\n assert section.compressed_key == '002783'\n assert section.serial_number == '003000ZA002783A'\n assert section.length_feet == 60\n assert section.width_feet == 14\n assert section.length_inches == 10\n assert section.width_inches == 11\n else:\n assert not section", "def test_basketballteams_id_get(self):\n pass", "def test_installments_id_get(self):\n pass", "def get_case(case_id: Optional[str] = None,\n v2beta_id1: Optional[str] = None,\n v2betum_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetCaseResult:\n __args__ = dict()\n __args__['caseId'] = case_id\n __args__['v2betaId1'] = v2beta_id1\n __args__['v2betumId'] = v2betum_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('google-native:cloudsupport/v2beta:getCase', __args__, opts=opts, typ=GetCaseResult).value\n\n return AwaitableGetCaseResult(\n classification=pulumi.get(__ret__, 'classification'),\n contact_email=pulumi.get(__ret__, 'contact_email'),\n create_time=pulumi.get(__ret__, 'create_time'),\n creator=pulumi.get(__ret__, 'creator'),\n description=pulumi.get(__ret__, 'description'),\n display_name=pulumi.get(__ret__, 'display_name'),\n escalated=pulumi.get(__ret__, 'escalated'),\n language_code=pulumi.get(__ret__, 'language_code'),\n name=pulumi.get(__ret__, 'name'),\n priority=pulumi.get(__ret__, 'priority'),\n severity=pulumi.get(__ret__, 'severity'),\n state=pulumi.get(__ret__, 'state'),\n subscriber_email_addresses=pulumi.get(__ret__, 'subscriber_email_addresses'),\n test_case=pulumi.get(__ret__, 'test_case'),\n time_zone=pulumi.get(__ret__, 'time_zone'),\n update_time=pulumi.get(__ret__, 'update_time'))", "def test_get_case_non_existing(loqusdbapi, mocker):\n\n # GIVEN a loqusdb api and a case id\n case_id = 'a_case'\n\n # WHEN case is not in the loqusdb output\n mocker.patch.object(subprocess, 'check_output')\n subprocess.check_output.return_value = b\"{'_id': 'case', 'case_id': 'case'}\\n\"\n\n # THEN CaseNotFoundError should be raised\n with pytest.raises(CaseNotFoundError):\n loqusdbapi.get_case(case_id)\n\n # WHEN loqusdb output is empty string\n mocker.patch.object(subprocess, 'check_output')\n subprocess.check_output.return_value = b\"\"\n\n # THEN CaseNotFoundError should be raised\n with pytest.raises(CaseNotFoundError):\n loqusdbapi.get_case(case_id)", "def test_coupledmodels_id_get(self):\n pass", "def test_detail(self):\n response = self.client.get('/routines/{}/'.format(self.rout1.id))\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data['id'], self.rout1.id)", "def test_sample_one_patient_id(self):\r\n self.assertEqual(self.test_sample.patientID.id, 2)", "def test_racetracks_id_get(self):\n pass", "def test_get_recipe_price_breakdown_by_id(self):\n pass", "def test_get_article_by_id():\n article = Article(\n author = '[email protected]',\n title = 'New Article',\n content = 'Super extra awesome article'\n ).save()\n\n query = GetArticleByIDQuery(\n id = article.id\n )\n\n assert query.execute().id == article.id", "def test_api_can_get_employee_by_id(self):\n res = self.client().get(service_url_emp+'/1')\n self.assertEqual(res.status_code, 200)\n self.assertIn('name1', str(res.data))", "def test_CovidCase_delete(self):\n # setting up by creating and saving the the database\n del_Covid = self.create_CovidCase()\n del_Covid.save()\n del_id = 
del_Covid.id\n # we are going to delete by calling the delete function\n del_deleted = CovidCase.objects.get(id=del_id)\n del_deleted.delete()\n\n self.assertNotIn(del_Covid, CovidCase.objects.all())", "def test_xml_template_get_by_id(self):\n xmlTemplateExpected = XmlTemplate.objects.get(id=1)\n self.assertEqual(XmlTemplate.get_by_id(1), xmlTemplateExpected)", "def test_otoroshi_controllers_adminapi_tcp_service_api_controller_find_entity_by_id_action(self):\n pass", "def test_fetch_task_by_id(self, mock_fetch):\n mock_fetch.return_value = self.url_task\n\n entity = TaskService.fetch_task_by_id(self.url_task.id)\n\n self.assertEquals(entity.id, self.url_task.id)", "def test_data_source_soaps_id_get(self):\n pass", "def test_get_by_id(self):\n\n user = CustomUser.get_by_id(2)\n expected_user = CustomUser.objects.get(id=2)\n self.assertEqual(user, expected_user)", "def testValidateId(self):\n #create a different person and try to use their id\n self.directory.invokeFactory(type_name=\"FSDPerson\",id=\"def456\",firstName=\"Joe\",lastName=\"Blow\")\n self.failUnless('def456' in self.person.validate_id('def456'))\n #create a different content object and try to use its id\n self.directory.invokeFactory(\"Document\", \"mydoc\")\n self.failUnless('mydoc' in self.person.validate_id('mydoc'))", "def test_beneficiaries_retrieve_that_will_pass(self):\n print('the test function name: {}'.format(sys._getframe().f_code.co_name))\n url = reverse('beneficiary:beneficiary-entity-by-id-retrieve', kwargs={'pk': 1})\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)", "def test_fetch_cases(mocked_update, mocked_fogbugz, transactional_db, case):\n mocked_case = mock.Mock()\n mocked_fogbugz.return_value.search.return_value.findAll.return_value = [mocked_case]\n mocked_case.attrs = dict(ixbug=case.id)\n mocked_case.sfixfor.string = '1516'\n mocked_case.dtfixfor.string = '2015-01-18T23:00:00Z'\n mocked_case.dtlastupdated.string = '2015-01-18T23:00:00Z'\n mocked_case.stitle.string = 'Some title'\n mocked_case.soriginaltitle.string = 'Some original title'\n mocked_case.cixproject.string = 'some-ci-project'\n mocked_case.sproject.string = 'Some project'\n mocked_case.sarea.string = 'Some area'\n fetch_cases()\n mocked_update.apply_async.assert_called_once_with(kwargs=dict(case_id=case.id))", "def test_get_item_by_id(self):\n response = self.client.get('/api/v1/category/1',\n headers=self.attendant_headers)\n self.assertEqual(response.status_code, 200)", "def test_flow(client, unrestricted_case, elasticsearch):\n # start with case\n response = client.get(api_reverse(\"cases-detail\", args=[unrestricted_case.id]))\n check_response(response)\n content = response.json()\n # onwards to court\n court_url = content.get(\"court\")[\"url\"]\n assert court_url\n response = client.get(court_url)\n check_response(response)\n # onwards to jurisdiction\n jurisdiction_url = content.get(\"jurisdiction\")[\"url\"]\n assert jurisdiction_url\n response = client.get(jurisdiction_url)\n check_response(response)\n content = response.json()\n assert content.get(\"name\") == unrestricted_case.jurisdiction.name", "def test_variables_id_get(self):\n pass", "def test_get_device_by_id(self):\n pass", "def test_sample_one_id(self):\r\n self.assertEqual(self.test_sample.id, 1)", "def test_datatransformationsetups_id_get(self):\n pass", "def test_findContact(self):\n qs = Contact.objects.all()\n contact = qs[0]\n contact2 = Contact.objects.get(id=contact.id)\n self.assertEqual(contact, contact2)", "def 
get_object(id):", "def test_getitem_id_column(self):\n self.assertEqual(self.tester['required_sample_info_status'],\n 'completed')", "def test_get_actors_by_id(self):\n # need to insert an actor record in order to find it\n res = self.client().post('/actors', headers={\n 'Authorization': \"Bearer {}\".format(self.casting_director_token)\n }, json=self.VALID_NEW_ACTOR)\n # find actor by id\n res = self.client().get('/actors/1', headers={\n 'Authorization': \"Bearer {}\".format(self.casting_director_token)\n })\n data = json.loads(res.data)\n\n self.assertEqual(res.status_code, 200)\n self.assertTrue(data[\"success\"])\n self.assertIn('actor', data)\n self.assertIn('full_name', data['actor'])", "def case_id():\n return \"angrybird\"", "def test_get_specific_book_method(self):\n # When book id is int\n book_id = 1\n result = self.book.get_book(book_id)\n self.assertEqual(result, [{\"Title\": \"Harry Potter and Chamber of Secrets\",\n \"Author\": \"J.K Rowling\",\n \"Copies\": 2}])", "def test_get_sentence_by_id(self):\n response = self.client.open(\n '/api/v1/sentence/{sentenceID}'.fpgapiat(sentenceID=56),\n method='GET')\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_plays_id_get(self):\n pass", "def test_get_sdb_by_id(self, mock_get, mock_sdb_id):\n\n mock_resp = self._mock_response(content=json.dumps(self.sdb_data))\n mock_get.return_value = mock_resp\n\n details = self.client.get_sdb_by_id(\"5f0-99-414-bc-e5909c\")\n\n assert_equals(details, self.sdb_data)\n assert_in('X-Cerberus-Client', self.client.HEADERS)\n mock_get.assert_called_with(\n self.cerberus_url + '/v2/safe-deposit-box/5f0-99-414-bc-e5909c',\n headers=self.client.HEADERS\n )", "def test_idem_make_feature_existing_case(self):\n # set up\n mock_rowcount = PropertyMock(return_value=1)\n type(self.mock_get_cur.return_value).rowcount = mock_rowcount\n self.mock_get_cur.return_value.fetchone.return_value = (10,)\n\n # run SUT\n feature_id = idem_make_feature('mock-feature-name', 1)\n\n # confirm we only called execute once (to get existing)\n self.assertEqual(self.mock_get_cur.return_value.execute.call_count, 1)\n # and ended up with the corect id\n self.assertEqual(feature_id, 10)\n\n # make sure we closed the cursor\n self.mock_get_cur.return_value.close.assert_called_once_with()", "def test_get_id(self):\n\n self.metadata.create_or_update(data=self.create)\n\n # First pick up by name\n res_name = self.metadata.get_by_name(\n entity=Dashboard, fqn=self.entity.fullyQualifiedName\n )\n # Then fetch by ID\n res = self.metadata.get_by_id(entity=Dashboard, entity_id=res_name.id)\n\n self.assertEqual(res_name.id, res.id)", "def test_GET_room_by_id(self):\n\t\tdata = self.GET_data('/api/room/53c47cdbb81c825566b1a9e2')\n\t\tself.assertEqual(data, None)\n\t\tself.POST_room()\n\t\tself.POST_room()\n\t\tspecified_room_id = self.POST_room()\n\t\tdata = self.GET_data('/api/room/' + self.room_id)\n\t\tself.assertNotEqual(data, None)\n\t\tself.assertEqual(data[\"_id\"], self.room_id)", "def test_get_record(self):\n pass", "def test_get_uniqueId():\n rep=RentRepository()\n rep.store(\"12\",\"23\",\"1\", \"1\")\n try:\n\n idBook=\"13\"\n idCustomer=\"54\"\n flag=\"1\"\n id=\"1\"\n Validator.get_uniqueId(rep.get_all(),id)\n assert False\n\n except RepositoryExceptionRent as msg:\n assert True", "def test_patient_one_id(self):\r\n self.assertEqual(self.test_patient.id, 1)", "def test_user_id_get(self):\n pass", "def test_workflows_id_exists_get(self):\n pass", "def 
test_austriansettlements_id_get(self):\n pass", "def test_api_can_get_expense_by_id(self):\n self.register_user()\n result = self.login_user()\n access_token = json.loads(result.data.decode())['access_token']\n rv = self.client().post('/expenses/', headers=dict(Authorization=\"Bearer \" + access_token), data=self.expense)\n self.assertEqual(rv.status_code, 201)\n result_in_json = json.loads(rv.data.decode('utf-8').replace(\"'\", \"\\\"\"))\n results = self.client().get(\n '/expenses/{}'.format(result_in_json['id']), headers=dict(Authorization=\"Bearer \" + access_token))\n res = json.loads(results.data)\n self.assertEqual(results.status_code, 200)\n self.assertEqual('snacks', str(res['name']))", "def test_deaths_id_get(self):\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/deaths/{id}'.format(id='id_example'),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_get_id():\n vc = vtec.parse(EX1)\n assert vc[0].get_id(2005) == \"2005-KJAN-TO-W-0130\"", "def test_get_device_by_id1(self):\n pass", "def test_get_payments_by_id(self):\n pass", "def delete_case(\n case_id: str,\n db: Session = Depends(get_db),\n) -> Any:\n return crud.case.remove(db, id=case_id)", "def test_find_stock_item_by_id(self):\n pass", "def ballot_get_contest_by_id(contest_id):\r\n return make_request({\"method\": \"ballot_get_contest_by_id\",\r\n \"params\": [contest_id],\r\n \"jsonrpc\": \"2.0\",\r\n \"id\": 0, })", "def test_api_get_activity_by_id(self):\n # create a bucket\n res = self.register_login_get_token()\n self.assertEqual(res.status_code, 201)\n\n # create a activity\n res = self.client().post('/bucketlist/1/activities',\n headers=dict(\n Authorization=\"Bearer \" + self.access_token),\n data=self.activity)\n self.assertEqual(res.status_code, 201)\n # get activity created\n activity_created = json.loads(res.data.decode())\n # get activity by its ID\n res = self.client().get('/bucketlist/1/activities/{}'.format(activity_created['id']),\n headers=dict(\n Authorization=\"Bearer \" + self.access_token))\n self.assertEqual(res.status_code, 200)\n self.assertIn('Shop in', str(res.data))", "def test_findContact(self):\n response = self.client.get(self.url)\n qs = response.json()\n contact = qs[0]\n response = self.client.get(self.url + str(contact['id']) + '/')\n self.assertEqual(response.status_code, 200)\n contact2 = response.json()\n self.assertEqual(contact2['name'], 'contact1')", "def test_abbeys_id_get(self):\n headers = { \n 'Accept': 'application/json',\n }\n response = self.client.open(\n '/v0.0.1/abbeys/{id}'.format(id='id_example'),\n method='GET',\n headers=headers)\n self.assert200(response,\n 'Response body is : ' + response.data.decode('utf-8'))", "def test_get(self):\n params= { \"table\": \"${table}\",\n \"id\": self.${table}_id,\n \"languageid\": \"1033\"\n }\n \n sql = \"select mtp_get_cf1 as result from mtp_get_cf1('%s')\" %(json.dumps(params) )\n \n #print( sql )\n \n self.dbi.execute(sql)\n \n rtn = self.dbi.fetchone()\n \n #print(rtn)\n assert \"id\" in rtn[0][\"result\"][0]\n assert self.${table}_id ==rtn[0][\"result\"][0][\"id\"]\n #assert 'id' in rtn[0]['result'][0]", "def test_get_virtual_account_by_id(self):\n pass", "def test_study_id(self):\n self.assertEqual(self.tester.study_id, 1)", "def test_study_id(self):\n self.assertEqual(self.tester.study_id, 1)" ]
[ "0.7206168", "0.7135359", "0.70535195", "0.6983086", "0.69185466", "0.6779661", "0.6560765", "0.6444879", "0.6441801", "0.643984", "0.64307916", "0.6415146", "0.63614887", "0.63151497", "0.6309817", "0.6308288", "0.6291289", "0.62707704", "0.62072265", "0.62045175", "0.6164589", "0.61460763", "0.61310136", "0.61180604", "0.6108853", "0.60624456", "0.6054673", "0.6047162", "0.60330063", "0.60254186", "0.6020209", "0.60140806", "0.6002067", "0.5993986", "0.5982126", "0.5971645", "0.5969344", "0.59641427", "0.5948057", "0.59449583", "0.5944093", "0.59432924", "0.59403384", "0.5937888", "0.5931644", "0.59133476", "0.5912367", "0.59089243", "0.59082735", "0.5896315", "0.5888641", "0.58879805", "0.5858313", "0.5856686", "0.5841545", "0.5841143", "0.58402455", "0.5837481", "0.58352244", "0.5827406", "0.5822369", "0.5811316", "0.5803966", "0.5801058", "0.57971025", "0.5796499", "0.576977", "0.5765945", "0.5749581", "0.574911", "0.57487833", "0.5744215", "0.57198954", "0.57027006", "0.5687755", "0.5685036", "0.56830007", "0.5679125", "0.5675698", "0.5672835", "0.56663924", "0.5665662", "0.56642455", "0.5663364", "0.56599206", "0.5645571", "0.5644065", "0.5624444", "0.56191266", "0.5618081", "0.5610611", "0.56103724", "0.56024015", "0.55973905", "0.5595517", "0.5594629", "0.5589881", "0.55860025", "0.5580105", "0.5580105" ]
0.949092
0
Test case for get_cases_for_dict
def test_get_cases_for_dict(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def navigate_case_dictionary(case_list_for_run, num_cases):", "def test_create_results_dict_1(self):\n dict = find_domains.create_results_dict(self.rps_results)\n with self.subTest():\n self.assertEqual(len(dict.keys()), 4)\n with self.subTest():\n self.assertEqual(len(dict[\"ABCDE\"]), 2)\n with self.subTest():\n self.assertEqual(len(dict[\"FGHIJ\"]), 2)", "def test_map(self):\n\n test_cases = [\n Case(\n description=\"lists of objects\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[\"title\"],\n kwargs={},\n expect=[\"foo\", \"bar\", \"baz\"],\n ),\n Case(\n description=\"missing argument\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[],\n kwargs={},\n expect=FilterArgumentError,\n ),\n Case(\n description=\"too many arguments\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[\"title\", \"\"],\n kwargs={},\n expect=FilterArgumentError,\n ),\n Case(\n description=\"missing property\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"heading\": \"baz\"}],\n args=[\"title\"],\n kwargs={},\n expect=[\"foo\", \"bar\", None],\n ),\n Case(\n description=\"value not an array\",\n val=123,\n args=[\"title\"],\n kwargs={},\n expect=FilterValueError,\n ),\n Case(\n description=\"array contains non object\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, 5, []],\n args=[\"title\"],\n kwargs={},\n expect=FilterValueError,\n ),\n Case(\n description=\"undefined left value\",\n val=self.env.undefined(\"test\"),\n args=[\"title\"],\n kwargs={},\n expect=[],\n ),\n Case(\n description=\"undefined argument\",\n val=[{\"title\": \"foo\"}, {\"title\": \"bar\"}, {\"title\": \"baz\"}],\n args=[self.env.undefined(\"test\")],\n kwargs={},\n expect=[None, None, None],\n ),\n ]\n\n self._test(Map, test_cases)", "def test_dict(self, obj: dict) -> None:\r\n properties = read_properties(obj)\r\n for key, value in properties.items():\r\n conditional_check(key, self.case_check, self.ignored_keys)\r\n if read_type(value) == 'object':\r\n logger.debug('dict -> dict')\r\n self.test_dict(obj=value)\r\n elif read_type(value) == 'array':\r\n logger.debug('dict -> list')\r\n self.test_list(array=value)", "def check_for_dict(check):", "def test_values(self):\n obs = self.tester.values()\n self.assertTrue(isinstance(obs, Iterable))\n exp = {Sample('1.SKB1.640202', self.tester),\n Sample('1.SKB2.640194', self.tester),\n Sample('1.SKB3.640195', self.tester),\n Sample('1.SKB4.640189', self.tester),\n Sample('1.SKB5.640181', self.tester),\n Sample('1.SKB6.640176', self.tester),\n Sample('1.SKB7.640196', self.tester),\n Sample('1.SKB8.640193', self.tester),\n Sample('1.SKB9.640200', self.tester),\n Sample('1.SKD1.640179', self.tester),\n Sample('1.SKD2.640178', self.tester),\n Sample('1.SKD3.640198', self.tester),\n Sample('1.SKD4.640185', self.tester),\n Sample('1.SKD5.640186', self.tester),\n Sample('1.SKD6.640190', self.tester),\n Sample('1.SKD7.640191', self.tester),\n Sample('1.SKD8.640184', self.tester),\n Sample('1.SKD9.640182', self.tester),\n Sample('1.SKM1.640183', self.tester),\n Sample('1.SKM2.640199', self.tester),\n Sample('1.SKM3.640197', self.tester),\n Sample('1.SKM4.640180', self.tester),\n Sample('1.SKM5.640177', self.tester),\n Sample('1.SKM6.640187', self.tester),\n Sample('1.SKM7.640188', self.tester),\n Sample('1.SKM8.640201', self.tester),\n Sample('1.SKM9.640192', self.tester)}\n # Creating a list and looping over it since unittest does not call\n # the __eq__ function on the objects\n 
for o, e in zip(sorted(list(obs), key=lambda x: x.id),\n sorted(exp, key=lambda x: x.id)):\n self.assertEqual(o, e)", "def test_dict(self, dictionary: dict) -> None:\r\n if not isinstance(dictionary, dict):\r\n raise ValueError(f'Expected dictionary, but received {type(dictionary)}')\r\n for key, value in dictionary.items():\r\n conditional_check(key, self.case_check, self.ignored_keys)\r\n if isinstance(value, dict):\r\n self.test_dict(dictionary=value)\r\n elif isinstance(value, list):\r\n self.test_list(items=value)", "def test_values(self):\n obs = self.tester.values()\n self.assertTrue(isinstance(obs, Iterable))\n exp = {PrepSample('1.SKB1.640202', self.tester),\n PrepSample('1.SKB2.640194', self.tester),\n PrepSample('1.SKB3.640195', self.tester),\n PrepSample('1.SKB4.640189', self.tester),\n PrepSample('1.SKB5.640181', self.tester),\n PrepSample('1.SKB6.640176', self.tester),\n PrepSample('1.SKB7.640196', self.tester),\n PrepSample('1.SKB8.640193', self.tester),\n PrepSample('1.SKB9.640200', self.tester),\n PrepSample('1.SKD1.640179', self.tester),\n PrepSample('1.SKD2.640178', self.tester),\n PrepSample('1.SKD3.640198', self.tester),\n PrepSample('1.SKD4.640185', self.tester),\n PrepSample('1.SKD5.640186', self.tester),\n PrepSample('1.SKD6.640190', self.tester),\n PrepSample('1.SKD7.640191', self.tester),\n PrepSample('1.SKD8.640184', self.tester),\n PrepSample('1.SKD9.640182', self.tester),\n PrepSample('1.SKM1.640183', self.tester),\n PrepSample('1.SKM2.640199', self.tester),\n PrepSample('1.SKM3.640197', self.tester),\n PrepSample('1.SKM4.640180', self.tester),\n PrepSample('1.SKM5.640177', self.tester),\n PrepSample('1.SKM6.640187', self.tester),\n PrepSample('1.SKM7.640188', self.tester),\n PrepSample('1.SKM8.640201', self.tester),\n PrepSample('1.SKM9.640192', self.tester)}\n # Creating a list and looping over it since unittest does not call\n # the __eq__ function on the objects\n for o, e in zip(sorted(list(obs), key=lambda x: x.id),\n sorted(exp, key=lambda x: x.id)):\n self.assertEqual(o, e)", "def test_fn_call_with_dict():\n l = [1, 2, 3, 4, 5]\n ds = [defaultdict(int), defaultdict(int), defaultdict(int)]\n for d in ds:\n for fn in [s7.div, s7.mul, s7.add, \"abcd\", 1234]:\n try:\n f = s7.count_fn_called_with_dict(dict_=d, fn=fn)\n for i in range(0, random.randint(2, 10)):\n f(*l)\n assert fn in d.keys() and d[fn] == (i + 1)\n except Exception as e:\n assert e.__class__.__name__ == TypeError.__name__", "def test_sample_mapped_keys(self):\r\n\r\n # With num_coverage=1 only the keys will be sampled\r\n actual = sample_mapped_keys(self.test_map, 1)\r\n self.assertEqual(actual, {'1': ['1'], '2': ['2']})\r\n\r\n actual = sample_mapped_keys(self.test_map, 3)\r\n for key in actual.keys():\r\n # check number of sampled keys\r\n self.assertEqual(3, len(actual[key]))\r\n for x in actual[key]:\r\n # check that sampled key is in the full list\r\n correct = list(self.test_map[key])\r\n correct.append(key)\r\n self.assertTrue(x in correct)", "def test_dict(self, testdata: TestData) -> None:\n for data in testdata['observation_type']:\n observation_type = ObservationType.from_dict(data)\n assert data == observation_type.to_dict()", "def test_convert(self):\n for test in self.test_dict_data:\n self.assertEqual(dottedDict(test[0]).data, test[1])", "def test_returns_dict(self):\n metrics = ('input', 'output')\n\n @callback_return(*metrics)\n def returns_dict():\n return {'output': 1, 'input': 2, 'extra': 3}\n\n r = returns_dict()\n self.assertEqual(len(metrics), len(r.keys()), 'Extra 
return values should be dropped.')\n self.assertEqual(2, r['input'])\n self.assertEqual(1, r['output'])\n self.assertNotIn('extra', r)", "def test1(self) -> None:\n dict_ = {\"key0\": \"value0\", \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\",), \"value0\"), ((\"key1\",), \"value1\")]\n self.assertListEqual(actual_result, expected_result)", "def test_cases():\n assert count('aba') == {'a': 2, 'b': 1}\n assert count('abcddbacdb') == {'a': 2,'b': 3,'c': 2,'d': 3}\n assert count('') == {}\n print(\"Test Success!\")", "def test_1():\n results = base_tests()\n assert type(results) is list\n assert type(results[0]) is dict\n assert len(results) == 3", "def test_comparing(self):\n for test in self.test_dict_data:\n self.assertEqual(dottedDict(test[0]), test[1])", "def verifyData(self, expectedDict):\n pass", "def test_items(self):\n obs = self.tester.items()\n self.assertTrue(isinstance(obs, Iterable))\n exp = {('center_name', 'ANL'), ('center_project_name', None),\n ('emp_status', 'EMP'), ('barcodesequence', 'AGCGCTCACATC'),\n ('library_construction_protocol',\n 'This analysis was done as in Caporaso et al 2011 Genome '\n 'research. The PCR primers (F515/R806) were developed against '\n 'the V4 region of the 16S rRNA (both bacteria and archaea), '\n 'which we determined would yield optimal community clustering '\n 'with reads of this length using a procedure similar to that '\n 'of ref. 15. [For reference, this primer pair amplifies the '\n 'region 533_786 in the Escherichia coli strain 83972 sequence '\n '(greengenes accession no. prokMSA_id:470367).] The reverse '\n 'PCR primer is barcoded with a 12-base error-correcting Golay '\n 'code to facilitate multiplexing of up to 1,500 samples per '\n 'lane, and both PCR primers contain sequencer adapter '\n 'regions.'), ('linkerprimersequence', 'GTGCCAGCMGCCGCGGTAA'),\n ('target_subfragment', 'V4'), ('target_gene', '16S rRNA'),\n ('run_center', 'ANL'), ('run_prefix', 's_G1_L001_sequences'),\n ('run_date', '8/1/12'), ('experiment_center', 'ANL'),\n ('experiment_design_description',\n 'micro biome of soil and rhizosphere of cannabis plants '\n 'from CA'), ('experiment_title', 'Cannabis Soil Microbiome'),\n ('platform', 'Illumina'), ('samp_size', '.25,g'),\n ('sequencing_meth', 'Sequencing by synthesis'),\n ('illumina_technology', 'MiSeq'), ('sample_center', 'ANL'),\n ('pcr_primers',\n 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT'),\n ('study_center', 'CCME')}\n self.assertEqual(set(obs), exp)", "def test_dict(self):\n self.assertValue(\n {'foo': 'foo', 'bar': 43, 'zippy': 'zoo'},\n 'bar: 43 foo: foo zippy: zoo\\n'\n )", "def test_parametrized_cases_tests( self ):\n my_cases = JSON_FILES_PATH + \"testingCases_RF1.csv\"\n with open(my_cases, newline='', encoding='utf-8') as csvfile:\n param_test_cases = csv.DictReader(csvfile, delimiter=';')\n my_code = AccessManager()\n for row in param_test_cases:\n print(\"Param:\" + row[ 'ID TEST' ] + row[ \"VALID INVALID\" ])\n if row[ \"VALID INVALID\" ] == \"VALID\":\n valor = my_code.request_access_code( row[ \"DNI\" ], row[ \"NAME SURNAME\" ],\n row[ \"ACCESS TYPE\" ], row[ \"email\" ],\n int(row[ \"VALIDITY\" ]))\n self.assertEqual( row[ 'EXPECTED RESULT' ], valor)\n # Check if this DNI is store in storeRequest.json\n generated_request = AccessRequest.create_request_from_code(valor,row[ \"DNI\" ])\n my_request = AccessRequest(row[ \"DNI\" ], row[ \"NAME SURNAME\" ],\n row[ \"ACCESS TYPE\" ], row[ \"email\" ],\n int(row[ \"VALIDITY\" 
]))\n self.assertDictEqual(generated_request.__dict__, my_request.__dict__)\n else:\n with self.assertRaises(AccessManagementException) as c_m:\n valor = my_code.request_access_code(row[ \"DNI\" ], row[ \"NAME SURNAME\" ],\n row[ \"ACCESS TYPE\" ], row[ \"email\" ],\n int(row[ \"VALIDITY\" ]))\n self.assertEqual(c_m.exception.message, row[ 'EXPECTED RESULT' ])", "def test():\n test = [{'key': 'val1'}, ['key']]\n assert fetch_data_by_keys(*test).unwrap() == 'val1'", "def test_values(self):\n obs = self.tester.values()\n self.assertTrue(isinstance(obs, Iterable))\n exp = {'ANL', None, None, None, 'EMP', 'AGCGCTCACATC',\n 'This analysis was done as in Caporaso et al 2011 Genome '\n 'research. The PCR primers (F515/R806) were developed against '\n 'the V4 region of the 16S rRNA (both bacteria and archaea), '\n 'which we determined would yield optimal community clustering '\n 'with reads of this length using a procedure similar to that of'\n ' ref. 15. [For reference, this primer pair amplifies the '\n 'region 533_786 in the Escherichia coli strain 83972 sequence '\n '(greengenes accession no. prokMSA_id:470367).] The reverse PCR'\n ' primer is barcoded with a 12-base error-correcting Golay code'\n ' to facilitate multiplexing of up to 1,500 samples per lane, '\n 'and both PCR primers contain sequencer adapter regions.',\n 'GTGCCAGCMGCCGCGGTAA', 'V4', '16S rRNA', 'ANL',\n 's_G1_L001_sequences', '8/1/12', 'ANL',\n 'micro biome of soil and rhizosphere of cannabis plants from '\n 'CA', 'Cannabis Soil Microbiome', 'Illumina', '.25,g',\n 'Sequencing by synthesis', 'MiSeq', 'ANL',\n 'FWD:GTGCCAGCMGCCGCGGTAA; REV:GGACTACHVGGGTWTCTAAT', 'CCME'}\n self.assertEqual(set(obs), exp)", "def test_d(self):\n user_dict = {'A': 3, 'B': 4, 'C': 5, 'D': 6, 'E': 7}\n user_key = 'd'\n self.assertEqual(6, switch_average(user_dict, user_key.upper()))", "def test_sum_dict_values(self, mocker):\n\n mocked = mocker.patch.object(\n LeafNodeScaledConformalPredictor, \"_sum_dict_values\"\n )\n\n dummy_confo_model = DummyLeafNodeScaledConformalPredictor()\n\n # set leaf_node_counts attribute so np.apply_along_axis can run\n dummy_confo_model.leaf_node_counts = {\"a\": 1}\n\n leaf_node_predictions_value = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])\n\n dummy_confo_model._count_leaf_node_visits_from_calibration(\n leaf_node_predictions_value\n )\n\n assert (\n mocked.call_count == leaf_node_predictions_value.shape[0]\n ), \"incorrect number of calls to _sum_dict_values\"\n\n for call_no in range(leaf_node_predictions_value.shape[0]):\n\n call_args = mocked.call_args_list[call_no]\n call_pos_args = call_args[0]\n call_kwargs = call_args[1]\n\n assert call_kwargs == {\n \"counts\": dummy_confo_model.leaf_node_counts\n }, f\"keyword args in _sum_dict_values call {call_no} incorrect\"\n\n assert (\n len(call_pos_args) == 1\n ), f\"number of positional args in _sum_dict_values call {call_no} incorrect\"\n\n np.testing.assert_array_equal(\n call_pos_args[0], leaf_node_predictions_value[call_no, :]\n )", "def test_dictionary(self):\n self.assertIsInstance(self.test1json, dict)", "def test_2():\n results = base_tests()\n correct = {\n \"Consequence\": \"synonymous_variant\",\n \"Codons\": \"tgC/tgT\",\n \"Amino_acids\": \"C\",\n \"Gene\": \"ENSG00000130164\",\n \"SYMBOL\": \"LDLR\",\n \"Feature\": \"ENST00000558013\",\n \"EXON\": \"2/18\",\n \"PolyPhen\": \"\",\n \"SIFT\": \"\",\n \"Protein_position\": \"27/858\",\n 'BIOTYPE\"': \"protein_coding\",\n }\n assert results[0] == correct", "def test_sum_dict_values_returned(self, 
mocker):\n\n # set the return value from _sum_dict_values calls\n sum_dict_values_return_values = [-2, 1, 0]\n\n mocker.patch.object(\n LeafNodeScaledConformalPredictor,\n \"_sum_dict_values\",\n side_effect=sum_dict_values_return_values,\n )\n\n dummy_confo_model = DummyLeafNodeScaledConformalPredictor()\n\n # set leaf_node_counts attribute so np.apply_along_axis can run\n dummy_confo_model.leaf_node_counts = {\"a\": 1}\n\n # set leaf_node_predictions arg so _sum_dict_values will be called 3 times\n leaf_node_predictions_value = np.array([[1], [2], [3]])\n\n results = dummy_confo_model._count_leaf_node_visits_from_calibration(\n leaf_node_predictions_value\n )\n\n np.testing.assert_array_equal(results, np.array(sum_dict_values_return_values))", "def test_if_keys_or_values_in_result_dict_are_int(self):\n for key, value in add_expressions(1, 2, 8)(2, 3).items():\n self.assertIsInstance(key, int)\n self.assertIsInstance(value, int)", "def test_c(self):\n user_dict = {'A': 3, 'B': 4, 'C': 5, 'D': 6, 'E': 7}\n user_key = 'C'\n self.assertEqual(5, switch_average(user_dict, user_key.upper()))", "def test4(self) -> None:\n dict_ = {\"key0\": \"value0\", \"key1\": None}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\",), \"value0\"), ((\"key1\",), None)]\n self.assertListEqual(actual_result, expected_result)", "def test_create_cds_translation_dict_1(self):\n t1 = \"ABCDE\"\n t2 = \"FGHIJ\"\n t3 = \"RSTUV\"\n self.gene_data_1[\"Translation\"] = t1\n self.gene_data_2[\"Translation\"] = t2\n self.gene_data_3[\"Translation\"] = t3\n cdd_list = [self.gene_data_1, self.gene_data_2, self.gene_data_3]\n dict = find_domains.create_cds_translation_dict(cdd_list)\n\n exp_keys = {t1, t2, t3}\n with self.subTest():\n self.assertEqual(dict.keys(), exp_keys)\n with self.subTest():\n self.assertEqual(dict[t1], {self.gene_data_1[\"GeneID\"]})\n with self.subTest():\n self.assertEqual(dict[t2], {self.gene_data_2[\"GeneID\"]})\n with self.subTest():\n self.assertEqual(dict[t3], {self.gene_data_3[\"GeneID\"]})", "def test2(self) -> None:\n dict_ = {\n \"key0\": {\"key00\": \"value00\", \"key01\": \"value01\"},\n \"key1\": \"value1\",\n }\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [\n ((\"key0\", \"key00\"), \"value00\"),\n ((\"key0\", \"key01\"), \"value01\"),\n ((\"key1\",), \"value1\"),\n ]\n self.assertListEqual(actual_result, expected_result)", "def test_get_facet_dictionary(self):\n pass", "def test3(self) -> None:\n dict_ = {\"key0\": {\"key00\": {\"key000\": \"value000\"}}, \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [\n ((\"key0\", \"key00\", \"key000\"), \"value000\"),\n ((\"key1\",), \"value1\"),\n ]\n self.assertListEqual(actual_result, expected_result)", "def test_case_assumptions(self):\n self.assertTrue(\n self.existing_map_id in self.example_map.feedline_map_lookup,\n msg=f'Expects {self.existing_map_id} to be in example map keys ({list(self.example_map.feedline_map_lookup.keys())}).'\n )\n feedline_map: FeedlineMap = self.example_map.feedline_map_lookup[self.existing_map_id]\n self.assertTrue(\n self.existing_feedline_nr in feedline_map.bitmap_lookup,\n msg=f'Expects {self.existing_feedline_nr} to be in example map keys ({list(feedline_map.bitmap_lookup.keys())}).'\n )\n self.assertFalse(\n self.not_existing_map_id in self.example_map.feedline_map_lookup,\n msg=f'Expects {self.existing_map_id} NOT to be in example map keys 
({list(self.example_map.feedline_map_lookup.keys())}).'\n )\n self.assertFalse(\n self.not_existing_feedline_nr in feedline_map.bitmap_lookup,\n msg=f'Expects {self.not_existing_feedline_nr} NOT to be in example map keys ({list(feedline_map.bitmap_lookup.keys())}).'\n )", "def generateTests(self, dict, dictname='totest'):\n for name, cases in dict.items():\n for casenum in range(len(cases)):\n case = cases[casenum]\n run_in_debugger = 0\n if len(case) == 4:\n if case[-1]:\n run_in_debugger = 1\n else:\n continue\n self.addTestCase(self.test_case_class, 'test_parse_table',\n input=case[0], expected=case[1],\n id='%s[%r][%s]' % (dictname, name, casenum),\n run_in_debugger=run_in_debugger)\n self.addTestCase(self.test_case_class, 'test_parse',\n input=case[0], expected=case[2],\n id='%s[%r][%s]' % (dictname, name, casenum),\n run_in_debugger=run_in_debugger)", "def test_get_whole_dict(self):\n result = self.runner.invoke(\n cli,\n [*CLI_LOG_OPTION, \"config\", \"get\", \"skills.dummy.behaviours\"],\n standalone_mode=False,\n )\n assert result.exit_code == 0\n\n actual_object = json.loads(result.output)\n expected_object = {\n \"dummy\": {\n \"args\": {\"behaviour_arg_1\": 1, \"behaviour_arg_2\": \"2\"},\n \"class_name\": \"DummyBehaviour\",\n },\n \"dummy_behaviour_same_classname\": {\n \"args\": {\"behaviour_arg_1\": 1, \"behaviour_arg_2\": \"2\"},\n \"class_name\": \"DummyBehaviour\",\n \"file_path\": \"dummy_subpackage/foo.py\",\n },\n }\n assert actual_object == expected_object", "def test_iterate_arlequin_with_dict_return():\n for entry in iterate_arlequin(SNPS_TWO_POPS_TEXT):\n assert isinstance(entry, dict)", "def testCasDict(self):\n casDict = {\"Singular\":\"Singular\", \"Magma\":\"magma\", \"Maple\":\"maple\"}\n self.assertEqual(casDict, self.msTest.getCASDict(),\n \"The dictionary inside the MachineSettings was not validly initialized\")", "def inner_test(param: dict):\n pass", "def test_mappings():\n\n data = {\n 'succeeded': [],\n 'failed': [],\n 'partial': []\n }\n\n zipcodes = get_postcodes()\n LOGGER.info('analyzing %d zip codes', len(zipcodes))\n\n session = requests.Session()\n for code in zipcodes:\n try:\n response = session.get('http://localhost:10847/zipcode/' + str(code))\n response.raise_for_status()\n\n payload = response.json()['data']\n if 'economic_region' not in payload:\n data['partial'].append(code)\n else:\n data['succeeded'].append(code)\n except requests.HTTPError:\n LOGGER.exception('unable to validate zip code')\n data['failed'].append(code)\n\n with open('./results.json', 'w') as f:\n json.dump(data, f)", "def test_create_offense_by_zip_dict(self):\r\n\r\n data = [\r\n ['Report_No', 'Reported_Date', 'Reported_Time', 'From_Date', 'From_Time', 'To_Date', 'To_Time', 'Offense', 'IBRS', 'Description', 'Beat', 'Address', 'City', 'Zip Code', 'Rep_Dist', 'Area', 'DVFlag', 'Involvement', 'Race', 'Sex', 'Age', 'Firearm Used Flag', 'Location'],\r\n ['', '03/19/2019', '', '', '', '', '', 'Vehicular', '', '', '', '', '', '64161', '', '', '', '', '', '', '', '', ''],\r\n ['', '03/19/2019', '', '', '', '', '', 'Vehicular', '', '', '', '', '', '64125', '', '', '', '', '', '', '', '', ''],\r\n ['', '04/21/2019', '', '', '', '', '', 'Arson', '', '', '', '', '', '64161', '', '', '', '', '', '', '', '', '']\r\n ]\r\n result = cds.create_offense_by_zip(data)\r\n self.assertIsInstance(result, dict, \"function should return a dictionary\")\r\n self.assertEqual(len(result), 2, \"There should be 2 items in the dictionary\")\r\n self.assertIsInstance(result[\"Vehicular\"], dict, 
\"value for key Vehicular should be a dicationary\")\r\n self.assertIsInstance(result[\"Arson\"], dict, \"value for key Arson should be a dicationary\")\r\n\r\n self.assertEqual(len(result[\"Vehicular\"]), 2, \"dictionary for key Vehicular should have 2 items in it\")\r\n self.assertEqual(result[\"Vehicular\"], {'64161':1,'64125':1}, \"value for dictionary key Vehicular incorrect\")\r\n self.assertEqual(result[\"Arson\"], {'64161':1}, \"value for dictionary key Vehicular incorrect\")", "def testGetConfDict():\n\n conf = naiveConf.NaiveConf(exampleConfFname)\n confDict = conf.getConfDict()\n assert type(confDict) == dict\n assert confDict['x'] == conf.x\n assert confDict['y'] == conf.y\n assert confDict['L'] == conf.L", "def test_get_results_by_race():\n data = race.get_results_by_race(random.randint(1, 3))\n assert type(data) == list\n for lines in data:\n assert type(lines) == dict\n assert len(lines) == 7\n assert \"Points\" in lines.keys() and \"Place\" in lines.keys()\n assert len(data) == 11", "def test5(self) -> None:\n dict_ = {\"key0\": {\"key00\": None}, \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\", \"key00\"), None), ((\"key1\",), \"value1\")]\n self.assertListEqual(actual_result, expected_result)", "def test_has_correct_number_of_keys_and_values(self):\n self.has_correct_number_of_keys_and_values(2, 1)", "def test_has_correct_number_of_keys_and_values(self):\n self.has_correct_number_of_keys_and_values(2, 1)", "def testTestExpectationMap(self):\n self._StringToMapHelper(data_types.TestExpectationMap,\n data_types.ExpectationBuilderMap)", "def inner_test(param: dict):\n self.assertEqual(param, {'foo': 1, 'bar': ['bat', 2]})", "def test(self):\n bs = verif.metric.Bs()\n bsrel = verif.metric.BsRel()\n bsres = verif.metric.BsRes()\n bsunc = verif.metric.BsUnc()\n bss = verif.metric.Bss()\n obs = [[0],\n [0],\n [0],\n [1],\n [0, 0, 1, 1, 1],\n [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]\n fcst = [[0],\n [1],\n [0.3],\n [0.1],\n [0.21, 0.21, 0.21, 0.91, 0.91],\n [0.06, 0.61, 0.45, 0.87, 0.13, 0.61, 0.79, 0.61, 0.06, 0.06, 0.79, 0.61, 0.13, 0.13, 0.79, 0.21, 0.06, 0.55, 0.37, 0.37]]\n ans = {bs: [0, 1, 0.09, 0.81, 0.1457, 0.34928],\n bsrel: [0, 1, 0.09, 0.81, 0.01236667, 0.2076133],\n bsres: [0, 0, 0, 0, 0.1066667, 0.1083333],\n bsunc: [0, 0, 0, 0, 0.24, 0.25],\n bss: [np.nan, np.nan, np.nan, np.nan, 0.3929167, -0.39712]}\n for i in range(len(obs)):\n o = np.array(obs[i])\n f = np.array(fcst[i])\n for key in ans:\n print(key, i)\n calculated = key.compute_from_obs_fcst(o, f)\n expected = ans[key][i]\n if np.isnan(expected):\n self.assertTrue(np.isnan(expected), np.isnan(calculated))\n else:\n self.assertAlmostEqual(expected, calculated, places=5)", "def generateTests(self, dict, dictname='totest'):\n for name, cases in dict.items():\n for casenum in range(len(cases)):\n case = cases[casenum]\n run_in_debugger = 0\n if len(case)==3:\n if case[2]:\n run_in_debugger = 1\n else:\n continue\n self.addTestCase(\n self.test_case_class, 'test_parser',\n input=case[0], expected=case[1],\n id='%s[%r][%s]' % (dictname, name, casenum),\n run_in_debugger=run_in_debugger)", "def test_create_cds_translation_dict_2(self):\n t1 = \"ABCDE\"\n t2 = \"FGHIJ\"\n self.gene_data_1[\"Translation\"] = t1\n self.gene_data_2[\"Translation\"] = t2\n self.gene_data_3[\"Translation\"] = t2\n cdd_list = [self.gene_data_1, self.gene_data_2, self.gene_data_3]\n dict = find_domains.create_cds_translation_dict(cdd_list)\n\n exp_keys 
= {t1, t2}\n with self.subTest():\n self.assertEqual(dict.keys(), exp_keys)\n with self.subTest():\n self.assertEqual(dict[t1], {self.gene_data_1[\"GeneID\"]})\n with self.subTest():\n self.assertEqual(dict[t2], {self.gene_data_2[\"GeneID\"],\n self.gene_data_3[\"GeneID\"]})", "def generateTests(self, dict, dictname='totest'):\n for name, cases in dict.items():\n for casenum in range(len(cases)):\n case = cases[casenum]\n run_in_debugger = 0\n if len(case) == 3:\n if case[-1]:\n run_in_debugger = 1\n else:\n continue\n self.addTestCase(self.test_case_class, 'test_parse',\n input=case[0], expected=case[1],\n id='%s[%r][%s]' % (dictname, name, casenum),\n run_in_debugger=run_in_debugger)", "def verifyDicts(cityCodeDict, aircraftCodeDict, flightDict):\n print 'verify dictionary creation by length'\n print 'length of cityCodeDict', len(cityCodeDict)\n print 'length of aircraftCodeDict', len(aircraftCodeDict)\n print 'length of flightDict', len(flightDict)\n print 'display a row from each dictionary to verify contents'\n for key in cityCodeDict.keys():\n print 'for key', key, 'the row is', cityCodeDict[key].cityCode, cityCodeDict[key].city\n break\n for key in aircraftCodeDict.keys():\n print 'for key', key, 'the row is', aircraftCodeDict[key].aircraftCode, aircraftCodeDict[key].name\n break \n for key in flightDict.keys():\n print 'for key', key, 'the row is', flightDict[key].flightnum, flightDict[key].departCity,flightDict[key].arriveCity\n break", "def verifyDicts(cityCodeDict, aircraftCodeDict, flightDict):\n print 'verify dictionary creation by length'\n print 'length of cityCodeDict', len(cityCodeDict)\n print 'length of aircraftCodeDict', len(aircraftCodeDict)\n print 'length of flightDict', len(flightDict)\n print 'display a row from each dictionary to verify contents'\n for key in cityCodeDict.keys():\n print 'for key', key, 'the row is', cityCodeDict[key].cityCode, cityCodeDict[key].city\n break\n for key in aircraftCodeDict.keys():\n print 'for key', key, 'the row is', aircraftCodeDict[key].aircraftCode, aircraftCodeDict[key].name\n break \n for key in flightDict.keys():\n print 'for key', key, 'the row is', flightDict[key].flightnum, flightDict[key].departCity,flightDict[key].arriveCity\n break", "def test_b(self):\n user_dict = {'A': 3, 'B': 4, 'C': 5, 'D': 6, 'E': 7}\n user_key = 'B'\n self.assertEqual(4, switch_average(user_dict, user_key.upper()))", "def func4(key):\n return key in list(my_test_dict.keys())", "def func2(key):\n return key in my_test_dict.keys()", "def selectedComparisons(samples_dict):\n sectors=\"peripherals\",\"intermediaries\",\"hubs\"\n ks_measures={}\n for analyses_type in samples_dict[sectors[0]]:\n ks_measures[analyses_type]={}\n for analysis_grouping in samples_dict[sectors[0]][analyses_type]:\n ks_measures[analyses_type][analysis_grouping]={}\n if samples_dict[sectors[0]][analyses_type][analysis_grouping]==dict:\n ks_measures[analyses_type][analysis_grouping]={}\n for analysis in samples_dict[sectors[0]][analyses_type][analysis_grouping]:\n for var in samples_dict[sectors[0]][analyses_type][analysis_grouping][analysis]:\n samples_peripherals=samples_dict[sectors[0]][analyses_type][analysis_grouping][analysis]\n samples_intermediaries=samples_dict[sectors[1]][analyses_type][analysis_grouping][analysis]\n samples_hubs=samples_dict[sectors[2]][analyses_type][analysis_grouping][analysis]\n ks_measures[analysis][\"peripherals_intermediaries\"]=P.kolmogorovSmirnovTest(samples_peripherals,samples_intermediaries)\n 
ks_measures[analysis][\"peripherals_hubs\"]=P.kolmogorovSmirnovTest(samples_peripherals,samples_hubs)\n ks_measures[analysis][\"hubs_intermediaries\"]=P.kolmogorovSmirnovTest(samples_hubs,samples_intermediaries)\n else:\n for var in samples_dict[sectors[0]][analyses_type][analysis_grouping]:\n samples_peripherals=samples_dict[sectors[0]][analyses_type][analysis_grouping]\n samples_intermediaries=samples_dict[sectors[1]][analyses_type][analysis_grouping]\n samples_hubs=samples_dict[sectors[2]][analyses_type][analysis_grouping]\n\n\n\n samples[sector][analyses][analysis_grouping]=updateDict(samples[sector][analyses][analysis_grouping],getSamples(authors_analysis[analyses][author][analysis_grouping]))", "def _testResultsEqual(self, expected_dict, gotten_result):\n gotten_dict = {k: t.eval() for k, t in gotten_result._asdict().items()}\n self.assertItemsEqual(\n list(expected_dict.keys()), list(gotten_dict.keys()))\n\n for key, expected_values in expected_dict.items():\n self.assertAllClose(expected_values, gotten_dict[key])", "def step_impl(context, key):\n entries = set()\n print('Collected entries:')\n for row in context.table:\n print(' ', row[key])\n entries.add(row[key])\n print('Tested entries:')\n for entry in context.response_json:\n print(' ', entry[key])\n assert entry[key] in entries", "def test6(self) -> None:\n dict_ = {\"key0\": {}, \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\",), {}), ((\"key1\",), \"value1\")]\n self.assertListEqual(actual_result, expected_result)", "def test_a(self):\n user_dict = {'A': 3, 'B': 4, 'C': 5, 'D': 6, 'E': 7}\n user_key = 'a'\n self.assertEqual(3, switch_average(user_dict, user_key.upper()))", "def test_word_info(self):\n word = \"vitality\"\n rv = self.wordInfo(input_word=word)\n expected_output = {\n word: {\n \"frequency\": \"975\",\n \"defination\": \"{'Noun': ['an energetic style', 'a healthy capacity for vigorous activity', '(biology', 'not physical or chemical', 'the property of being able to survive and grow']}\",\n \"antonyms\": \"['enervation', 'inactivity', 'lethargy', 'weakness', 'lack']\",\n \"examples\": \"{1: 'And finally, both Lord Robertson and Secretary of State Powell pointed to what they called the vitality and the relevance of NATO, and said any damage done to the reputation of NATO over the last couple weeks can quite, in their words, be easily overcome.', 2: \\\"Professor Huxley himself has told us that he lived in 'the hope and the faith that in course of time we shall see our way from the constituents of the protoplasm to its properties,' _i. 
e._ from carbonic acid, water, and ammonia to that mysterious thing which we call vitality or life -- from the molecular motion of the brain to Socratic wisdom,\\\", 3: 'The strongest, the most amply endowed with what we call vitality or power to live, win.', 4: 'But the thought that it is mechanics and chemistry applied by something of which they as such, form no part, some agent or principle which we call vitality, is welcome to us.', 5: '\\\"The Indian savages,\\\" said Margrave, sullenly, \\\"have not a health as perfect as mine, and in what you call vitality -- the blissful consciousness of life -- they are as sticks and stones compared to me.\\\"'}\",\n \"pronounciation\": \"V AY0 T AE1 L AH0 T IY0\",\n \"synonyms\": \"['vigor', 'continuity', 'spunk', 'strength', 'verve']\"\n }\n }\n response_data = json.loads(rv.get_data(as_text=True))\n\n self.assertEquals(rv.status_code, 200)\n self.assertEquals(response_data[word][\"defination\"], expected_output[word][\"defination\"])\n self.assertEquals(response_data[word][\"antonyms\"], expected_output[word][\"antonyms\"])\n self.assertEquals(response_data[word][\"examples\"], expected_output[word][\"examples\"])\n self.assertEquals(response_data[word][\"frequency\"], expected_output[word][\"frequency\"])\n self.assertEquals(response_data[word][\"pronounciation\"], expected_output[word][\"pronounciation\"])\n self.assertEquals(response_data[word][\"synonyms\"], expected_output[word][\"synonyms\"])", "def test9(self) -> None:\n config = cconfig.Config()\n dict_ = {\"key0\": {\"key00\": config}, \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\", \"key00\"), config), ((\"key1\",), \"value1\")]\n self.assertListEqual(actual_result, expected_result)", "def test_retrieve_data_dict_1(self):\n list_of_data_dicts = \\\n basic.retrieve_data_dict(self.test_import_table_1)\n self.assertEqual(len(list_of_data_dicts), 2)", "def getTestResults():", "def test_apply_scalar_map(self):\n super(TestObjDict, self).test_apply_scalar_map(_as_obj=True)", "def test_no_reproducible_for_varinat_analysis(self):\n self.testcases[0].job_type = 'some_type1'\n self.testcases[0].project_name = 'project1'\n self.testcases[0].crash_state = 'abcde'\n self.testcases[0].one_time_crasher_flag = False\n self.testcases[0].crash_type = 'crash_type1'\n self.testcases[0].security_flag = True\n self.testcases[1].job_type = 'some_type2'\n self.testcases[1].project_name = 'project1'\n self.testcases[1].crash_state = 'vwxyz'\n self.testcases[1].crash_type = 'crash_type2'\n self.testcases[1].one_time_crasher_flag = True\n self.testcases[1].security_flag = True\n\n for t in self.testcases:\n t.put()\n\n # testcase2's varinat will be evaluated against testcase1\n self.testcase_variants[0].job_type = 'fake_engine_asan_project1'\n self.testcase_variants[0].testcase_id = self.testcases[0].key.id()\n self.testcase_variants[0].security_flag = True\n self.testcase_variants[1].job_type = 'some_type1'\n self.testcase_variants[1].crash_state = 'abcde'\n self.testcase_variants[1].crash_type = 'crash_type1'\n self.testcase_variants[1].testcase_id = self.testcases[1].key.id()\n self.testcase_variants[1].security_flag = True\n\n for v in self.testcase_variants:\n v.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def 
expected_result() -> Mapping[str, Any]:\n result = {\n \"count\": 3,\n \"min_value\": 1.0,\n \"time\": 1.0,\n \"window_size\": 1.0,\n \"max_value\": 3.0,\n \"mean_value\": 2.0,\n \"median_value\": 2.0,\n \"stdev_value\": 1.0,\n }\n return result", "def test_call_result_as_dict(self):\r\n exp_assignments = rdp_test1_expected_dict\r\n min_confidence = self.default_app.Params['Confidence']\r\n\r\n # Since there is some variation in the assignments, run\r\n # 10 trials and make sure we get the expected result at least once\r\n num_trials = 10\r\n unverified_seq_ids = set(exp_assignments.keys())\r\n for i in range(num_trials):\r\n obs_assignments = self.default_app(self.tmp_seq_filepath)\r\n for seq_id in list(unverified_seq_ids):\r\n obs_assignment, obs_confidence = obs_assignments[seq_id]\r\n exp_assignment, exp_confidence = exp_assignments[seq_id]\r\n self.assertTrue(obs_confidence >= min_confidence)\r\n if obs_assignment == exp_assignment:\r\n unverified_seq_ids.remove(seq_id)\r\n if not unverified_seq_ids:\r\n break\r\n\r\n messages = []\r\n for seq_id in unverified_seq_ids:\r\n messages.append(\r\n \"Unable to verify %s in %s trials\" % (seq_id, num_trials))\r\n messages.append(\" Expected: %s\" % exp_assignments[seq_id][0])\r\n messages.append(\" Observed: %s\" % obs_assignments[seq_id][0])\r\n messages.append(\" Confidence: %s\" % obs_assignments[seq_id][1])\r\n\r\n # make sure all taxonomic results were correct at least once\r\n self.assertFalse(unverified_seq_ids, msg='\\n'.join(messages))", "def test_get_triangle_dict_all_int(self):\n triangle = {'a': 1, 'b': 2, 'c': 3}\n result = get_triangle_type(triangle)\n self.assertEqual(result, 'scalene')", "def test_log_remaining_rounds(self):\r\n\r\n # all empty\r\n self.assertEqual(log_remaining_rounds(dict(), dict(), 0), 0)\r\n\r\n # with something in it\r\n ids = dict({'1': 1, '2': 1, '3': 1, '4': 1})\r\n mapping = dict({'1': [5, 6], '2': [], '3': [7], '4': [8, 9, 10]})\r\n\r\n self.assertEqual(log_remaining_rounds(ids, mapping, 0), 4)\r\n self.assertEqual(log_remaining_rounds(ids, mapping, 1), 3)\r\n self.assertEqual(log_remaining_rounds(ids, mapping, 2), 2)\r\n self.assertEqual(log_remaining_rounds(ids, mapping, 5), 0)", "def func1(key, my_test_dict=my_test_dict):\n return key in my_test_dict", "def test_values(self):\n self.assertEqual([self.expected_described_model], list(self.mapped_model.values()))", "def test_getitem(self):\n for name, det in self.sampler.detectors.items():\n fromGetItem = self.sampler[name]\n self.assertIs(det, fromGetItem, msg=name)\n with self.assertRaises(KeyError):\n self.sampler['this should fail']", "def test_isadict(self):\n # It is a dict-subclass, so this kind of pointless, but it doen't hurt.\n d, m = dict(a=5), ConfigDict(a=5)\n d['key'], m['key'] = 'value', 'value'\n d['k2'], m['k2'] = 'v1', 'v1'\n d['k2'], m['k2'] = 'v2', 'v2'\n self.assertEqual(d.keys(), m.keys())\n self.assertEqual(list(d.values()), list(m.values()))\n self.assertEqual(d.get('key'), m.get('key'))\n self.assertEqual(d.get('cay'), m.get('cay'))\n self.assertEqual(list(iter(d)), list(iter(m)))\n self.assertEqual([k for k in d], [k for k in m])\n self.assertEqual(len(d), len(m))\n self.assertEqual('key' in d, 'key' in m)\n self.assertEqual('cay' in d, 'cay' in m)\n self.assertRaises(KeyError, lambda: m['cay'])", "def test_RestrictingNodeTransformer__visit_In_Dict():\n assert restricted_eval('2 in {1: 1, 2: 2, 3: 3}') is True", "def test_create_mimic_dict_1(self):\n result = self.module.create_mimic_dict(\"imdev.txt\")\n 
self.assertIsInstance(\n result, dict,\n \"The return value of create_mimic_dict() should be a dict.\"\n )", "def step_impl(context, key):\n print('Collected entries:')\n field_value = context.response_json[key]\n collected_entries = _get_collected_entries(field_value)\n\n print('Tested entries:')\n for row in context.table:\n value = row[key]\n print(' ', value)\n assert value in collected_entries", "def test_getitem_dynamic(self):\n self.assertEqual(self.tester['SEASON_ENVIRONMENT'], 'winter')\n self.assertEqual(self.tester['depth'], 0.15)", "def test_ignored_crash_type_for_varinat_analysis(self):\n self.testcases[0].job_type = 'some_type1'\n self.testcases[0].project_name = 'project1'\n self.testcases[0].crash_state = 'abcde'\n self.testcases[0].one_time_crasher_flag = False\n self.testcases[0].crash_type = 'crash_type1'\n self.testcases[0].security_flag = True\n self.testcases[1].job_type = 'some_type2'\n self.testcases[1].project_name = 'project1'\n self.testcases[1].crash_state = 'vwxyz'\n self.testcases[1].crash_type = 'Data race'\n self.testcases[1].one_time_crasher_flag = False\n self.testcases[1].security_flag = True\n\n for t in self.testcases:\n t.put()\n\n # testcase2's varinat will be evaluated against testcase1\n self.testcase_variants[0].job_type = 'fake_engine_asan_project1'\n self.testcase_variants[0].testcase_id = self.testcases[0].key.id()\n self.testcase_variants[0].security_flag = True\n self.testcase_variants[1].job_type = 'some_type1'\n self.testcase_variants[1].crash_state = 'abcde'\n self.testcase_variants[1].crash_type = 'crash_type1'\n self.testcase_variants[1].testcase_id = self.testcases[1].key.id()\n self.testcase_variants[1].security_flag = True\n\n for v in self.testcase_variants:\n v.put()\n\n grouper.group_testcases()\n\n for index, t in enumerate(self.testcases):\n self.testcases[index] = data_handler.get_testcase_by_id(t.key.id())\n self.assertEqual(self.testcases[index].group_id, 0)\n self.assertTrue(self.testcases[index].is_leader)", "def test_dict_type(self):\n\n expected = TypeError\n input_ = []\n with self.assertRaises(expected):\n math.factorial(input_)", "def step_impl(context, key, parent):\n collected_entries = set()\n print('Collected entries:')\n for row in context.table:\n field_value = row[key]\n # Some fields may be a list of values\n if isinstance(field_value, list):\n for item in field_value:\n print(' ', item)\n collected_entries.add(item)\n else: # assume a simple scalar\n print(' ', field_value)\n collected_entries.add(field_value)\n\n print('Tested entries:')\n tested_entries = set()\n for entry in context.response_json:\n field_value = entry.get(parent).get(key)\n if isinstance(field_value, list):\n for item in field_value:\n tested_entries.add(item)\n else: # assume a simple scalar\n tested_entries.add(field_value)\n\n for item in tested_entries:\n print(' ', item)\n assert item in collected_entries", "def test8(self) -> None:\n config = cconfig.Config()\n dict_ = {\"key0\": config, \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\",), config), ((\"key1\",), \"value1\")]\n self.assertListEqual(actual_result, expected_result)", "def get_test_info(test_number, lookup_dict, test_limits_dict, test_mode):\n get_limit_args = [\"test_suite_name\", \"test_name\", \"test_pin\", \"test_mode\", \"test_limits_dict\"]\n a_dict = lookup_dict[test_number]\n get_limit_entries = [a_dict[\"tsname\"], a_dict[\"tname\"], a_dict[\"tpin\"], test_mode, test_limits_dict]\n return dict([(a, 
b) for a, b in zip(get_limit_args, get_limit_entries)])", "def testcases(self, request, *args, **kwargs):\n response = self.retrieve(request, *args, **kwargs)\n response.data = response.data['testcases']\n return response", "def test7(self) -> None:\n dict_ = {\"key0\": {\"key00\": {}}, \"key1\": \"value1\"}\n actual_result = list(hdict.get_nested_dict_iterator(dict_))\n expected_result = [((\"key0\", \"key00\"), {}), ((\"key1\",), \"value1\")]\n self.assertListEqual(actual_result, expected_result)", "def test_values(self):\n obs = self.tester.values()\n self.assertTrue(isinstance(obs, Iterable))\n exp = {'ANL', True, True, 'ENVO:soil', 'completed',\n datetime(2011, 11, 11, 13, 00, 00), '1001:M7',\n 'Cannabis Soil Microbiome', 'winter', 'n',\n '64.6 sand, 17.6 silt, 17.8 clay', '1118232', 0.15, '3483',\n 'root metagenome', 0.164, 114, 15, 1.41, 7.15, 0,\n 'ENVO:Temperate grasslands, savannas, and shrubland biome',\n 'GAZ:United States of America', 6.94, 'SKB8', 5,\n 'Burmese root', 'ENVO:plant-associated habitat', 74.0894932572,\n 65.3283470202}\n self.assertEqual(set(obs), exp)", "def getExpectations():", "def test_map(name, num_keys, num_tests=50):\n map_func = eval(name)\n m = map_func()\n all_keys = list(range(num_keys))\n for k in all_keys:\n m.add(k, 'a')\n times = []\n for n in range(num_tests):\n lookup = random.choice(all_keys)\n start = etime()\n m.get(lookup)\n end = etime()\n t = end - start\n times.append(t)\n average = sum(times) / len(times)\n return average", "def test_creation_dict():\n with pytest.raises(ValueError) as __:\n value = dict()\n __ = param.Integer(value=value)", "def get_performance_test_cases(test_suite):\n return get_cases(test_suite, r'test_perf_')", "def test_e(self):\n user_dict = {'A': 3, 'B': 4, 'C': 5, 'D': 6, 'E': 7}\n user_key = 'e'\n self.assertEqual(7, switch_average(user_dict, user_key.upper()))", "def test_get_facet_dictionary_content(self):\n pass", "def test_data():\n return {\"David Andrews\" : [200.50, 400.00, 250.75],\n \"John Goodfellow\" : [25.00, 175.50],\n \"Mary Suzuki\" : [75.00, 125.00, 250.00],\n \"Bonney Lake\" : [500.50, 700.75, 500.25],\n \"DeMarcus Rollins\" : [155.00, 165.00]\n }", "def checkDicts(steps, foamCase='.'):\n foamFiles = getFoamFiles(foamCase)\n Files = [foamFile.split('/')[-1] for foamFile in foamFiles]\n result = 0\n exceptions = ['renumberMesh', 'prepare', 'checkMesh']\n for step in steps:\n if step=='run':\n dictFile = 'controlDict'\n elif step in exceptions:\n pass\n else: \n dictFile = step + 'Dict'\n\n if dictFile not in Files:\n if any(step in file for file in Files):\n print(\"A dictionary file for \" + step + \" is present, but has to be called with -dict option, take caution.\")\n else:\n print(\"Cound not find \" + dictFile + \" in the openFOAM case \" + foamCase)\n result = 1\n return result", "def test_bad_valuetype():\n test = [{'key': {'key1': 'val'}}, ['key']]\n t_result = fetch_data_by_keys(*test)\n assert not is_successful(t_result)\n assert 'Bad data found' in str(t_result.failure())", "def __generate_dict(self, test_output_dir, types, _table):\n # dump json from file to dict\n\n table_dict = CaseDump(os.path.join(test_output_dir, _table + OUT_SUFFIX), CASE_FILE_TYPE_JSON).dump()\n\n # foreach key from statistic report, split it into table/type/key/sub_value\n type_dict = {}\n for _key in table_dict.keys():\n # wipe off table\n _key = _key.replace(\"%s_\" % _table, \"\")\n for _type in types:\n # wipe off type\n if _type and _type in _key:\n if _type not in type_dict.keys():\n type_dict[_type] = 
{}\n _key = _key.replace(\"%s_\" % _type, \"\")\n # if has sub_value, set RESULT_DICT[table][type][key] = sub_value\n # else set RESULT_DICT[table][type][key] = None\n if \"#\" in _key:\n sub_value = _key.split(\"#\")\n type_dict[_type][sub_value[0]] = sub_value[1]\n else:\n type_dict[_type][_key] = \"\"\n break\n\n global RESULT_DICT\n RESULT_DICT[_table] = type_dict" ]
[ "0.70761895", "0.65705884", "0.647785", "0.6304288", "0.6191103", "0.61260873", "0.6123775", "0.60676205", "0.60428697", "0.60196096", "0.59500134", "0.58859193", "0.5870065", "0.5868873", "0.5839127", "0.5819128", "0.57976145", "0.5794307", "0.57906353", "0.57838327", "0.57612455", "0.5760646", "0.57536936", "0.57531035", "0.575071", "0.57481056", "0.572681", "0.5693534", "0.5678608", "0.56696934", "0.5667924", "0.5651233", "0.56264746", "0.5617171", "0.5561739", "0.55613077", "0.55595237", "0.55588377", "0.5548343", "0.5545291", "0.5531201", "0.5527918", "0.5526695", "0.55210555", "0.5514995", "0.55098015", "0.5506271", "0.5506271", "0.5494414", "0.54890686", "0.5484938", "0.5480064", "0.5479216", "0.54789174", "0.5475977", "0.5475977", "0.5471786", "0.546603", "0.5462505", "0.545493", "0.5454201", "0.54507345", "0.5450508", "0.5449062", "0.5442019", "0.5431838", "0.5431819", "0.5419276", "0.54168", "0.5415273", "0.5413308", "0.5410483", "0.5405909", "0.5403121", "0.5397007", "0.53937995", "0.53936094", "0.53882277", "0.5384931", "0.5379649", "0.53791916", "0.5372013", "0.5371926", "0.53662556", "0.53649294", "0.5363419", "0.5353833", "0.5345441", "0.53437895", "0.53437555", "0.53427863", "0.5328768", "0.53281176", "0.5326168", "0.53213996", "0.5319267", "0.5319037", "0.5311792", "0.53088087", "0.5307725" ]
0.93498963
0
Test case for get_sync_history
def test_get_sync_history(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_tracker_getHistory():\n\n trackers, cap = init_tracker()\n tr = trackers[0]\n tr.addHistory([1, 1, 1, 1])\n\n assert tr.getHistory()[1] == [1, 1, 1, 1]", "def test_get_team_history(self):\n pass", "def QueryHistory(self):\n return []", "def testGetHistory(self):\n self.maxDiff = None\n container_obj = self.explorer_object.GetContainer(\n 'de44dd97cfd1c8d1c1aad7f75a435603991a7a39fa4f6b20a69bf4458809209c')\n expected = {\n '1cee97b18f87b5fa91633db35f587e2c65c093facfa2cbbe83d5ebe06e1d9125':\n collections.OrderedDict({\n 'size': 0\n }),\n 'df557f39d413a1408f5c28d8aab2892f927237ec22e903ef04b331305130ab38':\n collections.OrderedDict({\n 'created_at':\n '2018-12-26T08:20:42.687925+00:00',\n 'container_cmd': '/bin/sh -c #(nop) ADD file:ce026b62356eec3ad1214f92be2c9dc063fe205bd5e600be3492c4dfb17148bd in / ',\n 'size': 1154361\n })\n }\n\n self.assertEqual(expected, container_obj.GetHistory())", "def testGetHistory(self):\n self.maxDiff = None\n container_obj = self.explorer_object.GetContainer(\n '7b02fb3e8a665a63e32b909af5babb7d6ba0b64e10003b2d9534c7d5f2af8966')\n expected = collections.OrderedDict({\n 'sha256:'\n '7968321274dc6b6171697c33df7815310468e694ac5be0ec03ff053bb135e768': {\n 'created_at': '2017-01-13T22:13:54.401355+00:00',\n 'container_cmd': '/bin/sh -c #(nop) CMD [\"sh\"]',\n 'size': 0\n }\n })\n\n self.assertEqual(expected, container_obj.GetHistory())", "def testGetHistory(self):\n self.maxDiff = None\n container_obj = self.explorer_object.GetContainer(\n '5dc287aa80b460652a5584e80a5c8c1233b0c0691972d75424cf5250b917600a')\n expected = collections.OrderedDict({\n 'sha256:'\n '5b0d59026729b68570d99bc4f3f7c31a2e4f2a5736435641565d93e7c25bd2c3': {\n 'created_at': '2018-01-24T04:29:35.590938+00:00',\n 'container_cmd': '/bin/sh -c #(nop) CMD [\"sh\"]',\n 'size': 0\n }\n })\n self.assertEqual(expected, container_obj.GetHistory())", "def test_tracker_addHistory():\n\n trackers, cap = init_tracker()\n tr = trackers[0]\n tr.addHistory([1, 1, 1, 1])\n\n assert len(tr.history) >= 1", "async def test_retrieve_history_orders_by_ticket(self):\n history_orders = {\n 'historyOrders': [{\n 'clientId': 'TE_GBPUSD_7hyINWqAlE',\n 'currentPrice': 1.261,\n 'currentVolume': 0,\n 'doneTime': '2020-04-15T02:45:06.521Z',\n 'id': '46214692',\n 'magic': 1000,\n 'platform': 'mt5',\n 'positionId': '46214692',\n 'state': 'ORDER_STATE_FILLED',\n 'symbol': 'GBPUSD',\n 'time': '2020-04-15T02:45:06.260Z',\n 'type': 'ORDER_TYPE_BUY',\n 'volume': 0.07\n }],\n 'synchronizing': False\n }\n client.get_history_orders_by_ticket = AsyncMock(return_value=history_orders)\n actual = await api.get_history_orders_by_ticket('46214692')\n assert actual == history_orders\n client.get_history_orders_by_ticket.assert_called_with('accountId', '46214692')", "def history():", "def testGetHistory(self):\n self.maxDiff = None\n container_obj = self.explorer_object.GetContainer(\n '8e8b7f23eb7cbd4dfe7e91646ddd0e0f524218e25d50113559f078dfb2690206')\n expected = {\n 'sha256:8ac48589692a53a9b8c2d1ceaa6b402665aa7fe667ba51ccc03002300856d8c7':\n collections.OrderedDict({\n 'created_at': '2018-04-05T10:41:28.876407+00:00',\n 'container_cmd': '/bin/sh -c #(nop) CMD [\"sh\"]',\n 'size': 0\n })\n }\n self.assertEqual(expected, container_obj.GetHistory(container_obj))", "async def test_retrieve_history_orders_by_time_range(self):\n history_orders = {\n 'historyOrders': [{\n 'clientId': 'TE_GBPUSD_7hyINWqAlE',\n 'currentPrice': 1.261,\n 'currentVolume': 0,\n 'doneTime': '2020-04-15T02:45:06.521Z',\n 'id': '46214692',\n 'magic': 1000,\n 
'platform': 'mt5',\n 'positionId': '46214692',\n 'state': 'ORDER_STATE_FILLED',\n 'symbol': 'GBPUSD',\n 'time': '2020-04-15T02:45:06.260Z',\n 'type': 'ORDER_TYPE_BUY',\n 'volume': 0.07\n }],\n 'synchronizing': False\n }\n client.get_history_orders_by_time_range = AsyncMock(return_value=history_orders)\n start_time = datetime.now() - timedelta(seconds=1)\n end_time = datetime.now()\n actual = await api.get_history_orders_by_time_range(start_time, end_time, 1, 100)\n assert actual == history_orders\n client.get_history_orders_by_time_range.assert_called_with('accountId', start_time, end_time, 1, 100)", "def fetch_history(*args, **kwargs):\n return collect_history(*args, **kwargs)", "def get_history(self):\r\n\r\n return self.board_history", "def test_get_derived_metric_history(self):\n pass", "def getChanges():", "async def test_retrieve_history_orders_by_position(self):\n history_orders = {\n 'historyOrders': [{\n 'clientId': 'TE_GBPUSD_7hyINWqAlE',\n 'currentPrice': 1.261,\n 'currentVolume': 0,\n 'doneTime': '2020-04-15T02:45:06.521Z',\n 'id': '46214692',\n 'magic': 1000,\n 'platform': 'mt5',\n 'positionId': '46214692',\n 'state': 'ORDER_STATE_FILLED',\n 'symbol': 'GBPUSD',\n 'time': '2020-04-15T02:45:06.260Z',\n 'type': 'ORDER_TYPE_BUY',\n 'volume': 0.07\n }],\n 'synchronizing': False\n }\n client.get_history_orders_by_position = AsyncMock(return_value=history_orders)\n actual = await api.get_history_orders_by_position('46214692')\n assert actual == history_orders\n client.get_history_orders_by_position.assert_called_with('accountId', '46214692')", "def test_query_trade_history_not_shared_cache(data_dir):\n\n def first_trades(currencyPair, start, end): # pylint: disable=unused-argument\n return {'BTC': [{'data': 1}]}\n\n def second_trades(currencyPair, start, end): # pylint: disable=unused-argument\n return {'BTC': [{'data': 2}]}\n\n messages_aggregator = MessagesAggregator()\n end_ts = 99999999999\n first_user_dir = os.path.join(data_dir, 'first')\n os.mkdir(first_user_dir)\n second_user_dir = os.path.join(data_dir, 'second')\n os.mkdir(second_user_dir)\n a = Poloniex(b'', b'', first_user_dir, messages_aggregator)\n with patch.object(a, 'returnTradeHistory', side_effect=first_trades):\n result1 = a.query_trade_history(0, end_ts, end_ts)\n\n b = Poloniex(b'', b'', second_user_dir, messages_aggregator)\n with patch.object(b, 'returnTradeHistory', side_effect=second_trades):\n result2 = b.query_trade_history(0, end_ts, end_ts)\n\n assert result1['BTC'][0]['data'] == 1\n assert result2['BTC'][0]['data'] == 2", "def get_history_data(self, exchange, pair, timedelta):\n return self.ccxt.get_history_data(exchange, pair, timedelta)", "def get_history():\n return response_texts_to_entries(make_post_request(HISTORY_API, data={\"k\": config[\"api_key\"]}))", "def test_users_can_track_edit_history(self):\n token = self.create_user(VALID_USER_DATA)\n response = self.create_article(VALID_ARTICLE, token)\n\n response = self.create_comment(\n token=token,\n parentId=0,\n slug=response.data['article']['slug']\n )\n update_comment_url = reverse('crud-comment', kwargs={\n 'id': response.data['comment']['id']\n })\n response = self.client.put(\n update_comment_url,\n HTTP_AUTHORIZATION=token,\n data=VALID_COMMENT_2,\n format='json'\n )\n get_comment_url = reverse(\n 'crud-comment',\n kwargs={'id':response.data['id']}\n )\n token2 = self.create_user(VALID_USER_DATA_2)\n response = self.client.get(\n get_comment_url,\n HTTP_AUTHORIZATION=token2\n )\n self.assertEqual(\n response.status_code,\n 
status.HTTP_200_OK\n )\n self.assertIn(\n 'version',\n response.data['commentHistory'],\n )", "def get_history(self):\n return self.history", "def assert_history(self, rows):\r\n self.assertEqual(self.parse_rows(rows), self.read_history())", "async def test_get_dispatch_route_history(client):\n params = [('access_token', 'access_token_example'),\n ('start_time', 56),\n ('end_time', 56)]\n headers = { \n 'Accept': 'application/json',\n }\n response = await client.request(\n method='GET',\n path='/v1/fleet/dispatch/routes/{route_id}/history'.format(route_id=56),\n headers=headers,\n params=params,\n )\n assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')", "def get_history(self, clocked: 'Clocked'):\n history = {}\n\n new_tick = self._get_new_tick(clocked)\n\n vclock_history = attributes.get_history(clocked, 'vclock')\n is_vclock_unchanged = (vclock_history.unchanged and\n new_tick == vclock_history.unchanged[0])\n\n for prop in self.history_models.keys():\n value = self._get_prop_value(clocked, prop)\n\n if value is not NOT_FOUND_SENTINEL:\n history[prop] = value\n\n return history, is_vclock_unchanged", "def slot_history_changed(self, _sender, _data):\r\n last_candle = self.history.last_candle()\r\n if last_candle:\r\n self.client.history_last_candle = last_candle.tim", "def get_order_history(self):\n return self.__call__('orders', 'getorderhistory')", "def test_bookmark_sync(self):\r\n self._get_good_request(content=True, second_bmark=True)\r\n\r\n # test that we only get one resultback\r\n res = self.testapp.get('/api/v1/admin/extension/sync',\r\n params={'api_key': API_KEY},\r\n status=200)\r\n\r\n self.assertEqual(\r\n res.status, \"200 OK\",\r\n msg='Get status is 200, ' + res.status)\r\n\r\n self.assertTrue(\r\n GOOGLE_HASH in res.body,\r\n \"The google hash id should be in the json: \" + res.body)\r\n self.assertTrue(\r\n BMARKUS_HASH in res.body,\r\n \"The bmark.us hash id should be in the json: \" + res.body)\r\n self._check_cors_headers(res)", "def mock_history_processing(rotki: Rotkehlchen, remote_errors=False):\n mock_function = check_result_of_history_creation\n if remote_errors:\n mock_function = check_result_of_history_creation_for_remote_errors\n accountant_patch = patch.object(\n rotki.accountant,\n 'process_history',\n side_effect=mock_function,\n )\n return accountant_patch", "def test_setInputHistory(self):\n self.widget.setInputHistory(History([\"a\", \"b\", \"c\"]))\n self.assertEqual(self.widget.getInputHistory(), [\"a\", \"b\", \"c\"])", "def test_tracker_clearHistory():\n\n trackers, cap = init_tracker()\n tr = trackers[0]\n tr.clearHistory()\n\n assert len(tr.getHistory()) == 0", "def slot_history_changed(self, history, _dummy):\r\n pass", "def verify_history(client, fragment, design=None, reverse=False, server_name='beeswax'):\n resp = client.get('/%(server_name)s/query_history' % {'server_name': server_name})\n my_assert = reverse and assert_false or assert_true\n my_assert(fragment in resp.content, resp.content)\n if design:\n my_assert(design in resp.content, resp.content)\n\n if resp.context:\n try:\n return len(resp.context['page'].object_list)\n except KeyError:\n pass\n\n LOG.warning('Cannot find history size. 
Response context clobbered')\n return -1", "def History(self):\n return self.historydict.get('history', [])", "def test_get_events_history_filtering_by_timestamp(rotkehlchen_api_server: 'APIServer'):\n tx_hex = deserialize_evm_tx_hash('0xb226ddb8cbb286a7a998a35263ad258110eed5f923488f03a8d890572cd4608e') # noqa: E501\n ethereum_inquirer = rotkehlchen_api_server.rest_api.rotkehlchen.chains_aggregator.ethereum.node_inquirer # noqa: E501\n database = rotkehlchen_api_server.rest_api.rotkehlchen.data.db\n get_decoded_events_of_transaction(\n evm_inquirer=ethereum_inquirer,\n database=database,\n tx_hash=tx_hex,\n )\n # Call time range\n from_timestamp = 1627401169\n to_timestamp = 1627401170\n async_query = random.choice([False, True])\n with patch(\n 'rotkehlchen.chain.ethereum.modules.sushiswap.sushiswap.Sushiswap.get_balances',\n side_effect=lambda _: {},\n ):\n response = requests.get(\n api_url_for(\n rotkehlchen_api_server,\n 'modulestatsresource',\n module='sushiswap',\n ),\n json={\n 'async_query': async_query,\n 'from_timestamp': from_timestamp,\n 'to_timestamp': to_timestamp,\n },\n )\n if async_query:\n task_id = assert_ok_async_response(response)\n outcome = wait_for_async_task(rotkehlchen_api_server, task_id, timeout=120)\n assert outcome['message'] == ''\n result = outcome['result']\n else:\n result = assert_proper_response_with_result(response)\n\n events_balances = result[TEST_EVENTS_ADDRESS_1]\n\n assert len(events_balances) == 1", "def onRegisterHistory(self):\n pass", "def test_empty_history_reencoding(self):\n data = \"\"\"\n {\n \"resultType\" : \"history\",\n \"version\" : \"0.1alpha\",\n \"uploadKeys\" : [\n { \"name\" : \"emk\", \"key\" : \"abc\" },\n { \"name\" : \"ec\" , \"key\" : \"def\" }\n ],\n \"generator\" : { \"name\" : \"Yapeal\", \"version\" : \"11.335.1737\" },\n \"currentTime\" : \"2011-10-22T15:46:00+00:00\",\n \"columns\" : [\"date\",\"orders\",\"quantity\",\"low\",\"high\",\"average\"],\n \"rowsets\" : [\n {\n \"generatedAt\" : \"2011-10-22T15:42:00+00:00\",\n \"regionID\" : 10000065,\n \"typeID\" : 11134,\n \"rows\" : []\n }\n ]\n }\n \"\"\"\n decoded_list = unified.parse_from_json(data)\n re_encoded_list = unified.encode_to_json(decoded_list)\n re_decoded_list = json.loads(re_encoded_list)\n # There should always be one rowset, even if it ends up being empty.\n self.assertEqual(1, len(re_decoded_list['rowsets']))\n first_rowset = re_decoded_list['rowsets'][0]\n # Check for the empty rowsets with all data intact.\n self.assertListEqual(first_rowset['rows'], [])\n self.assertTrue('generatedAt' in first_rowset)\n self.assertTrue('regionID' in first_rowset)\n self.assertTrue('typeID' in first_rowset)", "def testDirtyRefresh(self):\n \n pass", "def GetHistory(index=0):\n if index == \"clear\":\n state_mgr.entire_history = []\n else:\n print state_mgr.entire_history[int(index):]", "def test_update_revision_from_history_without_diffsets(self):\n diffset_history = DiffSetHistory.objects.create()\n\n diffset = DiffSet()\n diffset.update_revision_from_history(diffset_history)\n\n self.assertEqual(diffset.revision, 1)", "async def fetch_history(stock, start, end):\n disable_stdout()\n with timer(logtime(\"ts.get_h_data('%s', autype=None, start='%s', end='%s', drop_factor=False)\" % (stock, start, end))):\n df = await wait_concurrent(event_loop, proc_pool, ts.get_h_data, stock, autype=None, start=start, end=end, drop_factor=False, pause=0.05)\n if df is None:\n logging.debug(\"no history data for stock: ts.get_h_data('%s', autype=None, start='%s', end='%s')\" % 
(stock, start, end))\n return\n df['stock'] = stock\n ans = df.reset_index()\n return history_buffer.proc_data(ans)", "def check_history(progress_controller=None):\n if progress_controller is None:\n progress_controller = ProgressControllerBase()\n excluded_types = [\"mesh\", \"shadingEngine\", \"groupId\", \"RedshiftProxyMesh\"]\n nodes_with_history = []\n\n # delete any objectSets with name textureEditorIsolateSelectSet for Maya 2018\n pm.delete(pm.ls(\"textureEditorIsolateSelectSet*\"))\n\n # get all shapes\n all_shapes = pm.ls(type=\"mesh\")\n progress_controller.maximum = len(all_shapes)\n for node in all_shapes:\n history_nodes = []\n for h_node in node.listHistory(pdo=1, lv=1):\n if h_node.type() not in excluded_types:\n history_nodes.append(h_node)\n\n if len(history_nodes) > 0:\n nodes_with_history.append(node)\n progress_controller.increment()\n\n progress_controller.complete()\n if len(nodes_with_history):\n pm.select(nodes_with_history)\n # there is history\n raise PublishError(\n \"There is history on:\\n\\n\"\n \"%s\"\n \"\\n\\n\"\n \"there should be no \"\n \"history in Model versions\"\n % \"\\n\".join(map(lambda x: x.name(), nodes_with_history[:MAX_NODE_DISPLAY]))\n )", "def test_history_SinglePlayer_HasHistory(test_name):\n\n env = build_test_env(\n test_name,\n \"tests/gdy/test_step_SinglePlayer_SingleActionType.yaml\"\n )\n\n obs, reward, done, info = env.step(1)\n\n expected_history = [{\n 'PlayerId': 1,\n 'ActionName': 'move',\n 'Tick': 0,\n 'Rewards': {},\n 'Delay': 0,\n 'SourceObjectName': 'avatar',\n 'DestinationObjectName': '_empty',\n 'SourceObjectPlayerId': 1,\n 'DestinationObjectPlayerId': 0,\n 'SourceLocation': [2.0, 3.0],\n 'DestinationLocation': [1.0, 3.0]\n }]\n\n assert info['History'] == expected_history", "def history():\n backup_history()\n yield\n reset_history()", "def get_history(self):\n return self.__history[:]", "def query_history(self, req: HistoryRequest) -> List[BarData]:\n history = []\n\n start_time = generate_datetime3(req.start)\n end_time = generate_datetime3(req.end)\n\n mt5_req = {\n \"type\": FUNCTION_QUERYHISTORY,\n \"symbol\": req.symbol.replace('-', '.'),\n \"interval\": INTERVAL_VT2MT[req.interval],\n \"start_time\": start_time,\n \"end_time\": end_time,\n }\n packet = self.client.send_request(mt5_req)\n\n if packet[\"result\"] == -1:\n self.write_log(\"获取历史数据失败\")\n else:\n for d in packet[\"data\"]:\n bar = BarData(\n symbol=req.symbol.replace('.', '-'),\n exchange=Exchange.OTC,\n datetime=generate_datetime2(d[\"time\"]),\n interval=req.interval,\n volume=d[\"real_volume\"],\n open_price=d[\"open\"],\n high_price=d[\"high\"],\n low_price=d[\"low\"],\n close_price=d[\"close\"],\n gateway_name=self.gateway_name\n )\n history.append(bar)\n\n data = packet[\"data\"]\n begin = generate_datetime2(data[0][\"time\"])\n end = generate_datetime2(data[-1][\"time\"])\n\n msg = f\"获取历史数据成功,{req.symbol.replace('.','-')} - {req.interval.value},{begin} - {end}\"\n self.write_log(msg)\n\n return history", "def test_patient_history_list(self):\n stats_data = self.stats_data\n facility = self.facility\n facility_data = self.facility_data.copy()\n\n facility_data[\"district\"] = self.district\n facility_2 = self.create_facility(district=self.district, name=\"Facility 2\")\n\n FacilityPatientStatsHistory.objects.create(\n facility=facility, entry_date=datetime.date(2020, 4, 1), **stats_data\n )\n FacilityPatientStatsHistory.objects.create(\n facility=facility, entry_date=datetime.date(2020, 4, 2), **stats_data\n )\n 
FacilityPatientStatsHistory.objects.create(\n facility=facility_2, entry_date=datetime.date(2020, 4, 2), **stats_data\n )\n\n response = self.client.get(self.get_url())\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertDictEqual(\n response.json(),\n {\n \"count\": 2,\n \"next\": None,\n \"previous\": None,\n \"results\": [\n {\n \"id\": mock_equal,\n \"facility\": str(facility.external_id),\n \"entry_date\": datetime.date(2020, 4, 2).strftime(\"%Y-%m-%d\"),\n \"created_date\": mock_equal,\n \"modified_date\": mock_equal,\n **stats_data,\n },\n {\n \"id\": mock_equal,\n \"facility\": str(facility.external_id),\n \"entry_date\": datetime.date(2020, 4, 1).strftime(\"%Y-%m-%d\"),\n \"created_date\": mock_equal,\n \"modified_date\": mock_equal,\n **stats_data,\n },\n ],\n },\n )", "def new_get_buys_transaction_history(self, cb_account):\n date: datetime = now()\n if cb_account == \"wallet_id_btc\":\n return MockAPIObject(\n data=[{\n \"created_at\": str(date + timedelta(days=-1)),\n \"resource\": \"buy\",\n \"status\": \"completed\",\n \"amount\": {\n \"amount\": 10,\n \"currency\": \"BTC\"\n },\n \"total\": {\n \"amount\": 10,\n \"currency\": \"BTC\"\n },\n \"fees\": [{\n \"amount\": {\n \"amount\": 1,\n \"currency\": \"EUR\"\n }\n }]\n }, {\n \"created_at\": str(date + timedelta(days=1)),\n \"resource\": \"buy\",\n \"status\": \"completed\",\n \"amount\": {\n \"amount\": 5,\n \"currency\": \"BTC\"\n },\n \"total\": {\n \"amount\": 5,\n \"currency\": \"BTC\"\n },\n \"fees\": [{\n \"amount\": {\n \"amount\": 0.5,\n \"currency\": \"EUR\"\n }\n }]\n }])\n else:\n return MockAPIObject()", "def test_historyReport(self):\n history = self.o.getInterface(\"history\")\n history.interactBOL()\n history.interactEOL()\n testLoc = self.o.r.core.spatialGrid[0, 0, 0]\n testAssem = self.o.r.core.childrenByLocator[testLoc]\n fileName = history._getAssemHistoryFileName(testAssem)\n actualFilePath = os.path.join(THIS_DIR, fileName)\n expectedFileName = os.path.join(THIS_DIR, fileName.replace(\".txt\", \"-ref.txt\"))\n # copy from fast path so the file is retrievable.\n shutil.move(fileName, os.path.join(THIS_DIR, fileName))\n\n self.compareFilesLineByLine(expectedFileName, actualFilePath)\n\n # test that detailAssemblyNames() is working\n self.assertEqual(len(history.detailAssemblyNames), 1)\n history.addAllFuelAssems()\n self.assertEqual(len(history.detailAssemblyNames), 51)", "def _grab_history(self):\n self.data['history_lines'] = []\n self.data['history_file'] = None\n self.data['history_encoding'] = None\n self.data['headings'] = []\n self.data['history_last_release'] = ''\n self.data['history_insert_line_here'] = 0\n default_location = None\n config = self.setup_cfg.config\n if config and config.has_option('zest.releaser', 'history_file'):\n default_location = config.get('zest.releaser', 'history_file')\n history_file = self.vcs.history_file(location=default_location)\n self.data['history_file'] = history_file\n if not history_file:\n logger.warn(\"No history file found\")\n return\n logger.debug(\"Checking %s\", history_file)\n history_lines, history_encoding = read_text_file(history_file)\n history_lines = history_lines.split('\\n')\n headings = utils.extract_headings_from_history(history_lines)\n if not headings:\n logger.warn(\"No detectable version heading in the history \"\n \"file %s\", history_file)\n return\n self.data['history_lines'] = history_lines\n self.data['history_encoding'] = history_encoding\n self.data['headings'] = headings\n\n # Grab last header.\n start = 
headings[0]['line']\n if len(headings) > 1:\n # Include the next header plus underline, as this is nice\n # to show in the history_last_release.\n end = headings[1]['line'] + 2\n else:\n end = len(history_lines)\n history_last_release = '\\n'.join(history_lines[start:end])\n self.data['history_last_release'] = history_last_release\n\n # Add line number where an extra changelog entry can be inserted. Can\n # be useful for entry points. 'start' is the header, +1 is the\n # underline, +2 is probably an empty line, so then we should take +3.\n # Or rather: the first non-empty line.\n insert = start + 2\n while insert < end:\n if history_lines[insert].strip():\n break\n insert += 1\n self.data['history_insert_line_here'] = insert", "def test_Sms_history_page(self):\n self.client.login(username='arch', password='admin')\n response = self.client.get(reverse('echo:sms_history'))\n self.assertEqual(response.status_code, 200)", "def get_history(hdr):\n return hdr['HISTORY']", "def test_simple_history_deserialization(self):\n data = \"\"\"\n {\n \"resultType\" : \"history\",\n \"version\" : \"0.1alpha\",\n \"uploadKeys\" : [\n { \"name\" : \"emk\", \"key\" : \"abc\" },\n { \"name\" : \"ec\" , \"key\" : \"def\" }\n ],\n \"generator\" : { \"name\" : \"Yapeal\", \"version\" : \"11.335.1737\" },\n \"currentTime\" : \"2011-10-22T15:46:00+00:00\",\n \"columns\" : [\"date\",\"orders\",\"quantity\",\"low\",\"high\",\"average\"],\n \"rowsets\" : [\n {\n \"generatedAt\" : \"2011-10-22T15:42:00+00:00\",\n \"regionID\" : 10000065,\n \"typeID\" : 11134,\n \"rows\" : [\n [\"2011-12-03T00:00:00+00:00\",40,40,1999,499999.99,35223.50],\n [\"2011-12-02T00:00:00+00:00\",83,252,9999,11550,11550]\n ]\n }\n ]\n }\n \"\"\"\n decoded_list = unified.parse_from_json(data)\n self.assertIsInstance(decoded_list, MarketHistoryList)\n self.assertEqual(len(decoded_list), 2)", "def test_history_MultiplePlayer_History(test_name):\n env = build_test_env(\n test_name,\n \"tests/gdy/test_step_MultiPlayer_SingleActionType.yaml\"\n )\n\n obs, reward, done, info = env.step([\n 1,\n 3,\n ])\n\n expected_history = [\n {'PlayerId': 1, 'ActionName': 'move', 'Tick': 0, 'Rewards': {}, 'Delay': 0, 'SourceObjectName': 'avatar',\n 'DestinationObjectName': '_empty', 'SourceObjectPlayerId': 1, 'DestinationObjectPlayerId': 0,\n 'SourceLocation': [1.0, 3.0], 'DestinationLocation': [0.0, 3.0]},\n {'PlayerId': 2, 'ActionName': 'move', 'Tick': 0, 'Rewards': {}, 'Delay': 0, 'SourceObjectName': 'avatar',\n 'DestinationObjectName': '_empty', 'SourceObjectPlayerId': 2, 'DestinationObjectPlayerId': 0,\n 'SourceLocation': [3.0, 3.0], 'DestinationLocation': [4.0, 3.0]}]\n\n assert info['History'] == expected_history", "def history(self):\n return self.info['history']", "def history(self, key, _from='-', _to='+', _desc=True):\n return [self.klass.from_json(_object)\n for _object in self.storage.history(key, _from, _to, _desc)]", "def query_history_events(self) -> None:\n for exchange in self.iterate_exchanges():\n exchange.query_history_events()", "def history(self, name, _from=0, to=None):\n params = {}\n if _from is not None:\n params[\"from\"] = str(_from)\n if to is not None:\n params[\"to\"] = str(to)\n with self.get(\n create_url(\"/v3/schedule/history/{name}\", name=name), params\n ) as res:\n code, body = res.status, res.read()\n if code != 200:\n self.raise_error(\"List history failed\", res, body)\n js = self.checked_json(body, [\"history\"])\n\n return [history_to_tuple(m) for m in js[\"history\"]]", "def test_workflows_change_stream_get(self):\n 
pass", "def test_updatePlayerbHist_singleaction(self):\n self.assertEqual(self.player.bHist[1], [cardutils.BETSTRING_DICT['CALL']])", "def history_testnet(btc_address):\n history = []\n response = json.loads(make_request('http://tbtc.blockr.io/api/v1/address/txs/' + btc_address))\n if response.get('status') == 'success':\n data = response.get('data')\n txs = data.get('txs')\n\n for tx in reversed(txs):\n history.append(get_tx_info(tx.get('tx')))\n\n return history", "def get_trade_history(self, pair, limit=20):\r\n method = self.private_endpoints['trade_history']['method']\r\n url = self.base_url + self.private_endpoints['trade_history']['url'].format(pair=pair, limit=str(limit))\r\n req = requests.request(method, url, headers=self.get_auth_headers())\r\n res = req.json()\r\n\r\n if res['success'] == True:\r\n return res[\"result\"]\r\n else:\r\n return res", "def get_cache_history_items(self):\n #gdb.execute(\"p cache->history_items\")\n history_items = ZabbixHashset(gdb.parse_and_eval ('cache->history_items'))\n self.data = history_items.parse()", "def get_history(self, symbol, limit=1000, offset=0):\r\n return self.api.get_history(self.account, symbol, limit, offset)", "def history(self):\n return self.board.history", "def orders_history(self): \n return(self._d_orders['history'])", "def get_room_history(self, room):\n pass", "def history(self):\n return _TestA_swig.cleanslate_sptr_history(self)", "def slot_fullhistory(self, dummy_sender, data):\r\n (history) = data\r\n\r\n if not len(history):\r\n self.debug(\"### history download was empty\")\r\n return\r\n\r\n def get_time_round(date):\r\n \"\"\"round timestamp to current candle timeframe\"\"\"\r\n return int(date / self.timeframe) * self.timeframe\r\n\r\n #remove existing recent candle(s) if any, we will create them fresh\r\n date_begin = get_time_round(int(history[0][\"date\"]))\r\n while len(self.candles) and self.candles[0].tim >= date_begin:\r\n self.candles.pop(0)\r\n\r\n new_candle = OHLCV(0, 0, 0, 0, 0, 0) #this is a dummy, not actually inserted\r\n count_added = 0\r\n for trade in history:\r\n date = int(trade[\"date\"])\r\n price = int(trade[\"price_int\"])\r\n volume = int(trade[\"amount_int\"])\r\n time_round = get_time_round(date)\r\n if time_round > new_candle.tim:\r\n if new_candle.tim > 0:\r\n self._add_candle(new_candle)\r\n count_added += 1\r\n new_candle = OHLCV(\r\n time_round, price, price, price, price, volume)\r\n new_candle.update(price, volume)\r\n\r\n # insert current (incomplete) candle\r\n self._add_candle(new_candle)\r\n count_added += 1\r\n self.debug(\"### got %d updated candle(s)\" % count_added)\r\n self.ready_history = True\r\n self.signal_fullhistory_processed(self, None)\r\n self.signal_changed(self, (self.length()))", "def query_test_history(self,\n response,\n test_id,\n parent_step_name=None,\n step_iteration=1):\n parent_step_prefix = ''\n if parent_step_name:\n parent_step_prefix = ('%s.' 
% parent_step_name)\n step_suffix = ''\n if step_iteration > 1:\n step_suffix = ' (%d)' % step_iteration\n step_name = ('%sTest history query rpc call for %s%s' %\n (parent_step_prefix, test_id, step_suffix))\n\n return self.step_data(\n step_name,\n self.m.json.output_stream(json_format.MessageToDict(response)))", "def test_update_trx_coinbase_transaction_history(monkeypatch: MonkeyPatch):\n user = mixer.blend(\"auth.User\")\n account: Account = mixer.blend(\n \"accounts.Account\", owner=user, service_type=\"coinbase\", api_key=\"123\", api_secret=\"456\")\n\n date: datetime = now()\n\n monkeypatch.setattr(cryptocompare, \"get_historical_price\",\n new_get_historical_price)\n monkeypatch.setattr(coinbase.wallet.client.Client, \"get_accounts\",\n new_get_accounts)\n monkeypatch.setattr(coinbase.wallet.client.Client, \"get_transactions\",\n new_get_transactions)\n monkeypatch.setattr(coinbase.wallet.client.Client, \"get_buys\",\n new_get_buys_transaction_history)\n monkeypatch.setattr(coinbase.wallet.client.Client, \"get_sells\",\n lambda self, cb_account: MockAPIObject())\n\n mixer.blend(\n \"transactions.TransactionUpdateHistoryEntry\",\n date=date,\n account=account,\n fetched_transactions=3)\n\n update_coinbase_trx(account)\n transaction = Transaction.objects.filter(target_peer=account)\n assert transaction.count(\n ) == 1, \"Should not import transactions older than last update time\"", "def GetBankExecHistory():\n redispool = redis.ConnectionPool(host=RedisIP,port=RedisPort,db=RedisDB)\n redata = redis.Redis(connection_pool=redispool)\n TmpDict=redata.lrange(\"BankExecHisInfo\",0,-1)\n return request.query.jsoncallback + \"(\" + ujson.encode(TmpDict) + \")\"", "def _hook_syncs(self, cluster, level):\n syncs = []\n for i in cluster.ispace[:level]:\n for s in cluster.syncs.get(i.dim, ()):\n if isinstance(s, (FetchUpdate, PrefetchUpdate)):\n syncs.append(s)\n return tuple(syncs)", "def test_calculator_add(clear_history):\n assert Calculator.add_number(1, 2) == 3\n assert Calculator.add_number(2, 2) == 4\n assert Calculator.add_number(3, 2) == 5\n assert Calculator.add_number(4, 2) == 6\n assert Calculator.history_count() == 4\n assert Calculator.get_result_of_last_calculation_added_to_history() == 6\n pprint.pprint(Calculator.history)", "def history(self) -> List[SnapshotLogEntry]:\n return self.metadata.snapshot_log", "def get_diffs(history):\n\n # First get all possible representations\n mgr = plugins_get_mgr() \n keys = mgr.search('representation')['representation']\n representations = [mgr.get_by_key('representation', k) for k in keys]\n\n for i in range(len(history)):\n if i+1 > len(history) - 1:\n continue\n\n prev = history[i]\n curr = history[i+1]\n\n #print(prev['subject'], \"==>\", curr['subject'])\n #print(curr['changes'])\n for c in curr['changes']:\n \n path = c['path']\n\n # Skip the metadata file\n if c['path'].endswith('datapackage.json'): \n continue \n\n # Find a handler for this kind of file...\n handler = None \n for r in representations: \n if r.can_process(path): \n handler = r \n break \n \n if handler is None: \n continue \n\n # print(path, \"being handled by\", handler)\n\n v1_hex = prev['commit']\n v2_hex = curr['commit']\n\n temp1 = tempfile.mkdtemp(prefix=\"dgit-diff-\") \n \n try: \n for h in [v1_hex, v2_hex]: \n filename = '{}/{}/checkout.tar'.format(temp1, h)\n try:\n os.makedirs(os.path.dirname(filename))\n except:\n pass \n extractcmd = ['git', 'archive', '-o', filename, h, path]\n output = run(extractcmd)\n if 'fatal' in output: \n raise Exception(\"File 
not present in commit\") \n with cd(os.path.dirname(filename)): \n cmd = ['tar', 'xvf', 'checkout.tar']\n output = run(cmd) \n if 'fatal' in output: \n print(\"Cleaning up - fatal 1\", temp1)\n shutil.rmtree(temp1)\n continue \n\n # Check to make sure that \n path1 = os.path.join(temp1, v1_hex, path) \n path2 = os.path.join(temp1, v2_hex, path) \n if not os.path.exists(path1) or not os.path.exists(path2): \n # print(\"One of the two output files is missing\") \n shutil.rmtree(temp1)\n continue \n\n #print(path1, path2) \n\n # Now call the handler\n diff = handler.get_diff(path1, path2)\n\n # print(\"Inserting diff\", diff)\n c['diff'] = diff\n\n except Exception as e: \n #traceback.print_exc() \n #print(\"Cleaning up - Exception \", temp1)\n shutil.rmtree(temp1)", "def test_search_history(monkeypatch, records):\n found = [records[1], records[2], records[7]]\n instrument_mock = mock.MagicMock()\n instrument_mock.scan.return_value = found\n monkeypatch.setattr(\"app.search.InstrumentModel\", instrument_mock)\n\n response = search.history({\"body\": ujson.dumps({\"term\": \"Test Name\"})}, {})\n instrument_mock.history.contains.assert_called_with(\"Test Name\")\n\n assert response[\"statusCode\"] == 200\n assert _result_id_set(response[\"body\"]) == _result_id_set(found)", "def get_calls_history(self, plugin):\n if plugin in self._plugin2calls:\n return list(self._plugin2calls[plugin])\n return []", "def test_last_updated_sync(self):\n last_updated_at = timezone.now()\n videos = [\n make_video(title='test title', media_id='1'),\n ]\n channels = [\n make_channel(title='test channel', media_ids=['1'], collection_id='2',\n last_updated_at=last_updated_at)\n ]\n set_resources_and_sync(videos, channels)\n c = mpmodels.Channel.objects.filter(sms__id=2).first()\n self.assertIsNotNone(c)\n self.assertEqual(c.sms.last_updated_at, last_updated_at)", "def get_verification_history(db_conn, transaction_id):\n history = []\n\n # Get the first entry (the predicted verifier)\n s = select([transaction_tbl.c.verifier_predicted_result,\n transaction_tbl.c.verifier_predicted_date,\n user_tbl.c.name.label('verifier_predicted_user_name')]) \\\n .select_from(transaction_tbl.join(user_tbl,\n onclause=transaction_tbl.c.verifier_predicted_user_id == user_tbl.c.id)) \\\n .where(transaction_tbl.c.id == transaction_id)\n rs = db_conn.execute(s)\n assert rs.rowcount == 1\n row = rs.fetchone()\n history.append(get_verification_history_first_entry(row.verifier_predicted_date,\n row.verifier_predicted_user_name,\n row.verifier_predicted_result))\n\n # Get the rest of the entries (the verifications that have come so far)\n s = select([verification_attempt_tbl.c.attempt_replied_to,\n verification_attempt_tbl.c.did_verify,\n verification_attempt_tbl.c.comment,\n user_tbl.c.name.label('user_name')]) \\\n .select_from(verification_attempt_tbl.join(user_tbl)) \\\n .where(verification_attempt_tbl.c.transaction_id == transaction_id) \\\n .order_by(verification_attempt_tbl.c.attempt_replied_to)\n existing_verifications = db_conn.execute(s).fetchall()\n for i, r in enumerate(existing_verifications[:-1]): # We don't want the last verification attempt -- that's the one we're creating now\n next_verifier = existing_verifications[i + 1].user_name\n history.append({'date': r.attempt_replied_to,\n 'who': r.user_name,\n 'action': 'Forward to %s' % next_verifier if not r.did_verify else '???',\n 'comment': r.comment})\n return history", "def do_gethistory(self,args):\n #Very rough. 
pretty print it\n history=bitstamp.get_usertransactions()\n ppdict(history)", "def test_filter_sync_2(self):\n res = self.machine_template_1._filter_sync_values(self.sync_vals)\n self.assertEqual(\n res, self.sync_vals)", "def get_history_queue():\n response = houston.get(\"/history/queue\")\n houston.raise_for_status_with_json(response)\n return response.json()", "def testNewHistoryFile(self):\n history_file = os.path.join(self.base_path, \"new_places.sqlite\")\n with io.open(history_file, mode=\"rb\") as history_filedesc:\n history = firefox3_history.Firefox3History()\n entries = [x for x in history.Parse(history_filedesc)]\n\n self.assertLen(entries, 3)\n self.assertEqual(entries[1][3],\n \"Slashdot: News for nerds, stuff that matters\")\n self.assertEqual(entries[2][0], 1342526323608384)\n self.assertEqual(entries[2][1], \"FIREFOX3_VISIT\")\n self.assertEqual(\n entries[2][2],\n \"https://blog.duosecurity.com/2012/07/exploit-mitigations\"\n \"-in-android-jelly-bean-4-1/\")\n\n # Check that our results are properly time ordered\n time_results = [x[0] for x in entries]\n self.assertEqual(time_results, sorted(time_results))", "def get_history(self, taxlot_view):\n history = []\n\n def record_dict(log):\n filename = None if not log.import_filename else path.basename(log.import_filename)\n if filename:\n # Attempt to remove NamedTemporaryFile suffix\n name, ext = path.splitext(filename)\n pattern = re.compile('(.*?)(_[a-zA-Z0-9]{7})$')\n match = pattern.match(name)\n if match:\n filename = match.groups()[0] + ext\n return {\n 'state': TaxLotStateSerializer(log.state).data,\n 'date_edited': convert_to_js_timestamp(log.created),\n 'source': log.get_record_type_display(),\n 'filename': filename,\n # 'changed_fields': json.loads(log.description) if log.record_type == AUDIT_USER_EDIT else None\n }\n\n log = TaxLotAuditLog.objects.select_related('state', 'parent1', 'parent2').filter(\n state_id=taxlot_view.state_id\n ).order_by('-id').first()\n master = {\n 'state': TaxLotStateSerializer(log.state).data,\n 'date_edited': convert_to_js_timestamp(log.created),\n }\n\n # Traverse parents and add to history\n if log.name in ['Manual Match', 'System Match', 'Merge current state in migration']:\n done_searching = False\n while not done_searching:\n if (log.parent1_id is None and log.parent2_id is None) or log.name == 'Manual Edit':\n done_searching = True\n elif log.name == 'Merge current state in migration':\n record = record_dict(log.parent1)\n history.append(record)\n if log.parent1.name == 'Import Creation':\n done_searching = True\n else:\n tree = log.parent1\n log = tree\n else:\n tree = None\n if log.parent2:\n if log.parent2.name in ['Import Creation', 'Manual Edit']:\n record = record_dict(log.parent2)\n history.append(record)\n elif log.parent2.name == 'System Match' and log.parent2.parent1.name == 'Import Creation' and \\\n log.parent2.parent2.name == 'Import Creation':\n # Handle case where an import file matches within itself, and proceeds to match with\n # existing records\n record = record_dict(log.parent2.parent2)\n history.append(record)\n record = record_dict(log.parent2.parent1)\n history.append(record)\n else:\n tree = log.parent2\n if log.parent1.name in ['Import Creation', 'Manual Edit']:\n record = record_dict(log.parent1)\n history.append(record)\n else:\n tree = log.parent1\n\n if not tree:\n done_searching = True\n else:\n log = tree\n elif log.name == 'Manual Edit':\n record = record_dict(log.parent1)\n history.append(record)\n elif log.name == 'Import Creation':\n record = 
record_dict(log)\n history.append(record)\n\n return history, master", "def history(self):\n return _TestA_swig.my_qpsk_demod_cb_sptr_history(self)", "def svn_fs_history_prev(*args):\r\n return _fs.svn_fs_history_prev(*args)", "def test_fill_quote_history(self):\n ticker = \"ibm\"\n name = \"IBM\"\n data = {'name': name, 'ticker': ticker}\n request = self.client.post('/stocks/addstock/', data, follow=True, secure=True)\n stock_id = request.content\n data = DailyStockQuote.objects.filter(stock_id=stock_id)\n stock_data = Stock.objects.filter(id=stock_id)\n self.assertGreater(len(data), 0)\n self.assertEqual(len(stock_data), 1)", "def save(self, *args, **kwargs):\n changesDict = self.diff\n\n # Let's construct the history record entry and add it to the dict.\n # Each record is further going to be dictionary stored in the DB as a string.\n\n # We need to keep track of which user updated the data. Check if the user has\n # assigned a callable to fetch it. Else we will initialize it to be empty.\n who = None\n if self.history__get_user_hook is not None:\n if callable(self.history__get_user_hook):\n who = self.history__get_user_hook()\n else:\n logger.error(\"history_get_user initialized with a non-callable\")\n\n # only add the 'who' field if we have a string value.\n if who is not None and isinstance(who,str):\n entry = {\n \"who\" : str(who),\n \"when\" : timezone.now().isoformat(),\n \"what\" : changesDict,\n }\n else:\n entry = {\n \"when\" : timezone.now().isoformat(),\n \"what\" : changesDict,\n }\n\n # prepend this entry into the list\n if isinstance(self.history, list):\n self.history.insert(0,entry)\n if isinstance(self.history__max_entry_count, int):\n if self.history__max_entry_count >= 0:\n # Value has been specified. Trim the list. Value of 0 deletes the whole list.\n del self.history[self.history__max_entry_count: ]\n logger.debug('Added the following record to history:\\n %s'%(str(entry),))\n else:\n logger.error(\"History field not saved initially as list!!\")\n\n # Let's not hold up the save. 
Save and reset the initial state.\n super(ChangeHistoryMixin, self).save(*args, **kwargs)\n self.__initial = self._dict", "def test_hist_flush():\n FNAME = 'xonsh-SESSIONID.json'\n FNAME += '.flush'\n hist = History(filename=FNAME, here='yup', **HIST_TEST_KWARGS)\n hf = hist.flush()\n yield assert_is_none, hf\n with mock_xonsh_env({'HISTCONTROL': set()}):\n hist.append({'joco': 'still alive'})\n hf = hist.flush()\n yield assert_is_not_none, hf\n while hf.is_alive():\n pass\n with LazyJSON(FNAME) as lj:\n obs = lj['cmds'][0]['joco']\n yield assert_equal, 'still alive', obs\n os.remove(FNAME)", "def _get_history_data(self) -> List[Dict[str, Any]]:\n try:\n with open(self._path.as_posix(), \"r\", encoding=\"utf-8\") as history_file:\n data = json.load(history_file)\n data.append(History._get_empty_session_object())\n return data\n except FileNotFoundError:\n self._path.touch()\n return History._get_empty_json_object()\n except json.decoder.JSONDecodeError:\n return History._get_empty_json_object()", "def get_data_history(\n self, \n rec_id=None, \n ver_id=None, \n output='df'\n ):\n if rec_id is None:\n raise Exception('Please enter a valid data clone RecId.')\n else:\n if ver_id is None:\n suffix = f\"data/{rec_id}/versions\"\n data = self.session.api_call(suffix=suffix)\n if output == 'df':\n df = pd.DataFrame.from_dict(data.json().get('versions'))\n df[\"data_rec_id\"] = rec_id\n return df\n elif output == 'dict':\n data_dict = data.json().get('versions')\n for version in data_dict:\n version.update({\"data_rec_id\": rec_id})\n return data_dict\n else:\n suffix = f\"data/{rec_id}/v/{ver_id}\"\n data = self.retrieve_paginated_data(suffix=suffix)\n if output == 'df':\n return pd.DataFrame.from_dict(data)\n elif output == 'dict':\n return data", "def test_get_measurement_history(self):\n device = DeviceFactory(node=Node.objects.first(), external_id='123', type__code=SecureDeviceType.SRT321,\n device_param__type__code=SecureDeviceParameterType.MEASURED_TEMPERATURE)\n d_id_1 = device.external_id\n\n now_loc = datetime.datetime.now(bst)\n ts_loc = now_loc - datetime.timedelta(seconds=30)\n ts_str = ts_loc.strftime('%Y-%m-%dT%H:%M:%S')\n\n data = self.create_secure_server_push_data(d_id_1, ts_str)\n\n SecureClient.process_push_data(data)\n time.sleep(.5)\n\n # get newer timestamp\n ts_str = now_loc.strftime('%Y-%m-%dT%H:%M:%S')\n data = self.create_secure_server_push_data(d_id_1, ts_str, value=\"23.5\")\n\n SecureClient.process_push_data(data)\n\n token = Token.objects.get(user__username=email)\n device_param = device.parameters.first()\n client = APIClient()\n client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)\n url = reverse('api:device_measurements', kwargs={'device_parameter_id': device_param.id})\n\n time.sleep(.5)\n\n response = client.get(url, format='json')\n\n self.assertTrue(response.status_code == 200)\n self.assertTrue(len(response.data) >= 2)", "def get_vouchers_history(self, vid_encoded=None, vid=None, action=None,\n uid_from=None, uid_to=None, gid=None,\n valid_after=None, valid_before=None,\n create_after=None, create_before=None,\n last=None, first=None):\n resource = self.kvpath(\n 'vouchers/history',\n ('ident', vid_encoded),\n **{\n 'vid': ('int', vid),\n 'action': ('ident', action),\n 'from': ('int', uid_from),\n 'to': ('int', uid_to),\n 'gid': ('ident', gid),\n 'valid_after': ('isobasic', absdatetime(valid_after)),\n 'valid_before': ('isobasic', absdatetime(valid_before)),\n 'create_after': ('isobasic', absdatetime(create_after)),\n 'create_before': ('isobasic', 
absdatetime(create_before)),\n 'first': ('int', first),\n 'last': ('int', last)\n }\n )\n return self.request('get', resource)", "def getJobHistory(self,jobname):\n\t\tpass", "def _pre_sync(self):", "def history(self):\n alembic.command.history(self.alembic_config(), verbose=True)", "def check_result_of_history_creation(\n start_ts: Timestamp,\n end_ts: Timestamp,\n trade_history: List[Union[Trade, MarginPosition]],\n loan_history: Dict,\n asset_movements: List[AssetMovement],\n eth_transactions: List[EthereumTransaction],\n) -> Dict[str, Any]:\n assert start_ts == 0, 'should be same as given to process_history'\n assert end_ts == TEST_END_TS, 'should be same as given to process_history'\n\n # TODO: Add more assertions/check for each action\n # OR instead do it in tests for conversion of actions(trades, loans, deposits e.t.c.)\n # from exchange to our format for each exchange\n assert len(trade_history) == 11\n assert trade_history[0].location == Location.KRAKEN\n assert trade_history[0].pair == 'ETH_EUR'\n assert trade_history[0].trade_type == TradeType.BUY\n assert trade_history[1].location == Location.KRAKEN\n assert trade_history[1].pair == 'BTC_EUR'\n assert trade_history[1].trade_type == TradeType.BUY\n assert trade_history[2].location == Location.BITTREX\n assert trade_history[2].pair == 'LTC_BTC'\n assert trade_history[2].trade_type == TradeType.BUY\n assert trade_history[3].location == Location.BITTREX\n assert trade_history[3].pair == 'LTC_ETH'\n assert trade_history[3].trade_type == TradeType.SELL\n assert isinstance(trade_history[4], MarginPosition)\n assert trade_history[4].profit_loss == FVal('0.05')\n assert trade_history[5].location == Location.BINANCE\n assert trade_history[5].pair == 'ETH_BTC'\n assert trade_history[5].trade_type == TradeType.BUY\n assert trade_history[6].location == Location.BINANCE\n assert trade_history[6].pair == 'RDN_ETH'\n assert trade_history[6].trade_type == TradeType.SELL\n assert trade_history[7].location == Location.POLONIEX\n assert trade_history[7].pair == 'ETH_BTC'\n assert trade_history[7].trade_type == TradeType.SELL\n assert trade_history[8].location == Location.POLONIEX\n assert trade_history[8].pair == 'ETH_BTC'\n assert trade_history[8].trade_type == TradeType.BUY\n assert trade_history[9].location == Location.POLONIEX\n assert trade_history[9].pair == 'XMR_ETH'\n assert trade_history[9].trade_type == TradeType.BUY\n # TODO: investigate why this new bitmex position popped up\n assert isinstance(trade_history[10], MarginPosition)\n assert trade_history[10].profit_loss == FVal('5E-9')\n\n assert len(loan_history) == 2\n assert loan_history[0].currency == A_ETH\n assert loan_history[0].earned == AssetAmount(FVal('0.00000001'))\n assert loan_history[1].currency == A_BTC\n assert loan_history[1].earned == AssetAmount(FVal('0.00000005'))\n\n assert len(asset_movements) == 11\n assert asset_movements[0].location == Location.KRAKEN\n assert asset_movements[0].category == AssetMovementCategory.DEPOSIT\n assert asset_movements[0].asset == A_BTC\n assert asset_movements[1].location == Location.KRAKEN\n assert asset_movements[1].category == AssetMovementCategory.DEPOSIT\n assert asset_movements[1].asset == A_ETH\n assert asset_movements[2].location == Location.KRAKEN\n assert asset_movements[2].category == AssetMovementCategory.WITHDRAWAL\n assert asset_movements[2].asset == A_BTC\n assert asset_movements[3].location == Location.KRAKEN\n assert asset_movements[3].category == AssetMovementCategory.WITHDRAWAL\n assert asset_movements[3].asset == 
A_ETH\n assert asset_movements[4].location == Location.POLONIEX\n assert asset_movements[4].category == AssetMovementCategory.WITHDRAWAL\n assert asset_movements[4].asset == A_BTC\n assert asset_movements[5].location == Location.POLONIEX\n assert asset_movements[5].category == AssetMovementCategory.WITHDRAWAL\n assert asset_movements[5].asset == A_ETH\n assert asset_movements[6].location == Location.POLONIEX\n assert asset_movements[6].category == AssetMovementCategory.DEPOSIT\n assert asset_movements[6].asset == A_BTC\n assert asset_movements[7].location == Location.POLONIEX\n assert asset_movements[7].category == AssetMovementCategory.DEPOSIT\n assert asset_movements[7].asset == A_ETH\n assert asset_movements[8].location == Location.BITMEX\n assert asset_movements[8].category == AssetMovementCategory.DEPOSIT\n assert asset_movements[8].asset == A_BTC\n assert asset_movements[9].location == Location.BITMEX\n assert asset_movements[9].category == AssetMovementCategory.WITHDRAWAL\n assert asset_movements[9].asset == A_BTC\n # TODO: investigate why this new bitmex withdrawal popped up\n assert asset_movements[10].location == Location.BITMEX\n assert asset_movements[10].category == AssetMovementCategory.WITHDRAWAL\n assert asset_movements[10].asset == A_BTC\n\n # The history creation for these is not yet tested\n assert len(eth_transactions) == 3\n assert eth_transactions[0].block_number == 54092\n assert eth_transactions[0].tx_hash == hexstring_to_bytes(TX_HASH_STR1)\n assert eth_transactions[0].from_address == ETH_ADDRESS1\n assert eth_transactions[0].to_address == ''\n assert eth_transactions[0].value == FVal('11901464239480000000000000')\n assert eth_transactions[0].input_data == MOCK_INPUT_DATA\n assert eth_transactions[1].block_number == 54093\n assert eth_transactions[1].tx_hash == hexstring_to_bytes(TX_HASH_STR2)\n assert eth_transactions[1].from_address == ETH_ADDRESS2\n assert eth_transactions[1].to_address == ETH_ADDRESS1\n assert eth_transactions[1].value == FVal('40000300')\n assert eth_transactions[1].input_data == MOCK_INPUT_DATA\n assert eth_transactions[2].block_number == 54094\n assert eth_transactions[2].tx_hash == hexstring_to_bytes(TX_HASH_STR3)\n assert eth_transactions[2].from_address == ETH_ADDRESS3\n assert eth_transactions[2].to_address == ETH_ADDRESS1\n assert eth_transactions[2].value == FVal('500520300')\n assert eth_transactions[2].input_data == MOCK_INPUT_DATA\n\n return {}", "def history(self, direction=None, limit=None, start=None, end=None, timeout=None):\n params = {}\n\n if direction:\n params['direction'] = '%s' % direction\n if limit:\n params['limit'] = '%d' % limit\n if start:\n params['start'] = self._format_time_param(start)\n if end:\n params['end'] = self._format_time_param(end)\n\n path = '/channels/%s/history' % self.__name\n\n if params:\n path = path + '?' + urlencode(params)\n\n if self.__cipher:\n message_handler = make_encrypted_message_response_handler(self.__cipher)\n else:\n message_handler = message_response_handler\n\n return PaginatedResult.paginated_query(\n self.ably.http,\n path,\n None,\n message_handler\n )", "def load_history(args):\n # history is a json document as a big dictionary\n # the keys are date/timestamps. 
The result is a list of pairlists\n # only will return the 'relevant' pairs, meaning the most recent\n # RELEVANT_HISTORY ones.\n if os.path.isfile(args.history):\n with open(args.history, 'r') as h:\n metahistory = json.load(h)\n else:\n metahistory = {}\n return prune_history(metahistory, args.relevant_history)", "def test_item_revision_history(testapp, registry):\n objv1 = {\n 'title': \"Testing1\",\n 'description': \"This is testing object 1\",\n }\n objv2 = {\n 'title': \"Testing2\",\n 'description': \"This is testing object 2\",\n }\n objv3 = {\n 'title': \"Testing3\",\n 'description': \"This is testing object 3\",\n }\n item_uuid = testapp.post_json('/embedding-tests', objv1, status=201).json['@graph'][0]['uuid']\n testapp.patch_json('/' + item_uuid, objv2, status=200)\n testapp.patch_json('/' + item_uuid, objv3, status=200)\n\n # now get revision history\n revisions = testapp.get('/' + item_uuid + '/@@revision-history').json['revisions']\n assert len(revisions) == 3 # we made 3 edits\n\n # lets make some more\n testapp.patch_json('/' + item_uuid, objv2, status=200)\n testapp.patch_json('/' + item_uuid, objv1, status=200)\n revisions = testapp.get('/' + item_uuid + '/@@revision-history').json['revisions']\n assert len(revisions) == 5 # now we made 5 edits\n # they should be ordered by sid, recall the patch order above\n for patched_metadata, revision in zip([objv1, objv2, objv3, objv2, objv1], revisions):\n assert revision['title'] == patched_metadata['title']" ]
[ "0.692454", "0.68217295", "0.6417337", "0.63609856", "0.6296297", "0.6270521", "0.6258328", "0.6244751", "0.62230396", "0.6212335", "0.6196629", "0.60348105", "0.602882", "0.5997421", "0.5959013", "0.5924428", "0.5920476", "0.59072894", "0.58979166", "0.58102685", "0.57995194", "0.5798062", "0.5788464", "0.57698065", "0.5739475", "0.5730417", "0.56833726", "0.56603944", "0.5656698", "0.5640358", "0.5632619", "0.56183606", "0.5615836", "0.56007653", "0.5598125", "0.5597287", "0.5591184", "0.558278", "0.5581012", "0.5573048", "0.5550829", "0.55479664", "0.55460894", "0.553827", "0.55239016", "0.5515536", "0.55099416", "0.55055094", "0.54946005", "0.5494142", "0.5486365", "0.54733264", "0.5466992", "0.5462124", "0.5454299", "0.54226124", "0.5421191", "0.5415764", "0.5413379", "0.54073054", "0.54038477", "0.53974974", "0.5391449", "0.53851265", "0.5381741", "0.53765583", "0.5374688", "0.5365957", "0.5360482", "0.5360108", "0.53587407", "0.5354812", "0.5342595", "0.5340221", "0.5336105", "0.5334696", "0.5317889", "0.53170747", "0.5313727", "0.53133816", "0.53037685", "0.5302398", "0.52967113", "0.5293736", "0.52929777", "0.5290882", "0.5289664", "0.5288593", "0.52874124", "0.5285397", "0.5279432", "0.527596", "0.52705413", "0.5260882", "0.5257079", "0.5253363", "0.5250125", "0.5246446", "0.5242229", "0.5242027" ]
0.9430264
0
Test case for update_case
def test_update_case(self): pass
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_update_scenario(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_update(self):\n pass", "def test_add_or_update_case(self):\n pass", "def test_update_record(self):\n pass", "def test_update_one(self):\n pass", "def test_update(self):\n # this is tested graphically, as it is UI\n pass", "def test_update9(self):\n pass", "def test_update_state1(self):\n pass", "def test_update_state4(self):\n pass", "def test_update_state2(self):\n pass", "def test_update_occurrence(self):\n pass", "def test_update_rule(self):\n pass", "def test_update_state3(self):\n pass", "def test_update_cases_from_fogbugz(mocked_update, transactional_db, case):\n update_cases_from_fogbugz()\n mocked_update.apply_async.assert_called_once_with(kwargs=dict(case_id=case.id))", "def test_update_state(self):\n pass", "def test_user_update_request(self):\n pass", "def test_update_activity(self):\n pass", "def test_update_cases_to_fogbugz(mocked_update, transactional_db, case):\n update_cases_to_fogbugz()\n mocked_update.apply_async.assert_called_once_with(kwargs=dict(case_id=case.id))", "def test_update_goal(self):\n pass", "def test_update_collection(self):\n pass", "def test_client_update(self):\n pass", "def test_update(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 15}, name=\"Candy\")\n assert n_updated == 1\n items = list(test_store.get_by())\n\n candy.age = 15\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_update(self):\n # creating a new sample template\n st = SampleTemplate.create(self.metadata, self.new_study)\n # updating the sample template\n st.update(self.metadata_dict_updated)\n\n # validating values\n exp = self.metadata_dict_updated_dict['Sample1'].values()\n obs = st.get('2.Sample1').values()\n self.assertItemsEqual(obs, exp)\n\n exp = self.metadata_dict_updated_dict['Sample2'].values()\n obs = st.get('2.Sample2').values()\n self.assertItemsEqual(obs, exp)\n\n exp = self.metadata_dict_updated_dict['Sample3'].values()\n obs = st.get('2.Sample3').values()\n self.assertItemsEqual(obs, exp)\n\n # checking errors\n with self.assertRaises(QiitaDBError):\n st.update(self.metadata_dict_updated_sample_error)\n with self.assertRaises(QiitaDBError):\n st.update(self.metadata_dict_updated_column_error)", "def test_update(app):\n\n assert False", "def test_update(self, init_db, audit):\n params = {\n \"resource_type\": \"Category\",\n \"action\": \"Updated\",\n \"activity\": \"changed name\"\n }\n audit.update(**params)\n assert audit.resource_type == params['resource_type']\n assert audit.action == params['action']\n assert audit.activity == params['activity']", "def test_update(client):\n rv = update(client, 'Michael')\n assert json.loads(rv.data.decode())['code'] == 0\n assert json.loads(rv.data.decode())['owner'] == 'Michael'", "def test_update_attribute_data(self):\n pass", "def test_full_update(self):\n self.assertEqual(Product.objects.count(), 2)\n self.assertEqual(self.product_1.name, 'Nike Vapor')\n self.assertEqual(self.product_1.sku, '44444444')\n self.assertEqual(self.product_1.category, self.category_1)\n self.assertEqual(self.product_1.description, 'Some product description')\n self.assertEqual(self.product_1.price, 129.99)\n self.assertEqual(self.product_1.featured, False)\n\n payload = {\n 'name': 'Updated name',\n 'category': self.category_2.id,\n 'sku': '11111111',\n 'description': 'New product description',\n 'price': 39.99,\n 'featured': True\n }\n\n headers = {\n 
'HTTP_AUTHORIZATION': 'Token ' + str(self.token_admin)\n }\n response = self.client.put(\n '/api/products/{}/'.format(self.product_1.id),\n data=payload, content_type='application/json', **headers)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response['Content-Type'], 'application/json')\n self.assertEqual(Product.objects.count(), 2)\n\n product = Product.objects.get(id=self.product_1.id)\n self.assertEqual(product.name, 'Updated name')\n self.assertEqual(product.sku, '11111111')\n self.assertEqual(product.category, self.category_2)\n self.assertEqual(product.description, 'New product description')\n self.assertEqual(float(product.price), 39.99)\n self.assertEqual(product.featured, True)", "def test_client_partial_update(self):\n pass", "def test_update_client(self):\n pass", "def test_update_user(self):\n pass", "def test_update_system(self):\n pass", "def test_update(self):\n payload = {\n 'name': 'Pecho inclinado',\n 'description': \"New description\",\n 'muscle_group': \"pecho\"\n }\n response = self.client.put(\n '/exercises/{}/'.format(self.exer1.id), data=payload)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n Exercise.objects.get(id=self.exer1.id).name, payload['name'])", "def test_update(self):\n user = self.custodian_1_user\n user_client = self.custodian_1_client\n urls = [reverse('api:user-detail', kwargs={'pk': user.pk})]\n new_first_name = \"New First Name\"\n data = {\n \"first_name\": new_first_name,\n }\n access = {\n \"forbidden\": [self.anonymous_client, self.readonly_client, self.custodian_2_client],\n \"allowed\": [self.admin_client, user_client]\n }\n\n for client in access['forbidden']:\n for url in urls:\n self.assertIn(\n client.patch(url, data, format='json').status_code,\n [status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]\n )\n\n for client in access['allowed']:\n for url in urls:\n new_first_name += '1'\n data['first_name'] = new_first_name\n self.assertEqual(\n client.patch(url, data, format='json').status_code,\n status.HTTP_200_OK\n )\n user.refresh_from_db()\n self.assertEqual(user.first_name, new_first_name)", "def taco_test_put_update(self):\n body = '{ \"id\": 400, \"name\": \"item4\", \"content\": \"after test update\" }'\n env = self.get_env('PUT', '/item/4', body=body)\n webapi_start(env, lambda status, response_headers: self.assertEqual(status, '204'))", "def test_update_inventory(self):\n pass", "def test_update_values(self):\n m0 = TestUpdateModel.create(count=5, text='monkey')\n\n # independently save over a new count value, unknown to original instance\n m1 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\n m1.count = 6\n m1.save()\n\n # update the text, and call update\n m0.update(text='monkey land')\n self.assertEqual(m0.text, 'monkey land')\n\n # database should reflect both updates\n m2 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\n self.assertEqual(m2.count, m1.count)\n self.assertEqual(m2.text, m0.text)", "def test_beneficiaries_update_that_will_pass(self):\n print('the test function name: {}'.format(sys._getframe().f_code.co_name))\n url = reverse('beneficiary:beneficiary-entity-by-id-update', kwargs={'pk': 1})\n response = self.client.post(url, content_type='application/json')\n return self.assertTrue(response.status_code, 200)", "def test_update_values(self):\r\n m0 = TestUpdateModel.create(count=5, text='monkey')\r\n\r\n # independently save over a new count value, unknown to original instance\r\n m1 = 
TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\r\n m1.count = 6\r\n m1.save()\r\n\r\n # update the text, and call update\r\n m0.update(text='monkey land')\r\n self.assertEqual(m0.text, 'monkey land')\r\n\r\n # database should reflect both updates\r\n m2 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\r\n self.assertEqual(m2.count, m1.count)\r\n self.assertEqual(m2.text, m0.text)", "def test_update(self):\n doctor = DoctorFactory.create(id=21)\n data = {'name': 'Joe'}\n self.assertNotEqual(doctor.name, data['name'])\n\n response = self.unath_client.put(reverse('doctor-detail', args=[21]), data=data)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)\n\n response = self.client.put(reverse('doctor-detail', args=[21]), data=data)\n self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)", "def test_update_profile(self):\n url = self.url\n url = url + '{}/'.format(\n self.profile.pk\n )\n response = self.client.patch(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n update_data = {\n 'first_name': 'UpdateTest'\n }\n\n response = self.client.patch(url, update_data, format='json')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # Caso 1\n update_data['tasks_finalize'] = '14124123'\n update_data['tasks_pending'] = '124123132'\n update_data['tasks_created'] = '12412323'\n\n response = self.client.patch(url, update_data, format='json')\n self.assertEqual(response.data['tasks_finalize'], 0)\n self.assertEqual(response.data['tasks_pending'], 0)\n self.assertEqual(response.data['tasks_created'], 0)", "def test_update_twice_same_result():\n starting_db = create_db(STARTING_DB_INPUT)\n actual: dict = o_obj.update_object_in_db(\n starting_db,\n \"some_uid\",\n INP\n )\n actual2: dict = o_obj.update_object_in_db(\n starting_db,\n \"some_uid\",\n INP\n )\n assert actual == EXPECTED == actual2", "def test_customer_update(self):\n # first performe create\n id = self._create_model(\"customer\", self.customer_data, [\"name\", \"email\", \"phone\"])\n if id:\n # then performe update\n data = { \n \"name\": \"Changed the name\",\n \"email\": self.customer_data[\"email\"],\n \"phone\": self.customer_data[\"phone\"]\n }\n self._update_model(\"customer\", id, data, [\"name\"])\n self.assertIsNotNone(id)", "def test_partial_update(self):\n self.assertEqual(Product.objects.count(), 2)\n self.assertEqual(self.product_1.name, 'Nike Vapor')\n\n payload = {\n 'name': 'Updated name',\n }\n\n headers = {\n 'HTTP_AUTHORIZATION': 'Token ' + str(self.token_admin)\n }\n response = self.client.patch(\n '/api/products/{}/'.format(self.product_1.id),\n data=payload, content_type='application/json', **headers)\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response['Content-Type'], 'application/json')\n self.assertEqual(Product.objects.count(), 2)\n\n product = Product.objects.get(id=self.product_1.id)\n self.assertEqual(product.name, 'Updated name')", "def test_update_group(self):\n pass", "def test_update_all(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 13})\n assert n_updated == 3\n items = list(test_store.get_by())\n\n andy.age = pandy.age = 13\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_update(self):\n query = \"insert into cds values(%s,%s,%s,%s)\"\n values = (156098,\"haha\",\"haha 5\",2)\n self.a.insert(query, values)\n query1 = \"update cds set Quantity=%s where id=%s\"\n values1 = (5, 156098)\n self.a.update(query1, 
values1)\n query2 = \"select * from cds where id=156609\"", "def test_update_book(self):\n\n delete_books()\n\n book = create_book(\"title one\")[\"book\"]\n\n with test_client.put(\n \"/book/{}/\".format(book[\"id\"]),\n data={\n \"title\": \"title one updated\"\n }\n ) as response:\n\n self.assertEqual(\n json.loads(\n response.get_data(as_text=True)\n ),\n {\n \"status\": \"success\",\n \"book\": {\n **book,\n \"title\": \"title one updated\"\n }\n }\n )\n\n self.assertEqual(\n read_book(book[\"id\"]),\n {\n \"status\": \"success\",\n \"book\": {\n **book,\n \"title\": \"title one updated\"\n }\n }\n )\n\n \"\"\"\n clear the table, create several books, update them and read them\n \"\"\"\n\n delete_books()\n\n book_one = create_book(\"title one\")[\"book\"]\n book_two = create_book(\"title two\")[\"book\"]\n\n with test_client.put(\n \"/book/{}/\".format(book_one[\"id\"]),\n data={\n \"title\": \"title one updated\"\n }\n ) as response:\n\n self.assertEqual(\n json.loads(\n response.get_data(as_text=True)\n ),\n {\n \"status\": \"success\",\n \"book\": {\n **book_one,\n \"title\": \"title one updated\"\n }\n }\n )\n\n self.assertEqual(\n read_book(book_one[\"id\"]),\n {\n \"status\": \"success\",\n \"book\": {\n **book_one,\n \"title\": \"title one updated\"\n }\n }\n )\n\n with test_client.put(\n \"/book/{}/\".format(book_two[\"id\"]),\n data={\n \"title\": \"title two updated\"\n }\n ) as response:\n\n self.assertEqual(\n json.loads(\n response.get_data(as_text=True)\n ),\n {\n \"status\": \"success\",\n \"book\": {\n **book_two,\n \"title\": \"title two updated\"\n }\n }\n )\n\n self.assertEqual(\n read_book(book_two[\"id\"]),\n {\n \"status\": \"success\",\n \"book\": {\n **book_two,\n \"title\": \"title two updated\"\n }\n }\n )", "async def test_update(self):\n await self.collection.create({'id': 'foo', 'value': 'bar'})\n updated = await self.resource.update('foo', {'value': 'new'})\n self.assertEqual({'id': 'foo', 'value': 'new'}, updated)", "def test_CovidCase_update(self):\n u_Covid = self.update_CovidCase()\n c = CovidCase.objects.get(country_id=\"UP\")\n c.name_en = \"New name\"\n c.save()\n\n self.assertEqual(c.name_en, \"New name\")", "def test_update(self):\n s1 = Square(2)\n\n s1.update(10)\n self.assertEqual(s1.id, 10)\n\n s1.update(10, 4)\n self.assertEqual(s1.size, 4)\n self.assertEqual(s1.width, 4)\n self.assertEqual(s1.height, 4)\n\n s1.update(10, 4, 9)\n self.assertEqual(s1.x, 9)\n\n s1.update(10, 4, 9, 7)\n self.assertEqual(s1.y, 7)", "def test_update(self):\n s1 = Square(10, 10, 10)\n s1.update(89)\n self.assertEqual(89, s1.id)\n s1.update(79, 2)\n self.assertEqual(79, s1.id)\n self.assertEqual(2, s1.size)\n s1.update(9, 1, 3)\n self.assertEqual(9, s1.id)\n self.assertEqual(1, s1.size)\n self.assertEqual(3, s1.x)\n s1.update(89, 2, 4, 5)\n self.assertEqual(89, s1.id)\n self.assertEqual(2, s1.size)\n self.assertEqual(4, s1.x)\n self.assertEqual(5, s1.y)\n\n # update with kwargs\n s4 = Square(5, 5, 5, 5)\n s4.update(id=1)\n self.assertEqual(1, s4.id)\n s4.update(id=4, size=3)\n self.assertEqual(4, s4.id)\n self.assertEqual(3, s4.size)\n s4.update(x=1, y=3, size=4)\n self.assertEqual(1, s4.x)\n self.assertEqual(3, s4.y)\n self.assertEqual(4, s4.size)\n s4.update(id=3, size=9, x=2, y=2)\n self.assertEqual(3, s4.id)\n self.assertEqual(9, s4.size)\n self.assertEqual(2, s4.x)\n self.assertEqual(2, s4.y)", "def test_updateContact(self):\n qs = Contact.objects.all()\n contact = qs[0]\n contact2 = Contact.objects.get(id=contact.id)\n to_update_value = 'address 2'\n 
contact2.address = to_update_value\n contact2.save()\n # refresh from db\n contact3 = Contact.objects.get(id=contact.id)\n self.assertEqual(contact3.address, to_update_value)", "def test_update(self):\n payload = {\n 'id': self.rout1.id,\n 'name': 'Tuesday routine',\n 'exercises': [self.exer1.id]\n }\n response = self.client.put(\n '/routines/{}/'.format(self.rout1.id), data=payload)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(\n Routine.objects.get(id=self.rout1.id).name, payload['name'])", "def test_update(self):\n\n # Test that instances without application information cannot be started\n incomplete_instance = Instance(self.client, 'foo')\n with self.assertRaises(ValueError):\n incomplete_instance.update()\n\n value = self.instance.update()\n update_instance = self.client.update_instance\n update_instance.assert_called_once_with('nginx', 'nginx', 'latest',\n parameters={\n 'SETTING': 'value'\n },\n options={\n 'storageBucket': 'custom'\n })\n self.assertEqual(value, update_instance.return_value)", "def test_otoroshi_controllers_adminapi_tcp_service_api_controller_update_entity_action(self):\n pass", "def test_update__endtoend__1(search_data, browser):\n # The `searchDataS` fixture defines some persons. When user searches for\n # them all persons are selected by default so he only has to select the\n # `update` search handler to perform a multi-update:\n browser.login('mgr')\n browser.keyword_search('family', apply='Update')\n\n # The user is guided through the update using a wizard.\n # 1st) Choose a field for update:\n assert ['person -- first name', 'person -- last name',\n 'person -- birth date'] == browser.getControl(\n 'field').displayOptions[:3]\n browser.getControl('field').displayValue = ['person -- notes']\n browser.getControl('Next').click()\n\n # 2nd) Enter a new value for the selected field and choose an operation\n # which defaults to 'append':\n assert ['append new value to existing one'] == browser.getControl(\n 'operation').displayValue\n browser.getControl('new value', index=0).value = '\\tfoobar'\n browser.getControl('Next').click()\n\n # 3rd) Check result:\n assert 2 == browser.contents.count('\\tfoobar')\n\n # 4th) Hitting `Complete` persists the change and redirects to the person\n # list, displaying a message:\n browser.getControl('Complete').click()\n assert browser.PERSONS_LIST_URL == browser.url\n assert 'Data successfully updated.' 
== browser.message\n\n # The fields got changed as promised in the message:\n browser.getLink('Person list').click()\n browser.getLink('Koch').click()\n assert 'father-in-law\\tfoobar' == browser.getControl('notes').value", "def test_control_update(self):\n test_date = datetime.datetime.utcnow().strftime(\"%Y-%m-%d\")\n _, email = parseaddr(settings.SYNC_SERVICE_USER)\n person = factories.PersonFactory(email=email)\n control = factories.ControlFactory(modified_by=person)\n response = self.api.get(control, control.id)\n response.json[\"control\"].pop(\"selfLink\")\n response.json[\"control\"].pop(\"viewLink\")\n control_body = response.json[\"control\"]\n control_body.update({\n \"title\": \"updated_title\",\n \"created_at\": test_date,\n \"updated_at\": test_date,\n \"kind\": \"test kind\",\n \"means\": \"test means\",\n \"verify_frequency\": \"test frequency\",\n \"assertions\": '[\"test assertions\"]',\n \"categories\": '[\"test categories\"]',\n })\n\n response = self.api.put(\n control,\n control.id,\n data=response.json,\n )\n\n expected_assertions = control_body.pop(\"assertions\")\n expected_categories = control_body.pop(\"categories\")\n self.assertEqual(\n response.json[\"control\"].get(\"assertions\"),\n json.loads(expected_assertions),\n )\n self.assertEqual(\n response.json[\"control\"].get(\"categories\"),\n json.loads(expected_categories),\n )\n self.assert_response_fields(response.json[\"control\"], control_body)\n control = all_models.Control.query.get(control.id)\n self.assert_object_fields(control, control_body)\n self.assertEqual(control.assertions, expected_assertions)\n self.assertEqual(control.categories, expected_categories)\n revision = db.session.query(all_models.Revision).filter(\n all_models.Revision.resource_type == \"Control\",\n all_models.Revision.resource_id == control.id,\n all_models.Revision.action == \"modified\",\n all_models.Revision.created_at == control.updated_at,\n all_models.Revision.updated_at == control.updated_at,\n all_models.Revision.modified_by_id == control.modified_by_id,\n ).one()\n self.assertIsNotNone(revision)", "def test_update_connector(self):\n pass", "def test_update(self):\n\n user = CustomUser.objects.get(email=\"[email protected]\")\n user.update(first_name=\"UpdatedName\", second_name=\"UpdatedSecondName\")\n\n self.assertEqual(user.first_name, \"UpdatedName\")\n self.assertEqual(user.second_name, \"UpdatedSecondName\")", "def test_update_team(self):\n pass", "def test_update_multiple(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 14}, age=12)\n assert n_updated == 2\n items = list(test_store.get_by())\n\n andy.age = pandy.age = 14\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_update(self):\n c = city.City(name=\"Paris\")\n p1 = city.Citizen(name=\"Peter\")\n c.add(p1, rel=city.hasInhabitant)\n\n with DataspaceSession(URI) as session:\n wrapper = city.CityWrapper(session=session)\n cw = wrapper.add(c)\n session.commit()\n\n p2 = city.Citizen(name=\"Georg\")\n cw.add(p2, rel=city.hasInhabitant)\n cw.name = \"Freiburg\"\n session.commit()\n\n check_state(self, c, p1, p2, db=DB)", "def test_multiple_updates(self):\n response = self.api.put(self.assessment, {\"test_plan\": \"steps\"})\n self.assert200(response)\n\n response = self.api.put(self.assessment, {\"title\": \"new title\"})\n self.assert200(response)\n\n notifs, notif_data = common.get_daily_notifications()\n updated = notif_data[\"[email protected]\"][\"assessment_updated\"]\n 
self.assertEqual(len(notifs), 1)\n self.assertEqual(\n updated[self.assessment.id][\"updated_data\"][\"TITLE\"],\n (\"new title\", \"Assessment1\")\n )\n self.assertEqual(\n updated[self.assessment.id][\"updated_data\"][\"ASSESSMENT PROCEDURE\"],\n (\"steps\", \"\")\n )", "def test_duo_application_update(self):\n pass", "def test_request_do_update(test_dao, test_configuration):\r\n DUT = dtcFunction(test_dao, test_configuration, test=True)\r\n DUT.request_do_select_all(revision_id=1)\r\n\r\n assert not DUT.request_do_update(1)", "def test_update_other_fields(auth_client):\n account_ids = prep_database(auth_client.sqla)\n\n # For each of the accounts, grab the current value of the \"other\" fields.\n expected_by_id = {}\n for account_id in account_ids:\n current_account = auth_client.sqla.query(Account).filter_by(id=account_id).first()\n expected_by_id[account_id] = {\n 'username': current_account.username,\n 'active': current_account.active\n }\n\n for account_id in account_ids:\n payload = {}\n\n if flip():\n # Randomly update the username.\n new_username = username_factory()\n expected_by_id[account_id]['username'] = new_username\n payload['username'] = new_username\n if flip():\n # Randomly update the active flag.\n new_active = flip()\n expected_by_id[account_id]['active'] = new_active\n payload['active'] = new_active\n\n # At this point, we'll have constructed a payload that might have zero of more\n # of the fields. This lets us test various combinations of update requests.\n # The expected_by_id dictionary stores the values we expect to see in the database,\n # whether the original value retrieve earlier or the newly updated on just\n # created.\n\n # It's possible that none of the fields will have been selected for update,\n # which doesn't make much sense, but we'll still test for that possibility.\n\n resp = auth_client.patch(url_for('people.update_account', account_id=account_id), json=payload)\n assert resp.status_code == 200\n\n for account_id in account_ids:\n updated_account = auth_client.sqla.query(Account).filter_by(id=account_id).first()\n assert updated_account is not None\n assert updated_account.username == expected_by_id[account_id]['username']\n assert updated_account.active == expected_by_id[account_id]['active']", "def test_02_product_update(self):\n # Update new product state2 from default draft to sellable\n new_product = self.create_product()\n self.assertEqual(new_product.state2, 'draft')\n new_product.state2 = 'sellable'\n self.assertEqual(new_product.state2, 'sellable')\n\n # Same but to an existing demo product.\n demo_product = self.product_obj.browse(\n self.ref('product_lifecycle.product_product_4g'))\n self.assertEqual(demo_product.state2, 'sellable')\n demo_product.state2 = 'draft'\n self.assertEqual(demo_product.state2, 'draft')\n\n # Update new product invividual field (field defined in product.product\n # model).\n self.assertEqual(new_product.default_code, 'A2330')\n new_product.default_code = 'A2330-1'\n self.assertEqual(new_product.default_code, 'A2330-1')\n\n # Same but to an existing demo product.\n self.assertEqual(demo_product.default_code, 'A2329')\n demo_product.default_code = 'A2329-1'\n self.assertEqual(demo_product.default_code, 'A2329-1')\n\n # Update new product commom characteristic (field defined in\n # product.template) and check that affects the another product\n # variants\n self.assertFalse(new_product.description)\n new_product.description = 'This is a New Product'\n self.assertEqual(new_product.description, 'This is a New 
Product')\n self.assertEqual(demo_product.description, 'This is a New Product')\n demo_product.description = False\n self.assertFalse(demo_product.description)", "def test_partial_update(self):\n\n action = ActionFactory.create(id=22)\n data = {\n 'name': 'Ação para Melhorar',\n 'institution': 'Vamos Ajudar',\n }\n self.assertNotEqual(action.name, data['name'])\n self.assertNotEqual(action.institution, data['institution'])\n\n response = self.client.patch(reverse('action-detail', args=[23]), data=data)\n self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)\n\n response = self.client.patch(reverse('action-detail', args=[22]), data=data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data['name'], data['name'])\n self.assertEqual(response.data['institution'], data['institution'])", "def test_update_update_has_a_value(self):\n self.Person.drop_collection()\n\n author = self.Person.objects.create(name=\"Test User\")\n\n with pytest.raises(OperationError):\n self.Person.objects(pk=author.pk).update({})\n\n with pytest.raises(OperationError):\n self.Person.objects(pk=author.pk).update_one({})", "def test_update_model(self):\r\n m0 = TestUpdateModel.create(count=5, text='monkey')\r\n\r\n # independently save over a new count value, unknown to original instance\r\n m1 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\r\n m1.count = 6\r\n m1.save()\r\n\r\n # update the text, and call update\r\n m0.text = 'monkey land'\r\n m0.update()\r\n\r\n # database should reflect both updates\r\n m2 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\r\n self.assertEqual(m2.count, m1.count)\r\n self.assertEqual(m2.text, m0.text)", "def test_update_metadata1(self):\n pass", "def test_teams_partial_update(self):\n pass", "def test_update_item_using_post(self):\n pass", "def test_update_instance_limit1(self):\n pass", "def test_update_model(self):\n m0 = TestUpdateModel.create(count=5, text='monkey')\n\n # independently save over a new count value, unknown to original instance\n m1 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\n m1.count = 6\n m1.save()\n\n # update the text, and call update\n m0.text = 'monkey land'\n m0.update()\n\n # database should reflect both updates\n m2 = TestUpdateModel.get(partition=m0.partition, cluster=m0.cluster)\n self.assertEqual(m2.count, m1.count)\n self.assertEqual(m2.text, m0.text)", "def test_update_note(self):\n pass", "def test_update_batch(self):\n self.batch_data['batch_id'] = self.batch_info.id\n resp = self.query_with_token(\n self.access_token_master,\n update_batch_info.format(**self.batch_data))\n\n self.assertIn('data', resp)\n self.assertEqual(\n resp['data']['updateBatchInfo']['batchInfo']['supplier']['name'],\n self.supplier.name)\n self.assertEqual(\n resp['data']['updateBatchInfo']['batchInfo']['batchNo'],\n self.batch_info.batch_no)", "def test_update_jwp(self):\n v1, = set_resources_and_sync([make_video(media_id='1234')])\n jwp1 = jwpmodels.Video.objects.get(key=v1.key)\n self.assertEqual(jwp1.updated, v1['updated'])\n\n v1['updated'] += 20\n v1, = set_resources_and_sync([v1])\n jwp1 = jwpmodels.Video.objects.get(key=v1.key)\n self.assertEqual(jwp1.updated, v1['updated'])", "def test_fetch_cases(mocked_update, mocked_fogbugz, transactional_db, case):\n mocked_case = mock.Mock()\n mocked_fogbugz.return_value.search.return_value.findAll.return_value = [mocked_case]\n mocked_case.attrs = dict(ixbug=case.id)\n mocked_case.sfixfor.string = '1516'\n 
mocked_case.dtfixfor.string = '2015-01-18T23:00:00Z'\n mocked_case.dtlastupdated.string = '2015-01-18T23:00:00Z'\n mocked_case.stitle.string = 'Some title'\n mocked_case.soriginaltitle.string = 'Some original title'\n mocked_case.cixproject.string = 'some-ci-project'\n mocked_case.sproject.string = 'Some project'\n mocked_case.sarea.string = 'Some area'\n fetch_cases()\n mocked_update.apply_async.assert_called_once_with(kwargs=dict(case_id=case.id))", "def test_update_with_no_matches(test_store, andy, pandy, candy):\n n_updated = test_store.update(fields={\"age\": 15}, name=\"Mark\")\n assert n_updated == 0\n\n items = list(test_store.get_by())\n assert len(items) == 3\n assert andy in items\n assert pandy in items\n assert candy in items", "def test_update_sample(self):\n response = self.client.post(reverse('update-proband', args=[self.gel_ir.id]),\n {'outcome': 'testoutcome',\n 'comment': 'testcomment',\n 'case_status': 'N',\n 'pilot_case': True,\n 'mdt_status': 'R',\n 'case_sent': False,\n 'no_primary_findings': False},\n follow=True)\n self.assertContains(response, 'Proband Updated')\n self.assertEquals(response.status_code, 200)\n proband = Proband.objects.get(id=self.proband.id)\n gelir = GELInterpretationReport.objects.get(id=self.gel_ir.id)\n self.assertEqual(proband.comment, 'testcomment')\n self.assertEqual(gelir.pilot_case, True)", "def test_product_update(self):\n # first performe create\n id = self._create_model(\"product\", self.product_data, [\"name\", \"description\", \"image_link\", \"price\"])\n if id:\n # then performe update\n data = { \n \"name\": \"Changed the name\",\n \"description\": self.product_data[\"description\"],\n \"image_link\": self.product_data[\"image_link\"],\n \"price\": self.product_data[\"price\"]\n }\n self._update_model(\"product\", id, data, [\"name\"])\n self.assertIsNotNone(id)", "def test_update_many(self):\n sample_input = \"\"\"\nfoo=100\nbar=200, baz=300\n\"\"\"\n self.assertNotEquals(self.param_dict.get(\"foo\"), 100)\n self.assertNotEquals(self.param_dict.get(\"bar\"), 200)\n self.assertNotEquals(self.param_dict.get(\"baz\"), 300)\n result = self.param_dict.update_many(sample_input)\n log.debug(\"result: %s\", result)\n self.assertEquals(result[\"foo\"], True)\n self.assertEquals(result[\"bar\"], True)\n self.assertEquals(result[\"baz\"], True)\n self.assertEquals(self.param_dict.get(\"foo\"), 100)\n self.assertEquals(self.param_dict.get(\"bar\"), 200)\n self.assertEquals(self.param_dict.get(\"baz\"), 300)", "def test_updateContact(self):\n response = self.client.get(self.url)\n qs = response.json()\n contact = qs[0]\n to_update_value = 'address 2'\n contact['address'] = to_update_value\n response = self.client.put(self.url + str(contact['id']) + '/', contact, content_type=\"application/json\")\n self.assertEqual(response.status_code, 200)\n contact2 = response.json()\n self.assertEqual(contact2['address'], to_update_value)", "def test_vault_update_vault_item(self):\n pass", "def test_update(self):\n doc_fields = document_fields.DocumentFields({\n 'foo@': 'bar',\n })\n self.assertEquals('bar', doc_fields['foo'])\n doc_fields.update({\n 'foo@': 'bbq',\n })\n self.assertEquals('bbq', doc_fields['foo'])", "def test_update(self):\n tz = pytz.timezone(settings.TIME_ZONE)\n self.assertFalse(self.user1.o365_licence)\n url = '/api/users/{}/'.format(self.user1.ad_guid)\n data = {\n 'Surname': 'Lebowski',\n 'title': 'Bean Counter',\n 'o365_licence': True,\n\n 'email' : '[email protected]' ,\n 'name' : 'Mike' ,\n 'username' : 'MikeLebowski' ,\n 
'ad_guid' : '123',\n 'expiry_date' : '2019-03-12',\n 'given_name' : 'Mike',\n #'Enabled' :'True',\n 'active' : True,\n 'deleted' : False,\n\n\n\n }\n response = self.client.put(url, json.dumps(data), content_type='application/json')\n self.assertEqual(response.status_code, 202)\n user = DepartmentUser.objects.get(pk=self.user1.pk) # Refresh from db\n self.assertEqual(user.surname, data['Surname'])\n self.assertEqual(user.title, data['title'])\n\n self.assertEqual(user.name , data['name'])\n self.assertEqual(user.email, data['email'])\n self.assertEqual(user.username, data['username'])\n\n #self.assertEqual(user.expiry_date, data['expiry_date'])\n\n self.assertEqual(user.ad_guid, data['ad_guid'])\n\n self.assertEqual(user.expiry_date, tz.localize(parse(data['expiry_date'])))\n\n self.assertEqual(user.given_name, data['given_name'])\n #self.assertEqual(user.active, data['Enabled'])\n self.assertEqual(user.active, data['active'])\n self.assertEqual(user.ad_deleted, data['deleted'])\n\n self.assertTrue(user.o365_licence)\n self.assertTrue(user.in_sync)", "def test_update_metadata(self):\n pass", "def test_meeting_update(self):\n pass", "def test_change_provisioned_throughput_usual_case():", "def test_update(self, mock_put):\n self.policies.update(id=333114, policy_update=self.policy_show_response)\n\n mock_put.assert_called_once_with(\n url='https://api.newrelic.com/v2/alert_policies/333114.json',\n headers=self.policies.headers,\n data=json.dumps(self.policy_show_response)\n )", "def test_client_nationlity_partial_update(self):\n pass", "def test_update():\n payload = {'age': 99}\n sample_uuid = get_sample_id()\n response = requests.put(f'http://localhost:5000/api/persons/{sample_uuid}', json=payload)\n data = response.json()\n\n assert response.status_code == 200\n for field in FIELDS:\n assert field in data", "def test_full_update(self):\n recipe = create_sample_recipe(user=self.user)\n recipe.ingredients.add(create_sample_ingredient(\n user=self.user,\n name='Fries'\n ))\n payload = {\n \"title\": \"New Cuisine\",\n \"price\": 5.00,\n \"time_minutes\": 90\n }\n recipe_url = create_detail_url(recipe.id)\n self.client.put(recipe_url, payload)\n recipe.refresh_from_db()\n ingredients = recipe.ingredients.all()\n self.assertEqual(recipe.title, payload['title'])\n self.assertEqual(recipe.time_minutes, payload['time_minutes'])\n self.assertEqual(len(ingredients), 0)", "def test_update_book_details(self):\n\n first_book_list = BookList()\n first_book = Book()\n\n first_book.create_book({\n \"title\": \"First Man\",\n \"author\": \"James R. Hansen\",\n \"year\": 2005,\n \"publisher_name\": \"Simon & Schuster\",\n \"publication_date\": \"01/01/2018\",\n \"num_copies\": 1\n })\n\n first_book_list.add_book(first_book)\n\n new_book_details = {\n \"title\": \"First Man\",\n \"author\": \"James Hansen\",\n \"year\": 2018,\n \"publisher_name\": \"Simon & Schuster\",\n \"publication_date\": \"01/01/2018\",\n \"num_copies\": 5\n }\n\n assert first_book_list.update_book_details(new_book_details) == True\n assert first_book_list.find_book(\"First Man\") == True\n\n for book in first_book_list.show_all():\n assert book.get(\"title\") == \"First Man\"\n assert book.set(\"title\", \"First Man: The Life of Neil A. Armstrong\") == True\n\n assert first_book_list.find_book(\"First Man: The Life of Neil A. 
Armstrong\") == True", "def test_update_book(self):\n book_information = self.books_from_json[0]\n book_id = '60773a16cb838494e13d3652'\n self.books.update = MagicMock(return_value=None) # success on update\n update_book = self.books.update_details(book_id, self.books_from_json[0])\n self.assertEqual(\"Mock Book updated!\", update_book['flash_message'])", "def test_update(self):\n # Sanity check: Ensure no verification deadline is set\n assert VerificationDeadline.deadline_for_course(self.course.id) is None\n\n # Generate the expected data\n now = datetime.now(pytz.utc)\n verification_deadline = now + timedelta(days=1)\n expiration_datetime = now\n response, expected = self._get_update_response_and_expected_data(expiration_datetime, verification_deadline)\n\n # Sanity check: The API should return HTTP status 200 for updates\n assert response.status_code == 200\n\n # Verify the course and modes are returned as JSON\n actual = json.loads(response.content.decode('utf-8'))\n assert actual == expected\n\n # Verify the verification deadline is updated\n assert VerificationDeadline.deadline_for_course(self.course.id) == verification_deadline" ]
[ "0.8599205", "0.849513", "0.849513", "0.849513", "0.8350617", "0.8206942", "0.81498384", "0.81489325", "0.78003776", "0.75835353", "0.75727797", "0.7463717", "0.7440917", "0.7428357", "0.7388373", "0.7385997", "0.73503935", "0.7333985", "0.73062307", "0.7268944", "0.72565717", "0.72371036", "0.7208849", "0.71794355", "0.7158332", "0.7153649", "0.714196", "0.7074285", "0.7041866", "0.7027655", "0.70050293", "0.6985807", "0.698561", "0.69784176", "0.6941041", "0.69402564", "0.69354755", "0.69152564", "0.69141376", "0.69136554", "0.6900963", "0.6882495", "0.68822217", "0.68705237", "0.6855842", "0.68557745", "0.6844011", "0.68426627", "0.6832093", "0.68319494", "0.6827568", "0.6824155", "0.6818332", "0.68150645", "0.6804126", "0.6803718", "0.68021786", "0.6799858", "0.67889005", "0.67858547", "0.67832786", "0.6774896", "0.6772874", "0.67718995", "0.6747758", "0.6747477", "0.6745123", "0.6738663", "0.67352706", "0.6728797", "0.67108303", "0.67082727", "0.66996205", "0.66981965", "0.66935354", "0.66908604", "0.66895205", "0.66894686", "0.6687551", "0.668293", "0.66698986", "0.66569597", "0.6655934", "0.66542184", "0.66496927", "0.6649689", "0.6649529", "0.6645383", "0.6643107", "0.66353035", "0.6627825", "0.6625791", "0.6625714", "0.66246486", "0.66210914", "0.6617627", "0.66149634", "0.66141087", "0.6601552", "0.6595369" ]
0.93751144
0
Set state of key exchange process
def _set_state(self, state):
    #print("** set state from %d to %d" % (self.state, state))
    self.state = state
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __setstate__(self, state):\n state['_lock'] = Lock()\n self.__dict__.update(state)", "def __setstate__(self, state):\n\n self.set(DER = state)", "def set_state(self, state: int):", "def set_state( self ):", "def set_state(self, state: ProcessStateStr | core.QProcess.ProcessState):\n self.setProcessState(PROCESS_STATES.get_enum_value(state))", "def __setstate__(self, state):\n self.__dict__.update(state)", "def __setstate__(self, state: dict) -> None: # pragma: no cover\n self.__dict__.update(state)\n self.rFp = {}\n self.wFp = {}\n self.Fp = ChainMap(self.rFp, self.wFp)\n self.open(mode=self.mode)", "def state(self, state: str) -> None:\n try:\n self._redis.set(self._namespace(\"state\"), str(state))\n except RedisError:\n self.logger.error(\"RedisError\", exc_info=True)", "def set_state(self,state):\n self.__state = state", "def set_state(self, state):\n self._env.set_state(state)", "def send_state(self, key=None):\n state = self.get_state(key=key)\n if len(state) > 0:\n if self._property_lock: # we need to keep this dict up to date with the front-end values\n for name, value in state.items():\n if name in self._property_lock:\n self._property_lock[name] = value\n state, buffer_paths, buffers = _remove_buffers(state)\n msg = {'method': 'update', 'state': state, 'buffer_paths': buffer_paths}\n self._send(msg, buffers=buffers)", "def set_state(self, state_dict: Dict) -> None:\n self._state_waiting_to_be_consumed.update(state_dict)", "def set_state(self, state):\n self.state = state", "def set_state(self, state, input_index, **kwargs):\n if state == WAITING and input_index == 1:\n self.state = state", "def state(self, state):\n self._state = state", "def __setstate__(self, state):\n\n for key, value in state.items():\n if key in self.__slots__:\n setattr(self, key, value)", "def set_state(self, state, result=_NOT_SET, exception=None):\n if state not in [TASK_PENDING, TASK_SENDING, TASK_SENT, TASK_STARTED,\n TASK_RESCHEDULED, TASK_SUCCEEDED, TASK_FAILED]:\n raise RuntimeError('Illegal state set on task: {0} '\n '[task={1}]'.format(state, str(self)))\n if self._state in TERMINATED_STATES:\n return\n self._state = state\n if self.stored:\n self._update_stored_state(\n state, result=result, exception=exception)\n if not self.stored:\n event = {}\n if result is not _NOT_SET:\n event['result'] = result\n elif exception:\n event['exception'] = exception\n try:\n self.workflow_context.internal.send_task_event(\n state, self, event)\n except RuntimeError:\n pass\n if state in TERMINATED_STATES:\n self.is_terminated = True", "def __setstate__(self, state):\n if len(state) != 1:\n raise TypeError('Invalid state length, expected 1; received %i' %\n len(state))\n kwargs = state[0]\n if not isinstance(kwargs, dict):\n raise TypeError('Key accepts a dict of keyword arguments as state; '\n 'received %r' % kwargs)\n self.__reference = None\n self.__pairs = tuple(kwargs['pairs'])\n self.__app = kwargs['app']\n self.__namespace = kwargs['namespace']", "def set_state(self, state: int):\n self.state = state", "def key_state(self, key):\r\n return self.handler.key_state(key_to_code(key))", "def set_state(self,s):\n self.state = s", "def __setstate__(self, state):\n self.__dict__ = dict(state)\n self._init_compiled()", "def __change_state(self, state):\n self.state = state", "def __setstate__(self, _state : dict):\n self.__init__(**_state)", "def __setstate__(self, state):\n return None", "def state(self, state: str) -> None:\n self._state = state", "def state(self, state):\n\n self._state = state", "def 
state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def state(self, state):\n\n self._state = state", "def __setstate__(self, message):\n self._message = message", "def set_state(cls, t_state, t_msg):\n cls.state['TASK_STATE'] = t_state\n cls.state['TASK_INFO'] = t_msg\n\n print str(t_state) + ': ' + str(t_msg)\n\n if cls.USE_CELERY:\n # send message to the front end\n current_task.update_state(state=t_state, meta={'result': t_msg})\n\n state = cls.state['TASK_STATE']\n info = cls.state['TASK_INFO']\n ret = {'state': state, 'info': info}\n current_task.info = json.dumps(ret)\n\n print '%s: %s' % (t_state, t_msg)", "def set_state(self, service, key, value, context=None):\n return self._client.call_method(\n 'UserAndJobState.set_state',\n [service, key, value], self._service_ver, context)", "def _perform_key_exchange(self, query_entry):\n if KeyType.retry_timer in query_entry.data and query_entry.data[KeyType.retry_timer]:\n message_key_types.unset_cipher(self.pending_key_name)\n self.pending_key_name = None\n self._set_state(KeyExchangeManager.STATE_REQUESTING)\n #print(\"# (%d) _perform_key_exchange: to\" % int(time.time()), self.counter_node_id.hex())\n self.secret_key, self.peer_public_key, self.pending_key_name = message_key_types.get_ECDH_parameters()\n self.nonce = os.urandom(16)\n self.random = os.urandom(8)\n ret = self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, \"request\",\n self.peer_public_key, self.nonce, self.random,\n self.pending_key_name)\n if not ret:\n self._set_state(KeyExchangeManager.STATE_NONE)\n message_key_types.unset_cipher(self.pending_key_name)\n message_key_types.unset_cipher(self.key_name)\n self.secret_key = None\n self.peer_public_key = None\n self.pending_key_name = None\n self.nonce = None\n self.random = None\n return\n rand_time = KeyExchangeManager.KEY_EXCHANGE_RETRY_INTERVAL*random.uniform(0.5, 1.5)\n self.set_invoke_timer(rand_time, retry_entry=True)", "def setstate(self,name,state):\n if (name not in KFNode.names):\n print ' state name ',name,' not in KNode!'\n self.states[name]=state.copy()\n self.status = name\n return", "def __setstate__(self, d):\n self.__dict__.update(d)\n self.__queueLock = threading.RLock()", "def set_power_state(self, task, pstate):\n _set_and_wait(task, pstate)", "def set_to_process(self, key, value):\n self.__manager_dic[key] = value", "def setState(self, state):\n self.state = state", "def set_state(self, state: Any) -> None:\n raise NotImplementedError(\n 'This environment has not implemented `set_state()`.'\n )", "def setExchange(self, exchange):\r\n\t\tself.pair.exchange = exchange", "def set_state(self, state):\n return self.update(current_state=state)", "def state(self, state: str):\n\n self._state = state", "def __setstate__(self, dict):\n self.__dict__.update(dict)\n self.start_callback = None\n self.finalize_callback = None", "def __init__(__self__, *,\n key_name: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input['DatabaseEncryptionState']] = None):\n if key_name is not 
None:\n pulumi.set(__self__, \"key_name\", key_name)\n if state is not None:\n pulumi.set(__self__, \"state\", state)", "def __setstate__(self, state: Dict[str, Any]):\n self.__dict__.update(state)\n self.__dict__['__db'] = None", "def setstate(self, state):\n\t\tif not self._input: raise PlumberExceptions.PipeTypeException(self)\n\t\tself._state.ustate = state", "def state_processing_enter(cfg, app, win):", "def set_state(self, value):\n self.state = value", "def __setstate__(self, state):\n params, weights = state\n #self.set_params(**params)\n #self.ready()\n self._set_weights(weights)", "def __setstate__(self, state):\n params, weights = state\n #self.set_params(**params)\n #self.ready()\n self._set_weights(weights)", "def keyExchangeServer():\n with socket.socket() as sock:\n try:\n # if the port number already taken, the following line will not work\n sock.bind((GW_ADRRESS, KEY_EXCHANGE_PORT))\n print(\"success in binding\")\n except:\n print(\"error in binding\")\n sys.exit()\n sock.listen(0)\n while True:\n client_socket, client_address = sock.accept()\n valid, data = get_msg(client_socket)\n if valid:\n data = data.split(\" \")\n if data[0] == HELLO_MASSEGE:\n if (client_address[0], int(data[1])) not in key_dic:\n key = int.from_bytes(Fernet.generate_key(), \"big\")\n else:\n key = key_dic[(client_address[0], int(data[1]))]\n client_socket.send(create_msg(get_key(key)).encode())\n key_dic[(client_address[0], int(data[1]))] = key\n print(key_dic)", "def _localSetState(self,pdict):\n pass", "async def async_set_state(self, state):\n self._state = state", "def set_state(canvas, state):\n for key, value in state.items():\n set_attribute(canvas, key, value)", "def process(self, new_state, **args):\n self._state = new_state", "def set_state(self, state):\n #print(\"ComponentBase.set_state\")\n for k,v in state.items():\n #print(\" Set {:14s} to {:s}\".format(k,str(v)))\n if k == \"connectors\":\n for con_state in v:\n self.add_connector() \n self.connectors[-1].set_state(con_state)\n else:\n setattr(self, k, v)", "def set(self, key):\n if key == 0:\n self._servo.d_key(.1)\n elif key == 1:\n self._servo.ctrl_d(.1)\n elif key == 2:\n self._servo.ctrl_u(.1)\n elif key == 3:\n self._servo.ctrl_enter(.1)\n elif key == 4:\n self._servo.enter_key(.1)\n elif key == 5:\n self._servo.refresh_key(.1)\n elif key == 6:\n self._servo.ctrl_refresh_key(.1)\n elif key == 7:\n self._servo.sysrq_x(.1)\n else:\n raise kbError(\"Unknown key enum: %s\", key)", "def set_task_state(self, task, state):\n self._gdb_interface.set_task_state(task, state)", "def set_state(self, new_state):\n self.state = new_state", "def __setstate__(self, state):\n self.__dict__ = state\n self.get_esoh_solver = lru_cache()(self._get_esoh_solver)", "def set_key(attr):\n cmds.setKeyframe(attr)", "def __setstate__(self, state):\n # Restore instance attributes\n try: \n obj = Thing.ID_dict[state['id']] # is this obj already in dict?\n dbg.debug(\"Note: %s already in Thing.ID_dict, maps to %s\" % (state['id'], obj))\n except KeyError: # Not already in dict\n Thing.ID_dict[state['id']] = self\n if 'has_beat' in state:\n Thing.game.register_heartbeat(self)\n self.__dict__.update(state)", "def keyExchangeClient(port):\n try:\n with socket.socket() as sock:\n sock.connect((GW_IP, KEY_EXCHANGE_PORT))\n sock.send(create_msg(HELLO_MASSEGE + \" \" + str(port)).encode())\n valid, data = get_msg(sock)\n if valid:\n key = get_key(data)\n key_dict[port] = key\n print(key_dict)\n except Exception as e:\n print(e, \"error in keyExchangeClient\")", 
"def SetState(self, new_state):\r\n\r\n self.state = new_state", "def begin_set_key(self):\n\t\tself.setting_key = True\n\t\tself.player.current_level.screen_manager.set_controls(ControlSettingControls(self))", "def __setstate__(self, state):\n self.__dict__ = state\n self.freshly_loaded = True", "def set_workflow_state(self, state):\n self._gdb_interface.set_workflow_state(state)", "def on_key_status(self, key):\n self.core.log.info(\"Key Manager key update\")\n client_proto = self.core.get_client_protocol(key.machine_id)\n if client_proto is not None:\n reply = ServerMsgFactory().create(kind=ServerMsgFactory.KIND_HANDSHAKE_PKEY_STATUS_RESP)\n reply.internal[\"payload\"] = key.status\n client_proto.sendMessage(ObjectGate(reply).pack(True), True)\n if key.status != KeyStore.STATUS_ACCEPTED:\n client_proto.dropConnection()", "def _state_cb(self, msg):\n if self.current_mode == '':\n self.current_mode = msg.mode\n self.state = msg", "def set_key(self, key):\n self.key = key", "def set_state(self, state: bool) -> None:\n payload = self._cfg.state_power_on if state else self._cfg.state_power_off\n command = f\"{COMMAND_POWER}{self._cfg.idx+1}\"\n self._mqtt_client.publish(\n self._cfg.command_topic + command,\n payload,\n )", "def set_state(self, state_dict: dict) -> None:\n super().set_state(state_dict)\n\n self.seed_rng(state_dict[\"seed\"])\n self.rng.set_state(state_dict[\"rng_state\"])\n self.strategy.set_state(state_dict[\"strategy\"])", "def assign_state(self, state):\n raise NotImplementedError()", "def _localSetState(self,pdict):\n super()._localSetState(pdict)\n self.p = pdict.pop('p')", "def echo(self, key, value):\n yield self.set_state({key: value})", "def activate(ctx: CLIContext, access_key):\n with Session() as session:\n try:\n data = session.KeyPair.activate(access_key)\n except Exception as e:\n ctx.output.print_mutation_error(\n e,\n item_name='keypair',\n action_name='activation',\n )\n sys.exit(1)\n if not data['ok']:\n ctx.output.print_mutation_error(\n msg=data['msg'],\n item_name='keypair',\n action_name='activation',\n )\n sys.exit(1)\n ctx.output.print_mutation_result(\n data,\n extra_info={\n 'access_key': access_key,\n },\n )", "def __setstate__(self, state):\n version, state = state\n if version != '1.0':\n raise TypeError('Template definition mismatch')\n self.__dict__ = state", "def receive_confirmation(self):\n #print(\"(%d) receive_confirmation:\" % int(time.time()))\n #print(\" **> state:\", self.state)\n if self.state != KeyExchangeManager.STATE_CONFIRMING:\n return\n rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))\n self.set_invoke_timer(rand_time)\n self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)\n self.key_name = self.pending_key_name\n self._set_state(KeyExchangeManager.STATE_ESTABLISHED)\n #print(\"*STATE_ESTABLISHED\")", "def update(self):\n self._state = 23", "def __setstate__(self, state):\n for i, j in state.items():\n setattr(self, i, j)\n self.describer_model = _load_model(self.name)", "def __setstate__(self, state):\n for i, j in state.items():\n setattr(self, i, j)\n self.describer_model = _load_model(self.name)", "def update(self, key):\n return self.state", "def post_key(self):\n # print(self.key)\n #Sending the key to the attacker.\n s.send(bytes(\"K\\n{}\".format(str(self.key,'utf-8')),'utf-8'))", "def set_working_state(self):\n self.state = 0\n self.port = None", "def set_task_state(self, task, state):\n self._write_transaction(tx.set_task_state, task=task, state=state)", "def 
change_state(self,state):\n if self.__currentState:\n self.__currentState.stop()\n \n try:\n idler=self[state]\n except KeyError:\n raise \"%s is not a state of %s\" % (state,self)\n \n self.__currentState=idler()\n self.__currentState.idle()\n self.__currentState=None", "def set(self, state):\n if self.mode == gpio.OUT:\n # Write an event to the buffer. \n self._buffer_write(state, time.time()*1000)\n\n gpio.output(self.bcm_id, state)\n self.state = state", "def set_state(self, uState):\n self.strategy['state_handler'].set_state(self.state, uState)", "def set_state(self, state: ApplicationState) -> None:\n self.state = state\n if state == ApplicationState.IDLE:\n self.generate_cards.config(text=\"Generate Bingo Game\")\n elif state == ApplicationState.GENERATING_GAME:\n self.generate_cards.config(text=\"Stop Generating Game\")\n else: # state == ApplicationState.GAME_GENERATED\n self.generate_cards.config(text=\"Regenerate Game\")", "def set_state(self, is_on: bool) -> None:\n json_data = self.perform_command(self.commands[\"on\"] if is_on else self.commands[\"off\"])\n\n if json_data[\"system\"][\"set_relay_state\"][\"err_code\"] != 0:\n raise Exception(\"Error: Error from the smartplug: \" + json.dumps(json_data))" ]
[ "0.6264874", "0.62306124", "0.6219067", "0.62150574", "0.6114401", "0.609517", "0.6037395", "0.6025829", "0.5982595", "0.59734386", "0.5955964", "0.59348714", "0.59043247", "0.5876987", "0.5874065", "0.5861183", "0.58430445", "0.5808853", "0.5787678", "0.5764213", "0.57516503", "0.57461154", "0.57383", "0.572498", "0.5724952", "0.5723863", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.57120305", "0.5692412", "0.5687768", "0.56821203", "0.568046", "0.56801057", "0.5676495", "0.56761795", "0.56587166", "0.564937", "0.564702", "0.5636685", "0.56281763", "0.56264555", "0.562262", "0.56202847", "0.5588606", "0.5569909", "0.55314624", "0.5530005", "0.5523449", "0.5523449", "0.55178976", "0.5514474", "0.5497672", "0.5494796", "0.54922146", "0.5491516", "0.5485438", "0.54828024", "0.54794186", "0.5461191", "0.5453455", "0.54440755", "0.54438186", "0.54361045", "0.54348814", "0.5434138", "0.5419112", "0.5406636", "0.5403093", "0.5384032", "0.5374079", "0.536946", "0.5366592", "0.53639215", "0.5363798", "0.5363583", "0.53629744", "0.5362644", "0.5358107", "0.53568995", "0.53568995", "0.5351054", "0.53444976", "0.53416276", "0.533924", "0.5335712", "0.5321389", "0.5306759", "0.5303393", "0.53012824" ]
0.59008133
13
Set key to the encryptor and decryptor
def set_cipher(self, key_name, hint):
    message_key_types.set_cipher(self.shared_key, self.nonce, key_name, hint)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_key(self):\n\n self.key = Fernet.generate_key()\n self.cryptor = Fernet(self.key)", "def set_encryption(key):\n global_scope['enc'] = Encryption(key.encode())", "def get_key(self, key_value):\n # Storing the correct key value back to the self.key attributes.\n self.key=key_value\n self.cryptor=Fernet(self.key)", "def _set_key(self, key):\n\n # select 56 bits from the 64-bit key\n key = self._permutate(self.__pc1, self._string_to_bitlist(key))\n self.L = key[:28]\n self.R = key[28:]\n for i in range(0, 16):\n for j in range(0, self.__left_rotations[i]):\n self.L.append(self.L[0])\n del self.L[0]\n self.R.append(self.R[0])\n del self.R[0]\n # select 48 bits from 56 bits\n self.Kn[i] = self._permutate(self.__pc2, self.L + self.R)", "def __init__(self, key=None):\n\n self.key = key\n self.cryptor = None\n self.file_ext_targets = ['txt']", "def __init__(self, key):\n self._block_size = AES.block_size\n self._key = hashlib.sha256(get_as_bytes(key)).digest()", "def generate_key(self):\n self.key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(self.key)", "def key(self, key_val):\r\n\r\n if self.key_exists():\r\n raise EncryptedField.KeyAlreadyExists()\r\n\r\n validate_key(key_val)\r\n\r\n if self.model_class not in self._keys:\r\n EncryptedField._keys[self.model_class] = {}\r\n\r\n EncryptedField._keys[self.model_class][id(self)] = key_val", "def __init__(self, key):\n self.block_size = 16\n self.cipher = Cipher(algorithms.AES(key), modes.ECB(), default_backend())", "def write_key(self):\n\t key = Fernet.generate_key()\n\t with open(\"key.key\", \"wb\") as key_file:\n\t key_file.write(key)", "def __init__(self, encryption_method: str, encryption_key_size: int = 32, encryption_key: bytes = None,\r\n block_size: int = 32, block_mode: str = BlockMode.ECB):\r\n self.__encryption_method = encryption_method\r\n self.__encryption_key_size = encryption_key_size\r\n self.__encryption_key = encryption_key\r\n self.__block_size = block_size\r\n self.__block_mode = block_mode\r\n\r\n if self.__encryption_key is None:\r\n self.__randomize_key_on_every_encryption = True\r\n else:\r\n self.__randomize_key_on_every_encryption = False\r\n\r\n # Generate the next key to be used\r\n if self.__randomize_key_on_every_encryption:\r\n self.__encryption_key = get_random_bytes(self.__encryption_key_size)", "def setUp(self):\n\n self.private_key = self.get_new_key()\n self.public_key = self.private_key.public_key()\n\n self.pem_private_key = self.private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n )\n self.encrypted_pem_private_key = self.private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.BestAvailableEncryption(self.private_key_password),\n )\n\n self.pem_public_key = self.public_key.public_bytes(\n serialization.Encoding.PEM, serialization.PublicFormat.PKCS1\n )", "def test_rekey_defaults(self, settings):\n old_key = b'0' * 32\n new_key = b'1' * 32\n\n settings.CHITON_ENCRYPTION_KEY = new_key\n settings.CHITON_PREVIOUS_ENCRYPTION_KEY = old_key\n\n encrypted = encrypt('message', key=old_key)\n rekeyed = rekey(encrypted)\n\n assert decrypt(rekeyed) == 'message'", "def set_key(self, key):\n self.key = key", "def __init__(__self__, *,\n disk_encryption_key: Optional[pulumi.Input['KeyVaultAndSecretReferenceArgs']] = None,\n key_encryption_key: Optional[pulumi.Input['KeyVaultAndKeyReferenceArgs']] = None):\n if disk_encryption_key is 
not None:\n pulumi.set(__self__, \"disk_encryption_key\", disk_encryption_key)\n if key_encryption_key is not None:\n pulumi.set(__self__, \"key_encryption_key\", key_encryption_key)", "def __init__(__self__, *,\n key_encryption_key_identity: Optional[pulumi.Input['ClusterPropertiesKeyEncryptionKeyIdentityArgs']] = None,\n key_encryption_key_url: Optional[pulumi.Input[str]] = None):\n if key_encryption_key_identity is not None:\n pulumi.set(__self__, \"key_encryption_key_identity\", key_encryption_key_identity)\n if key_encryption_key_url is not None:\n pulumi.set(__self__, \"key_encryption_key_url\", key_encryption_key_url)", "def derive_keys(self, master_salt, master_secret):\n\n self.sender_key = self._kdf(master_salt, master_secret, self.sender_id, 'Key')\n self.recipient_key = self._kdf(master_salt, master_secret, self.recipient_id, 'Key')\n\n self.common_iv = self._kdf(master_salt, master_secret, b\"\", 'IV')", "def __init__(self, key):\n if len(key) > KEY_SIZE:\n raise ParameterError(\"Key must be <%d bytes\" % (KEY_SIZE))\n\n self.key = key.ljust(KEY_SIZE, b\"\\xff\")\n self.encryptIV = b\"\\xff\" * BLOCK_SIZE\n self.decryptIV = b\"\\xff\" * BLOCK_SIZE\n self.remainingData = b\"\"\n self.oldDecrypt = b\"\"", "def genKey(self, otherKey):\n self.sharedSecret = self.genSecret(self.privateKey, otherKey)\n #print(\"Shared secret:\")\n #print(self.sharedSecret)\n s = hashlib.sha256()\n s.update(bytes(str(self.sharedSecret).encode()))\n self.key = s.digest()", "def write_key():\n key = fernet.Fernet.generate_key()\n keyfile = open(KEY_PATH,'wb')\n keyfile.write(key)\n keyfile.close()", "def __init__(self, key, msg0503):\n enkey1 = map(ord, AES.new(key).encrypt(msg0503[:16]))\n self.cipher = AES.new(\"\".join(\n map(chr, (enkey1[i] ^ ord(msg0503[i + 16]) for i in range(16)))))\n self.encrypt_seq = random.randint(0, 0xffff)", "def setKey(self, key):\n if hasattr(key, '__class__') and issubclass(key.__class__, ObjectKey):\n key = key.getKey()\n\n self.__key = key", "def encryption_key(self) -> typing.Optional[aws_cdk.aws_kms.IKey]:\n ...", "def operate_cipher(self):", "def test_encrypt_no_key_id(self):\n encryptor = self.test_init()\n encryptor.key_id = None\n\n with self.assertRaises(IceItException):\n encryptor.encrypt('blah', 'blah-again')", "def generate_keys(self):\n\n # TODO: Store keys encrypted\n rsa1 = RsaPrivateKey.Generate()\n self.sign_private = str(rsa1)\n self.sign_public = str(rsa1.public_key)\n\n rsa2 = RsaPrivateKey.Generate()\n self.crypt_private = str(rsa2)\n self.crypt_public = str(rsa2.public_key)", "def test_set_key():\n\n assert symmetric.set_key(\"test\") == \"test\"", "def server_side_encryption_key(self, server_side_encryption_key):\n\n self._server_side_encryption_key = server_side_encryption_key", "def get_key(self):\r\n return self.__encryption_key", "def init_alternate_cipher(self, alternate_encryption_key: bytes) -> None:\n #: Initialize AES ECB cipher with alternate_encryption_key\n cipher: Cipher = Cipher(algorithms.AES(alternate_encryption_key), modes.ECB(), backend=backend)\n #: Initialize AES ECB cipher context\n encryptor: CipherContext = cipher.encryptor()\n #: Add cipher to alternate_ciphers dict\n self.alternate_ciphers[alternate_encryption_key] = cipher\n #: Add encryptor to alternate_encryptors dict\n self.alternate_encryptors[alternate_encryption_key] = encryptor\n self.logger.debug(f'Alternate Cipher initialized for key {hexlify(alternate_encryption_key).decode(\"ASCII\")}')", "def del_key(self):\n # Deleting the values from the self.key and 
self.cryptor attributes.\n self.key=None\n self.cryptor=None", "def create_key ():", "def passwd_encryption(self):\n key = Fernet.generate_key()\n cipher_suite = Fernet(key)\n bin_passwd = bytes(self.password, 'utf-8')\n ciphered_text = cipher_suite.encrypt(bin_passwd)\n with open(self.pass_path, 'wb') as pass_output:\n pass_output.write(ciphered_text)\n with open(self.key_path, 'wb') as key_output:\n key_output.write(key)", "def WriteKey(self, key, version_number, encrypter=None):\n key = str(key)\n if encrypter:\n key = encrypter.Encrypt(key) # encrypt key info before outputting\n self.dict[str(version_number)] = key", "def generate_key():\r\n # generating key\r\n key = Fernet.generate_key()\r\n\r\n key_dir = os.path.join(os.path.dirname(__file__), \"resources/key\")\r\n\r\n # writing key in file\r\n with open(key_dir, \"wb\") as keyFile:\r\n keyFile.write(key)", "def setup_key_decrypt(self):\r\n\t\tself.max_key = math.floor(len(self.message) / 2)\r\n\t\twhile True:\r\n\t\t\tkey = input(\"Please enter the key that was used to encrypt your message.--> \")\r\n\t\t\ttry:\r\n\t\t\t\tself.key = int(key)\r\n\t\t\texcept ValueError:\r\n\t\t\t\tprint(\"Key needs to be a number.\")\r\n\t\t\t\tcontinue\r\n\t\t\tif self.key > self.max_key: \t\t\t\r\n\t\t\t\tprint(f\"{key} is too big of a number.\")\r\n\t\t\telif self.key == 0:\r\n\t\t\t\tprint(\"0 cannot be a key.\")\t\t\t\t\r\n\t\t\telse:\t\t\t\r\n\t\t\t\tbreak", "def set_key_id(self, key_id=''):\n self.key_id = key_id", "def setup_key_encrypt(self):\r\n\t\tself.max_key = math.floor(len(self.message) / 2)\r\n\t\twhile True:\r\n\t\t\tkey = input(f\"Please enter a key value less than or equal to {self.max_key}. --> \")\r\n\t\t\ttry:\r\n\t\t\t\tself.key = int(key)\r\n\t\t\texcept ValueError:\r\n\t\t\t\tprint(\"Key needs to be a number.\")\r\n\t\t\t\tcontinue\r\n\t\t\tif self.key > self.max_key: \t\t\t\r\n\t\t\t\tprint(f\"{key} is too big of a number.\")\t\r\n\t\t\telif self.key == 0:\r\n\t\t\t\tprint(\"0 cannot be a key\")\t\t\t\r\n\t\t\telse:\t\t\t\r\n\t\t\t\tbreak", "def volume_encryption_keys(self, volume_encryption_keys):\n\n self._volume_encryption_keys = volume_encryption_keys", "def crypt_key(self):\n return self._crypt_key", "def test_rekey(self):\n old_key = b'0' * 32\n new_key = b'1' * 32\n\n old_encrypted = encrypt('message', key=old_key)\n new_encrypted = rekey(old_encrypted, old_key=old_key, new_key=new_key)\n\n assert decrypt(new_encrypted, key=new_key) == 'message'", "def key(self, value=None):\n if self.crypt_method == 'C':\n key_type = \"number\"\n else:\n key_type = \"string\"\n\n input_message = f\"Please enter a {key_type} as a \" \\\n f\"{self.crypt_type}ion key\\n>> \"\n if value is None:\n key = input(input_message)\n else:\n key = value\n\n is_valid_key, key = Check.is_valid_key(key, self.crypt_method)\n if is_valid_key:\n self._key = key\n else:\n raise ValueError(f\"Key{key} is invalid\")", "def create_keys(self):\n crypto_tool = CryptoTools()\n # creating RSA keys for the signer user\n public_key, private_key = crypto_tool.create_key_with_entropy()\n self.priv_key = crypto_tool.get_pem_format(private_key).decode(\"utf-8\")\n self.pub_key = crypto_tool.get_pem_format(public_key).decode(\"utf-8\")", "def __init__(self, key, initial_prng):\n self.cipher = key\n self.prng = initial_prng\n self.nonce = None", "def __init__(self):\n self._keypair = RSA.generate(2048)\n self.public_key = self._keypair.publickey().exportKey()", "def generate_key():\n key = Fernet.generate_key()\n with open(\"Secret.key\",\"wb\")as key_file:\n 
key_file.write(key)", "def secret_key(self, val):\n self.__secret_key = val", "def setup_keys(self, dh_object, public_key, private_key):\n public_numbers = DHPublicNumbers(public_key, dh_object.parameter_numbers)\n private_numbers = DHPrivateNumbers(private_key, public_numbers)\n dh_object.private_key = private_numbers.private_key(default_backend())", "def test_encrypt_key(self):\n encrypted = encrypt('message', key=b'0' * 32)\n\n assert encrypted\n assert encrypted != 'message'", "def exchange_key(connection, pub_key):\r\n\r\n if main.diffe_key_exchange is False:\r\n # Get the server's public key\r\n server_pub_key_bytes = connection.recv(1024)\r\n\r\n # Send public key\r\n connection.sendall(rsa.PublicKey.save_pkcs1(pub_key))\r\n\r\n else:\r\n # Rounds of bit-shifting and XOR\r\n rounds = 64\r\n\r\n while True:\r\n\r\n # Generate 4096-bit keys (RFC 3526 Group 16)\r\n client_diffe_key = pyDHE.new(16)\r\n shared_secret = client_diffe_key.negotiate(connection)\r\n\r\n # Encrypt\r\n encrypted = int(binascii.hexlify(rsa.PublicKey.save_pkcs1(pub_key)).decode(), 16)\r\n for x in range(0, rounds):\r\n encrypted = encrypted ^ (shared_secret ** rounds)\r\n encrypted = encrypted << rounds\r\n encrypted = int(str(encrypted)[::-1])\r\n\r\n # Decrypt\r\n decrypted = encrypted\r\n decrypted = int(str(decrypted)[::-1])\r\n for x in range(rounds, 0, -1):\r\n decrypted = decrypted >> rounds\r\n decrypted = decrypted ^ (shared_secret ** rounds)\r\n\r\n # Check if able to decrypt\r\n try:\r\n binascii.unhexlify(hex(decrypted)[2:]).decode()\r\n client_success = True\r\n\r\n # Generate new keys upon failure and try again\r\n except UnicodeDecodeError:\r\n client_success = False\r\n pass\r\n except binascii.Error:\r\n client_success = False\r\n pass\r\n\r\n # Notify client about encryption status\r\n server_success = connection.recv(1024)\r\n if client_success is False:\r\n connection.send(b'DHE')\r\n else:\r\n connection.send(b'CONTINUE')\r\n\r\n # Get encryption status from client\r\n if client_success is False or server_success == b'DHE':\r\n pass\r\n elif server_success == b'CONTINUE':\r\n break\r\n\r\n # Hold encrypted server key\r\n server_encrypted = b''\r\n\r\n # Receive encrypted key from the server\r\n while True:\r\n data = connection.recv(8192)\r\n if data == b'ENDED':\r\n break\r\n elif data[-5:] == b'ENDED':\r\n server_encrypted += data[:-5]\r\n break\r\n server_encrypted += data\r\n\r\n # Send the encrypted key to the server\r\n connection.sendall(bytes(hex(encrypted).encode()))\r\n connection.send(b'ENDED')\r\n\r\n # Decrypt the client's public key\r\n decrypted = int(server_encrypted, 16)\r\n decrypted = int(str(int(decrypted))[::-1])\r\n for x in range(rounds, 0, -1):\r\n decrypted = decrypted >> rounds\r\n decrypted = decrypted ^ (shared_secret ** rounds)\r\n\r\n server_pub_key_bytes = binascii.unhexlify(hex(decrypted)[2:]).decode()\r\n\r\n server_pub_key = rsa.PublicKey.load_pkcs1(server_pub_key_bytes)\r\n # Determine max message size\r\n max_message_size = common.byte_size(server_pub_key.n) - 11\r\n\r\n # Return crypto key information\r\n return server_pub_key, server_pub_key_bytes, max_message_size", "def encrypt(self, sensor_data):\r\n \r\n # set encryption parameters\r\n encryption1 = aes(self.ivkey, 2, self.staticiv)\r\n encryption2 = aes(self.datakey, 2, self.iv)\r\n # encrypt data\r\n self.encrypted_data = encryption2.encrypt(sensor_data) \r\n self.encrypted_iv = encryption1.encrypt(self.iv)\r\n self.encrypted_nodeid = encryption2.encrypt(self.nodeid)\r\n \r\n self.iv = 
bytes(random.getrandbits(8) for _ in range(16)) # changes every time\r", "def __init__(self, key):\n self.key = key\n self.BLOCK_SIZE = 16", "def wrap(self, key:bytes, credential:PublicKeyCredentialSource)->bytes:\n return keywrap.aes_key_wrap_with_padding(key,credential.get_bytes(True),default_backend())", "def encryptor(iv = os.urandom(16), key = os.urandom(32), bc = backend,key_type = 'AES128',mode='CBC'):\n\tif key_type == 'AES128':\n\t\talgo = algorithms.AES(key)\n\telif key_type == 'ChaCha20':\n\t\talgo = algorithms.ChaCha20(key,nonce=os.urandom(32))\n\telse:\n\t\traise('Error algorithm ' + key_type + ' not supported!')\n\tif mode == 'CBC':\n\t\tmode = modes.CBC(iv)\n\telif mode == 'GCM':\n\t\tmode = modes.GCM(iv)\n\telse :\n\t\traise('Error mode ' + mode + ' not supported!')\n\tcipher = Cipher(algo,mode,backend = bc)\n\treturn iv,key,cipher.encryptor()", "def key_manager():\n key = DBKey(5, [], 2)\n key.receive_db_key()\n key.send_db_key()\n return key", "def test_decrypt_key(self):\n key = b'0' * 32\n\n encrypted = encrypt('message', key=key)\n assert decrypt(encrypted, key=key) == 'message'", "def load_key(self, key):\n self.key = key", "def create_crypt_key():\n\n crypt_key = Fernet.generate_key() # key is type = bytes\n\n crypt_query = 'INSERT INTO Crypt (crypt_key) VALUES (%s)'\n my_cursor.execute(crypt_query, (crypt_key,))\n pw_db.commit()", "def read_key(self, keyfile_name):\n\n with open(keyfile_name, 'rb') as f:\n self.key = f.read()\n self.cryptor = Fernet(self.key)", "def __init__(__self__, *,\n key_name: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input['DatabaseEncryptionState']] = None):\n if key_name is not None:\n pulumi.set(__self__, \"key_name\", key_name)\n if state is not None:\n pulumi.set(__self__, \"state\", state)", "def _get_encryption_key(self, **options):\n\n return self._public_key", "def __init__(self, key: bytearray):\n self.__key = key\n self.__KSA(bytearray([i for i in range(256)]))", "def key(self, key):\n self._key = key", "def key(self, key):\n self._key = key", "def md5_encrypt(self, key):\n # instantiate the md5 object in hashlib module\n md5_object = hashlib.md5()\n # encrypt the key\n md5_object.update(key)\n # return the encrypted key\n encrypted_key = md5_object.hexdigest()\n return encrypted_key", "def generate_keystream(self):", "def encrypt(cls, plaintext, aad, key, iv):", "def decrypt(self, key, dir):\n self.encrypt(key, dir)", "def key(self, key):\n\n self._key = key", "def key(self, key):\n\n self._key = key", "def __init__(self, uid, key, initial_prng):\n self.uid = uid\n self.key = key\n Crypto1.__init__(self, key, initial_prng)", "def initialize(self):\r\n if self.key_gen is None:\r\n self.key = random_string(self.key_len)\r\n else:\r\n self.key = self.key_gen()\r\n return self.key", "def __init__(self,key):\n self.block_size = 32\n self.key = hashlib.sha256(key).digest()", "def setKey(self, time, attributeIndex, hash, value, view) -> None:\n ...", "def _encrypt_aes_key(aes_key: bytes, receiver_public_key: RsaKey) -> bytes:\n cipher_rsa = PKCS1_OAEP.new(receiver_public_key)\n return cipher_rsa.encrypt(aes_key)", "def __init__(__self__, *,\n kms_encryption_config: pulumi.Input['FhirDatastoreKmsEncryptionConfigArgs']):\n pulumi.set(__self__, \"kms_encryption_config\", kms_encryption_config)", "def makeEncKey(symmetricKey, key):\n\tpt = symmetricKey\n\tiv = os.urandom(16)\n\tcipher = cryptography.hazmat.primitives.ciphers.Cipher(\n\t algorithms.AES(key), modes.CBC(iv), backend=default_backend())\n\tencryptor = 
cipher.encryptor()\n\tct = encryptor.update(pt) + encryptor.finalize()\n\treturn encodeCipherString(0, base64.b64encode(iv), base64.b64encode(ct),\n\t None)", "def aes_key_wrap(self, kek: bytes, key_to_wrap: bytes) -> bytes:\n return keywrap.aes_key_wrap(kek, key_to_wrap, default_backend())", "def _setup(self):\n if not os.path.isdir(self.config_path):\n os.makedirs(self.config_path)\n # Create a key file if one does not exist\n if not os.path.isfile(self.__key_file):\n with open(self.__key_file, 'wb') as f:\n f.write(Fernet.generate_key())\n # Make the file read-only\n os.chmod(self.__key_file, S_IRUSR)", "def __init__(self, key, plaintext=None, ciphertext=None):\n self.key = key\n # If plaintext is specified, generate its encrypted counterpart\n if plaintext:\n self.plaintext = plaintext\n self.ciphertext, self.iv = self.encrypt()\n # If instead cipher-text is specified, decrypt it\n elif ciphertext:\n self.ciphertext = ciphertext\n self.plaintext, self.iv = self.decrypt()\n # Otherwise declaration is invalid\n else:\n raise InvalidMessage(\"Either plaintext or cipher-text must be declared\")", "def encrypt(text,key):\r\n aes = pyaes.AESModeOfOperationCTR(key)\r\n ciphertext = aes.encrypt(text)\r\n return ciphertext", "def enable_encryption(self, output_key: bytes, input_key: bytes) -> None:\n self.chacha = chacha20.Chacha20Cipher(output_key, input_key)\n self.state.has_authenticated = True", "def __set_or_create_key_if_not_exist(self):\n\n # instantiate PKI class:\n pki = PKI(username=self.username, password=self.password)\n\n # load private key into object. key is ready to be used to sign already imported\n privkey = pki.load_priv_key()\n\n # if it is an empty list then no key created and saved on username so generate new key\n if not privkey:\n pki.generate_pub_priv_key()\n privkey = pki.load_priv_key()\n\n # set self.privkey to privkey\n self.privkey = privkey", "def encryption_key(self) -> bytearray:\n # Handle if encryption is disabled.\n if self.aes_on == 0:\n return None\n # Encryption is enabled so read the key and return it.\n key = bytearray(16)\n self._read_into(_REG_AES_KEY1, key)\n return key", "def update_key(self):\n self.__prev_key = self.__new_key", "def __init__(self):\n self.key = b'FSMF73R873YM187R'\n self.signer = AES.new(self.key, AES.MODE_EAX)\n self.verifier = AES.new(self.key, AES.MODE_EAX, nonce=self.signer.nonce)", "def __init__(self, key):\n self.bs = 16\n self.key = hashlib.sha256(key.encode()).digest()", "def set_enc_params(self, enc_params):\n self.enc_params = enc_params", "def encrypt(key, plaintext):\n data = fk(keyGen(key)[0], ip(plaintext))\n return fp(fk(keyGen(key)[1], swapNibbles(data)))", "def test_set_private_key_setter(self) -> None:\n\n expected = self.pem_private_key.decode()\n\n encryptor = DataEncryption()\n encryptor.set_private_key(self.pem_private_key)\n\n # pylint: disable=protected-access\n actual = encryptor._loaded_private_key.private_bytes(\n serialization.Encoding.PEM,\n serialization.PrivateFormat.PKCS8,\n serialization.NoEncryption(),\n ).decode()\n\n self.assertEqual(expected, actual)", "def test_decrypt_key_default(self, settings):\n settings.CHITON_ENCRYPTION_KEY = b'0' * 32\n\n encrypted = encrypt('message')\n assert decrypt(encrypted) == 'message'\n\n settings.CHITON_ENCRYPTION_KEY = b'1' * 32\n with pytest.raises(EncryptionError):\n decrypt(encrypted)", "def _newKey(self, key):\n pass", "def generate_keys(self, password):\n\n # TODO: Store keys encrypted\n rsa1 = RsaPrivateKey.Generate()\n self.sign_private = str(rsa1)\n 
self.sign_public = str(rsa1.public_key)\n\n rsa2 = RsaPrivateKey.Generate()\n self.crypt_private = str(rsa2)\n self.crypt_public = str(rsa2.public_key)", "def generate_key():\n key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(key)", "def generate_key():\n key = Fernet.generate_key()\n with open(\"pass.key\", \"wb\") as key_file:\n key_file.write(key)", "def save_symmetric_key(self, key, user):\n self.temp_passphrase = key\n self.send_request(user, self.KM_TEMP_KEY_ACK)", "def generate_key():\n key = ''.join([chr(random.randint(0, 0x10)) for _ in range(block_size)])\n return AES.new(second_key, AES.MODE_ECB).encrypt(pad((key.encode('ascii')), block_size))", "def key_id(self, key_id):\n\n self._key_id = key_id", "def encrypt(self, input, key, iv):\n pass", "def do_server(wrapping_key_public):\n secret = os.urandom(32)\n logging.info(f'secret: {hexlify(secret)}')\n\n ref_path = 'server-secret-for-reference.bin'\n logging.debug(f'creating {ref_path}')\n with open(ref_path, 'wb') as f:\n f.write(secret)\n\n # generate IV\n iv = os.urandom(12)\n logging.debug(f'iv: {hexlify(iv)}')\n\n # generate 256-bit AES encryption key\n ephemeral_key = os.urandom(32)\n logging.debug(f'ephemeral_key: {hexlify(ephemeral_key)}')\n\n # xor_mask = os.urandom(32)\n xor_mask = b'\\x00' * 32\n logging.debug(f'xor_mask: {hexlify(xor_mask)}')\n\n # xor with mask to get transportKey\n transport_key = bytes([ephemeral_key[i] ^ xor_mask[i] for i in range(32)])\n logging.debug(f'transport_key: {hexlify(transport_key)}')\n\n logging.debug(f'wrapping the transport key with the public RSA wrapping key')\n encrypted_transport_key = wrap(wrapping_key_public, transport_key)\n\n logging.debug(f'encrypting the secure secret with the AES ephermeral key')\n encrypted_secret, tag = encrypt(ephemeral_key, iv, secret)\n\n logging.debug(f'encrypted_secret: {hexlify(encrypted_secret)}')\n logging.debug(f'tag: {hexlify(tag)}')\n\n authorizationList = AuthorizationList()\n\n key_description = KeyDescription()\n key_description['keyFormat'] = KM_KEY_FORMAT_RAW\n key_description['keyParams'] = authorizationList\n\n secure_key_wrapper = SecureKeyWrapper()\n secure_key_wrapper['version'] = 0\n secure_key_wrapper['encryptedTransportKey'] = encrypted_transport_key\n secure_key_wrapper['initializationVector'] = iv\n secure_key_wrapper['keyDescription'] = key_description\n secure_key_wrapper['encryptedKey'] = encrypted_secret\n secure_key_wrapper['tag'] = tag\n\n encoded_secure_key_wrapper = encode_secure_key_wrapper(secure_key_wrapper)\n\n return encoded_secure_key_wrapper, xor_mask" ]
[ "0.75822407", "0.7209422", "0.7106368", "0.66078645", "0.6590576", "0.6399073", "0.6381233", "0.63626736", "0.63152766", "0.62950706", "0.6283543", "0.62832546", "0.62813187", "0.6264032", "0.623297", "0.61464673", "0.610903", "0.61076546", "0.60681397", "0.6064058", "0.60416406", "0.6002442", "0.59940374", "0.5961371", "0.593941", "0.5939405", "0.5906055", "0.59050673", "0.5899507", "0.58850044", "0.58439815", "0.58418167", "0.5839512", "0.58331114", "0.5832023", "0.5830414", "0.58245903", "0.5821543", "0.5820159", "0.5818704", "0.58117974", "0.5808444", "0.5795383", "0.57930756", "0.57887226", "0.5787241", "0.57804555", "0.5780164", "0.5779056", "0.5777305", "0.5776172", "0.5757466", "0.5735699", "0.5728705", "0.57279104", "0.5722764", "0.57224214", "0.57205546", "0.57152337", "0.5707277", "0.57013375", "0.56992173", "0.5692241", "0.5692241", "0.5686709", "0.56824005", "0.5681127", "0.56780994", "0.5677386", "0.5677386", "0.56687725", "0.5661822", "0.5660609", "0.565998", "0.56422514", "0.5639283", "0.56364346", "0.563292", "0.5631346", "0.56212115", "0.56135213", "0.56132436", "0.5604604", "0.5603651", "0.5600473", "0.55924594", "0.55791396", "0.55764806", "0.5564148", "0.5564073", "0.55638486", "0.5555253", "0.55540794", "0.55460864", "0.55346596", "0.5534001", "0.55273616", "0.55175465", "0.55050683", "0.5504296" ]
0.612742
16
Unset key from the encryptor and decryptor
def unset_cipher(self, key_name=None):
    if key_name is None:
        if self.key_name is not None:
            message_key_types.unset_cipher(self.key_name)
        if self.pending_key_name is not None:
            message_key_types.unset_cipher(self.pending_key_name)
    else:
        message_key_types.unset_cipher(key_name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def del_key(self):\n # Deleting the values from the self.key and self.cryptor attributes.\n self.key=None\n self.cryptor=None", "def clear_key(self, key):\r\n return self.handler.clear_key(key_to_code(key))", "def tearDown(self) -> None:\n\n del self.private_key\n del self.pem_private_key\n del self.pem_public_key\n del self.encrypted_pem_private_key", "def _delKey(self, key):\n pass", "def discard(self, key: KT) -> None:\n discard(self, key)", "def test_decrypt_key(self):\n key = b'0' * 32\n\n encrypted = encrypt('message', key=key)\n assert decrypt(encrypted, key=key) == 'message'", "def delkey(confirm, pub):\n stm = shared_morphene_instance()\n if mph.rpc is not None:\n mph.rpc.rpcconnect()\n if not unlock_wallet(stm):\n return\n mph.wallet.removePrivateKeyFromPublicKey(pub)\n set_shared_morphene_instance(stm)", "def decrypt(self, key):\n super(MACDataUplinkMessage, self).decrypt(key, dir=0)", "def decipher(self):\n plaintext = \"\"\n for ct, key_char in zip(self.text, self.key):\n char_index = self.char_block.rows[key_char].index(ct)\n plaintext += self.char_block.alphabet[char_index]\n print(plaintext)", "def unlink(self):\r\n try:\r\n deleteSenderPublicKey(self)\r\n del self._privateKey\r\n del self._secondPrivateKey\r\n except Exception:\r\n pass", "def deleteKey(self, key):\n key.delete()", "def eliminate_key (self,key):\r\n\r\n if self.using_shelf:\r\n\r\n del self.key_dict[str(key)]", "def unfunc(ciphertext, key):\n plaintext = xor(ciphertext, key)\n return plaintext", "def removeAllKeys(self) -> None:\n ...", "def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None", "def tearDown(self):\n if self.keypair_creator:\n self.keypair_creator.clean()\n\n try:\n os.remove(pub_file_path)\n except:\n pass\n\n try:\n os.remove(priv_file_path)\n except:\n pass", "def _disable_encryption(self):\n # () -> None\n self.encrypt = self._disabled_encrypt\n self.decrypt = self._disabled_decrypt", "def remove_key(attr):\n pm.cutKey(attr, clear=True, time=pm.currentTime())", "def unset(self, key: str) -> Any:\n return self.client.delete(self._url(key))", "def decipher(s, key): # s = message\n return decipher_raw(s, key).rstrip(bytes('\\x00'.encode('utf-8')))", "def test_rekey(self):\n old_key = b'0' * 32\n new_key = b'1' * 32\n\n old_encrypted = encrypt('message', key=old_key)\n new_encrypted = rekey(old_encrypted, old_key=old_key, new_key=new_key)\n\n assert decrypt(new_encrypted, key=new_key) == 'message'", "def test_rekey_defaults(self, settings):\n old_key = b'0' * 32\n new_key = b'1' * 32\n\n settings.CHITON_ENCRYPTION_KEY = new_key\n settings.CHITON_PREVIOUS_ENCRYPTION_KEY = old_key\n\n encrypted = encrypt('message', key=old_key)\n rekeyed = rekey(encrypted)\n\n assert decrypt(rekeyed) == 'message'", "def do_ios_decryption(self):\r\n try:\r\n self.aes_decryption_key = self.extract_aes_key()\r\n except DecryptionKeyInvalidError:\r\n self.aes_decryption_key = self.get_backup_encryption_key()\r\n self.used_ios_decryption_key_cache = True\r\n \r\n self.decrypt_device_file()\r\n # join is optimized and does not cause O(n^2) total memory copies.\r\n self.decrypted_file = b\"\\n\".join(self.good_lines)", "def remove_key(self,key):\n public_key = key\n try: public_key = key.public_key()\n except: pass\n\n serialized = public_key.public_bytes(\n encoding = serialization.Encoding .OpenSSH,\n format = serialization.PublicFormat.OpenSSH)\n\n blob = serialized.split(None,2)[1]\n data = b64decode(blob)\n\n message = WriteMessage()\n 
message.write_uint8(constants.request.SSH_AGENTC_REMOVE_IDENTITY)\n message.write_binary(data)\n self.connection.send_message(message.data)\n self._await_operation_result()", "def reset(self):\n self.enc_len = None\n self.precomputed_enc_h = None\n self.mask = None\n self.prev_attn = None", "def __del__(self):\n if self.key_buffer:\n del self.key_buffer\n if self.val_buffer:\n del self.val_buffer\n castle_disconnect(self.conn)\n pycastle_log.info(str(self)+\" Destroyed connection\")", "def remove_key(self, key):\n del self.data[key]\n self.save_data()", "def delete(self, key):", "def dec(self, key):\n if key not in self.key_dict:\n return\n self.decrease(key)", "def remove(self, key):", "def _delete_key(self):\n return self.connection.delete(self.key)", "def decrypt(self, key, dir):\n self.encrypt(key, dir)", "def delete(self, key):\n pass", "def delete(self, key):\n pass", "def clear(self):\n self._check_private_key(\"clear data\")\n headers = {'Phant-Private-Key': self.privateKey}\n self._delete(self.inputUrl(''), headers=headers)", "def clear(self, key):\n os.remove(self._get_path(key))\n self.keys.remove(key)", "def decrypt(self, data):", "def remove(enforcer_dict, key):\n del enforcer_dict['f']\n assert other.keystring == 'abcde'\n assert other.valuesum == 15\n\n enforcer_dict['a'] = 2\n assert other.keystring == 'bcdea'\n assert other.valuesum == 16\n\n enforcer_dict.clear()\n assert other.keystring == ''\n assert other.valuesum == 0", "def aes_key_unwrap(self, kek: bytes, wrapped_key: bytes) -> bytes:\n return keywrap.aes_key_unwrap(kek, wrapped_key, default_backend())", "def remove(self, key):\n pass", "def test_decrypt_key_default(self, settings):\n settings.CHITON_ENCRYPTION_KEY = b'0' * 32\n\n encrypted = encrypt('message')\n assert decrypt(encrypted) == 'message'\n\n settings.CHITON_ENCRYPTION_KEY = b'1' * 32\n with pytest.raises(EncryptionError):\n decrypt(encrypted)", "def shutdown(self):\n auth.debug(\"DICEKey shutdown called\")\n super().shutdown()\n AuthenticatorCryptoProvider.shutdown_providers()", "def deleteKey(self):\n\n self.key_del_response = self.ec2.delete_key_pair(KeyName=self.key)", "def decrypt(text,key):\r\n aes = pyaes.AESModeOfOperationCTR(key)\r\n decrypted = aes.decrypt(text)\r\n return decrypted", "def _delete_key(self):\n return self.connection.hdel(self.key, self.name)", "def _decrypt(self, data, key):\n seed1 = key\n seed2 = 0xEEEEEEEE\n result = BytesIO()\n\n for i in range(len(data) // 4):\n seed2 += self.encryption_table[0x400 + (seed1 & 0xFF)]\n seed2 &= 0xFFFFFFFF\n value = struct.unpack(\"<I\", data[i*4:i*4+4])[0]\n value = (value ^ (seed1 + seed2)) & 0xFFFFFFFF\n\n seed1 = ((~seed1 << 0x15) + 0x11111111) | (seed1 >> 0x0B)\n seed1 &= 0xFFFFFFFF\n seed2 = value + seed2 + (seed2 << 5) + 3 & 0xFFFFFFFF\n\n result.write(struct.pack(\"<I\", value))\n\n return result.getvalue()", "def decipher2(s, key): # s = message\n return decipher_raw2(s, key).rstrip(bytes('\\x00'.encode('utf-8')))", "def decryptEncryptionKey(cipherString, key):\n\tencryptionType, iv, cipherText, mac = decodeCipherString(cipherString)\n\t# log.debug(\"mac:%s\", mac)\n\t# log.debug(\"iv:%s\", iv)\n\t# log.debug(\"ct:%s\", cipherText)\n\tassert mac is None\n\tif encryptionType != 0:\n\t\traise UnimplementedError(\"can not decrypt type:%s\" % encryptionType)\n\tcipher = cryptography.hazmat.primitives.ciphers.Cipher(\n\t algorithms.AES(key), modes.CBC(iv), backend=default_backend())\n\tdecryptor = cipher.decryptor()\n\tplainText = decryptor.update(cipherText) + decryptor.finalize()\n\t# 
log.debug(\"mackey before unpad:%s\", plainText[32:])\n\treturn plainText[:32], plainText[32:64]", "def delete(self, key, key_type=None):\n pass", "def decrypt(ciphertext: str, key: str) -> str:\n return encrypt(ciphertext, key)", "def delKey(self, key ):\n if key in self.conf:\n del self.conf[key]", "def clean(self):\n if self.__keypair:\n try:\n nova_utils.delete_keypair(self._nova, self.__keypair)\n except NotFound:\n pass\n self.__keypair = None\n\n if self.__delete_keys_on_clean:\n if (self.keypair_settings.public_filepath and\n file_utils.file_exists(\n self.keypair_settings.public_filepath)):\n expanded_path = os.path.expanduser(\n self.keypair_settings.public_filepath)\n os.chmod(expanded_path, 0o755)\n os.remove(expanded_path)\n logger.info('Deleted public key file [%s]', expanded_path)\n if (self.keypair_settings.private_filepath and\n file_utils.file_exists(\n self.keypair_settings.private_filepath)):\n expanded_path = os.path.expanduser(\n self.keypair_settings.private_filepath)\n os.chmod(expanded_path, 0o755)\n os.remove(expanded_path)\n logger.info('Deleted private key file [%s]', expanded_path)\n\n super(self.__class__, self).clean()", "def __delitem__(self, key_mac):\n self.ingress_tbl.pop(key_mac, None)\n rsw = self.rootsw_tbl.pop(key_mac, None)\n if rsw:\n rsw.leaf_macs.discard(key_mac)", "def unset(self, keys=None):\n if not keys:\n keys = self._trans_dict.keys()\n for key in keys:\n key = key.upper()\n self._trans_dict[key] = key", "def decryptor(iv = os.urandom(16), key = os.urandom(32), bc = backend):\n\tcipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend = bc)\n\treturn iv, key, cipher.decryptor()", "def clearKeys(self):\n for attr in self._filter():\n pm.cutKey(attr)", "def test_decrypt_key_incorrect(self):\n right_key = b'0' * 32\n wrong_key = b'1' * 32\n\n encrypted = encrypt('message', key=right_key)\n\n with pytest.raises(EncryptionError):\n decrypt(encrypted, key=wrong_key)", "def operate_cipher(self):", "def __delitem__(self, key):\n path = self.pwfiles.pop(key)\n filename = os.path.join(path, key) + '.pw'\n os.unlink(filename)", "def delete_key(self, key):\n self.dest.delete(key)", "def remove(self, key):\n h = key%self.m\n a = self.a\n if a[h]:\n a[h] = None", "def erase(self):\n pass", "def unsetResourceApplicationDataEntry(self, authenticationToken, guid, key):\r\n pass", "def test_07_erase(self, mock_shred, mock_unlink,\n mock_config, mock_verks):\n self._init()\n udocker.Config = mock_config\n udocker.Config.tmpdir = \"/tmp\"\n kstore = udocker.KeyStore(\"filename\")\n self.assertTrue(kstore.erase())\n mock_unlink.assert_called_once_with(\"filename\")", "def reset(self):\n self.keyToFile=dict()", "def xor_decrypt(ciphertext, key):\n\n\tdecrypted_char = ''\n\tdecrypted_str = ''\n\n\tfor char in ciphertext:\n\t\tdecrypted_char = chr(char ^ key)\n\t\tdecrypted_str += decrypted_char\n\n\treturn decrypted_str", "def __delitem__(self, key):\n del self._get_storage()[key]", "def setup_key_decrypt(self):\r\n\t\tself.max_key = math.floor(len(self.message) / 2)\r\n\t\twhile True:\r\n\t\t\tkey = input(\"Please enter the key that was used to encrypt your message.--> \")\r\n\t\t\ttry:\r\n\t\t\t\tself.key = int(key)\r\n\t\t\texcept ValueError:\r\n\t\t\t\tprint(\"Key needs to be a number.\")\r\n\t\t\t\tcontinue\r\n\t\t\tif self.key > self.max_key: \t\t\t\r\n\t\t\t\tprint(f\"{key} is too big of a number.\")\r\n\t\t\telif self.key == 0:\r\n\t\t\t\tprint(\"0 cannot be a key.\")\t\t\t\t\r\n\t\t\telse:\t\t\t\r\n\t\t\t\tbreak", "def removeKeySignature(self):\n\n for 
measure in self.measures:\n measure.removeKeySignature()", "def clear(self):\n self.__redis.delete(self.key)", "def __delitem__(self, key):\n del self._ctx[key]", "def decrypt(key, ciphertext):\n data = fk(keyGen(key)[1], ip(ciphertext))\n return fp(fk(keyGen(key)[0], swapNibbles(data)))", "def test_encrypt_no_key_id(self):\n encryptor = self.test_init()\n encryptor.key_id = None\n\n with self.assertRaises(IceItException):\n encryptor.encrypt('blah', 'blah-again')", "def delete(self, key):\n return None", "def _remove_swarm_keys(self):\n for key in SWARM_PROPERTIES:\n self.spec.pop(key, None)", "def clear(self):\r\n for key in self.conn.keys():\r\n self.conn.delete(key)", "def decrypt():\n plaintext = \"\"\n i = 0\n while i < len(ciphertext):\n if i%2==1:\n try:\n plaintext += key[ ciphertext[i-1]+ciphertext[i] ]\n except KeyError:\n plaintext += ciphertext[i-1]+ciphertext[i]\n i += 1\n return plaintext", "def discard_key_from_tag(self,tag,key):\r\n\r\n # with shelf\r\n if self.using_shelf:\r\n\r\n self.tag_dict[tag].discard(key)\r\n\r\n\r\n #with database\r\n if self.using_database:\r\n value_tuple = (notebookname,tag,key,)\r\n db_cursor.execute(\"DELETE FROM tags_to_keys\"\r\n +\" WHERE notebook=? AND tag=?\"\r\n +\" AND keyword=?;\",\r\n value_tuple)", "def __delitem__(self, key):\n self.deleteAttributes([key])", "def decrypt(self, ciphertext, key):\n iv = ciphertext[:AES.block_size]\n cipher = AES.new(key, AES.MODE_CBC, iv, segment_size=64)\n plaintext = cipher.decrypt(ciphertext[AES.block_size:])\n return self.pkcs7_unpad(plaintext)", "def decrypt(key, cipher, plaintext):\n\n rsa = Rsa()\n\n try:\n k = TomlKeyFormatter().from_string(key.read())\n\n c = cipher.read()\n p = rsa.decrypt(c, k)\n\n plaintext.write(p)\n\n except KeyFormatError:\n click.echo(\"ERROR: Key is in bad format\")\n\n except DecryptError:\n click.echo(\"ERROR: Key is wrong or message was badly padded before encryption\")", "def switch_off_key(self, key):\n if key not in self.switched_off_keys:\n self._switched_off_keys.append(key)\n self._config[\"# \"+key] = self._config.pop(key)", "def test_rekey_non_encrypted(self):\n with pytest.raises(EncryptionError):\n rekey('message', old_key=b'0' * 32, new_key=b'1' * 32)", "def decrypt_key(data, key):\n data = MegaCrypto.base64_decode(data)\n return sum((MegaCrypto.str_to_a32(MegaCrypto.cbc_decrypt(data[_i:_i + 16], key))\n for _i in range(0, len(data), 16)), ())", "def do_android_decryption(self):\r\n self.aes_decryption_key = self.extract_aes_key()\r\n self.decrypt_device_file()\r\n # join is optimized and does not cause O(n^2) total memory copies.\r\n self.decrypted_file = b\"\\n\".join(self.good_lines)", "def __delitem__(self, key):\n self.delete(key)", "def __delitem__(self, key):\n self.delete(key)", "def clear(self):\n for key in self.keys():\n del self[key]", "def erase(self):\n self._verify_keystore()\n try:\n self._shred()\n os.unlink(self.keystore_file)\n except (IOError, OSError):\n return 1\n return 0", "def dec(self, key: str) -> None:\n if key in self.keyCnt:\n cnt = self.keyCnt[key]\n if cnt == 1:\n self.keyCnt.pop(key)\n self.removeFromNode(self.cntKey[cnt], key)\n else:\n self.changeKey(key, -1)", "async def remove_key(request: web.Request) -> web.Response:\n requested_hash = request.match_info['key_uuid']\n deleted_file = wifi.remove_key(requested_hash)\n if not deleted_file:\n return web.json_response(\n {'message': f\"No such key file {requested_hash}\"}, status=404)\n return web.json_response(\n {'message': f'Key file {deleted_file} deleted'},\n 
status=200)", "def passwd_decryption(self):\n with open(self.key_path, 'rb') as input_key:\n for line in input_key:\n key = line\n with open(self.pass_path, 'rb') as input_password:\n for line in input_password:\n password = line\n cipher_suit = Fernet(key)\n plain_password = cipher_suit.decrypt(password)\n plain_password = bytes(plain_password).decode('utf-8')\n \n return plain_password", "def removeKey(self, timeOrHash) -> None:\n ...", "def __delitem__(self, key):\n self.f_remove(key)", "def off(key):\n # print(\"{0} released\".format(key), time.perf_counter())\n\n global keys, esc_count\n\n # caps, shift, etc. aren't automatically registered as strings\n if type(key) == Key:\n keys[esc_count].append((str(key), time.perf_counter(), \"released\"))\n else:\n keys[esc_count].append((key, time.perf_counter(), \"released\"))", "def delSit(self, key):\n if hasattr(key, \"encode\"):\n key = key.encode(\"utf-8\") # convert str to bytes\n return self.delVal(self.sits, key)", "def decrypt(self, key, value):\n key = hashlib.sha256(key).digest()[:self.BLOCK_SIZE]\n iv = value[:16]\n crypted = value[16:]\n cipher = AES.new(key,AES.MODE_CBC,iv)\n return self.pkcs5_unpad(cipher.decrypt(crypted))", "def decrypt(self, input, key, iv) :\n pass", "def generate_key(self):\n\n self.key = Fernet.generate_key()\n self.cryptor = Fernet(self.key)", "def delete(self, key: str) -> None:\n self.db.rem(key)\n self.db.dump()" ]
[ "0.79658395", "0.6650825", "0.6479646", "0.64384425", "0.62364286", "0.6223678", "0.62217623", "0.61869067", "0.61497605", "0.6120932", "0.6093871", "0.60819805", "0.60426354", "0.6033569", "0.6002724", "0.5995064", "0.5992837", "0.5932019", "0.59261566", "0.5922496", "0.58950895", "0.5892319", "0.584982", "0.58491504", "0.58487475", "0.58323115", "0.5752463", "0.5746494", "0.57420474", "0.5729714", "0.57252747", "0.5709742", "0.5708405", "0.5708405", "0.5704654", "0.5702179", "0.5697766", "0.56975704", "0.5664121", "0.56429416", "0.56404644", "0.56357443", "0.5627605", "0.56066626", "0.5604074", "0.56019837", "0.55854154", "0.5578745", "0.5572584", "0.5572097", "0.5568681", "0.5556726", "0.5555866", "0.5549696", "0.5549332", "0.5539143", "0.5533889", "0.55303353", "0.5529377", "0.5524828", "0.5516201", "0.550988", "0.5508099", "0.5493857", "0.5493083", "0.5488534", "0.5484292", "0.54725736", "0.54655874", "0.54622006", "0.54574126", "0.54510593", "0.5450208", "0.54477775", "0.54475987", "0.5443794", "0.5442096", "0.5437599", "0.54358524", "0.54351544", "0.5422568", "0.5421892", "0.5418807", "0.54143065", "0.5412724", "0.54030544", "0.54030544", "0.54023856", "0.5389829", "0.53895533", "0.53870445", "0.53860074", "0.5380401", "0.5378884", "0.5362498", "0.53623885", "0.5361832", "0.5353315", "0.5351372", "0.5344802" ]
0.6914403
1
Set timer for key refreshment
def set_invoke_timer(self, timeout, retry_entry=False): if self.timer_entry is not None and self.timer_entry.active: self.timer_entry.deactivate() #print("(%d) set_invoke_timer:" % int(time.time()), timeout) self.timer_entry = query_management.QueryEntry(expire_after=timeout, callback_expire=self._perform_key_exchange, retry_count=0) if retry_entry: self.timer_entry.data[KeyType.retry_timer] = True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_time(cls, key):\n key.put()", "def __updateElapsedTime(self):\n if self._keyCodeTime != 0.0 and \\\n (globalClock.getFrameTime() - self._keyCodeTime) >= self._timeout:\n self.notify.debug(\"Key code timed out. Resetting...\")\n self.reset()\n messenger.send(KeyCodes.CLEAR_CODE_EVENT)\n self._keyCodeTime = globalClock.getFrameTime()", "def tic(self, key):\n self.starts[key] = time()", "def timer_handler():\r\n \r\n global elapsed_time\r\n elapsed_time += 1", "def start_timer(self):\n print \"Timer Object Started. Will update ADC Information every %s seconds\" % self.refreshTime\n self.timer=Timer(float(self.refreshTime)*1000, self._refresh_Visible_channels)", "def reset_timer():\n resetTimer = time.time()\n target_time.clear()\n target_time.append(resetTimer)", "def start_timer(self):\n self.start_time = time.time()", "def timer_change(self):\n if self.time < 999:\n self.time += 1\n self.time_lcd.display(self.time)\n else:\n self.timer.stop()", "def _set_delete_timer(self, key_name, timeout):\n if key_name is not None:\n #print(\"(%d) _set_delete_timer:\" % int(time.time()), key_name.hex()[:10], timeout)\n query_management.QueryEntry(expire_after=timeout, callback_expire=remove_old_key,\n data={KeyType.hint: key_name}, retry_count=0)", "def set_timer(self, update: Update, context: CallbackContext) -> None:\n chat_id = update.message.chat_id\n try:\n due = int(context.user_data[\"duration\"])\n if due < 0:\n # update.message.reply_text('Sorry we can not go back to future!')\n return\n\n job_removed = self.remove_job_if_exists(str(chat_id), context)\n context.job_queue.run_once(\n self.command_stop_irrigation, due, context=chat_id, name=str(chat_id)\n )\n\n # text = 'Timer successfully set!'\n # if job_removed:\n # text += ' Old one was removed.'\n # update.message.reply_text(text)\n\n except (IndexError, ValueError):\n # update.message.reply_text('Usage: /set <seconds>')\n update.message.reply_text(\"Erro ao agendar o desligamento da irrigação 😞\")", "def StartTimer(self):\n self._start_time = time.time()", "def timer_setup(self):\n pass", "def update_timer(self):\r\n frmt_time = \"%d:%02d\" % (self.time_minutes, self.time_seconds)\r\n self.time_seconds += 1\r\n if self.time_seconds == 60:\r\n self.time_seconds = 0\r\n self.time_minutes += 1\r\n\r\n self.mainWidget.statusLabel.setText(\"{} {} --- {} {} --- {}\".format(self.elapsedTimeString,\r\n frmt_time,\r\n self.freeSpaceString,\r\n get_free_space(self.config.videodir),\r\n self.recordingString))", "def start_timer(self):\n self.start_time = datetime.now()", "async def on_timer_update(self, secs: int):\n pass", "def reset_timer(self):\r\n self.time_minutes = 0\r\n self.time_seconds = 0", "def _refreshKey(self, displayKey):\n refreshRect = Rect(*displayKey.scaled)\n refreshRect.Inflate(2, 2)\n self.RefreshRect(refreshRect.Get())", "def add(self, key):\n self.times[key] = time.time()", "async def _timein_refresh(self):\n\t\t\n\t\tawait self.refresh_cache()", "def synictimer(self, synictimer):\n\n self._synictimer = synictimer", "def clean_timer(sc):\n global prev_dict_\n # Cleaning the previous dictionary after 5 hours\n prev_dict_ = {}\n z.enter(18000, 1, clean_timer, (sc,))", "def on_key(event):\n if event.key == 'right':\n# print( 'on_key ', sld['time'].val+1)\n sld['time'].set_val( min( sld['time'].val+1, sld['time'].valmax ) )\n if event.key == 'left':\n# print( 'on_key ', sld['time'].val-1)\n sld['time'].set_val( max( sld['time'].val-1, sld['time'].valmin ) )\n if event.key == 'up':\n sld['freq'].set_val( 
min(sld['freq'].val + scale_freq, sld['freq'].valmax) )\n if event.key == 'down':\n sld['freq'].set_val( max(sld['freq'].val - scale_freq, sld['freq'].valmin) )", "def timer(self):\n if self.board.reset_timer:\n self.time_count.set(\"000\")\n self.time_widget.config(text=self.time_count.get())\n return 0\n elif self.board.stop_timer:\n return 0\n now_time = time.time()\n self.board.time = now_time - self.board.last_frame_time\n self.time_count.set(self.numToStrLabel(int(self.board.time)))\n self.time_widget.config(text=self.time_count.get())\n self.board.time = self.board.last_frame_time\n self.after(50, self.timer)", "def toc(self, key):\n self.ends[key] = time()-self.starts[key]", "def timerAction():\n timer = threading.Timer(30.0, timerAction)\n timer.daemon = True\n timer.start()\n save()", "def tick(self):\n self.times.append(timeit.default_timer())", "def _addTiming(self, key, duration):\n pass", "def tick(self):\r\n new_time = time.strftime('%H:%M:%S')\r\n if new_time != self.time:\r\n self.time = new_time\r\n self.config(text=self.time)\r\n self.after(200, self.tick)", "def renewKey():\n while True:\n try:\n sleep(RENEW_KEY)\n mutex.acquire()\n key_dict.clear()\n mutex.release()\n except:\n print(\"error in renew key\")\n finally:\n if mutex.locked():\n mutex.release()", "def addKey(self, time, value) -> None:\n ...", "def update_time(self):\n pass # Do nothing", "def timer(self):\n self.time_remaining -= 1\n if self.time_remaining > 0:\n Timer(1, self.timer).start()", "def set_lock_time():\n\n pass", "def time_automation_listener(now):\n action()", "def timer_update(self):\n if self.mineboard.gamestate is not None:\n return\n time_so_far = round(time.time()-self.start_time)\n if time_so_far == 1:\n self.now.set(f\"Time so far: {time_so_far} second\")\n else:\n self.now.set(f\"Time so far: {time_so_far} seconds\")\n self.after(1000, self.timer_update) # calls this function every second", "def autonomousInit(self):\n #self.timer.reset()\n #self.timer.start()\n pass", "def recordStart(self, event_key):\n self.start_times[event_key] = time.time()", "def timer_callback(self):\n msg = String()\n msg.data = ' '.join(str(k) for k in self.keys_set)\n self.keys_pub.publish(msg)", "def refresh(self, key: str):\n self._registry[key] = time.time()\n # Also update the whole thing\n return self.get_registry()", "def _ontimer(self, fun, t):\n if t == 0:\n self.cv.after_idle(fun)\n else:\n self.cv.after(t, fun)", "def start_handler():\r\n timer.start()", "def Start():\n timer.start()", "def ontimer(self, fun, t=0):\n self._ontimer(fun, t)", "def update(self, func):\n if self.current_time == 0:\n func()\n return\n self.current_time -= 1\n hours = self.current_time // 3600\n minutes = self.current_time % 3600 // 60\n seconds = self.current_time % 60\n try:\n self.timer_label.setText('%02d:%02d:%02d' % (hours, minutes, seconds))\n if self.current_time <= 10:\n self.timer_label.setStyleSheet('color: red')\n Qt.QTimer().singleShot(1000, lambda: self.update(func))\n except RuntimeError:\n return", "def reset_timer():\r\n window.after_cancel(timer)\r\n canvas.itemconfig(timer_text, text=f\"00:00\")\r\n pomodoro_title.config(text=\"Timer\", fg=GREEN)\r\n check_marks.config(text=\"\")", "def tick(self):\r\n if self.display_seconds:\r\n new_time = time.strftime('%H:%M:%S')\r\n else:\r\n new_time = time.strftime('%I:%M %p').lstrip('0')\r\n if new_time != self.time:\r\n self.time = new_time\r\n self.display_time = self.time\r\n self.config(text=self.display_time)\r\n self.after(200, self.tick)", "def 
reset_timer(self, *_) -> \"ALL\":\n self.last = time.time()\n delta = time.time() - self.last\n if delta > 180:\n print(\n \"!!! Warning: Watchdog failure detected, spawning a fallback \"\n \"thread.\"\n )\n self.watchdog = FallbackWatchdog(self)\n self.watchdog.start()", "def update_timeval(self):\n self.timeval = self.get_timeval()", "def start(self):\n self.timer.start(500)", "def valkkafsmanager_set_time_cb(self, t):\n self.signals.set_time.emit(t)", "def settimeout(self,timeout=10):\r\n # Update\r\n self.timeout = timeout", "def set_timer(bot, update, args, job_queue, chat_data):\n chat_id = update.message.chat_id\n try:\n # args[0] should contain the time for the timer in seconds\n due = int(args[0])\n if due < 0:\n update.message.reply_text('Sinto muito, mas ainda nao podemos viajar no tempo')\n return\n\n # Add job to queue\n job = job_queue.run_repeating(alarm, due, context=chat_id)\n chat_data['job'] = job\n check = emojize(\":white_check_mark:\", use_aliases=True)\n update.message.reply_text('Intervalo das notificacoes setado com sucesso '+check+'')\n\n except (IndexError, ValueError):\n update.message.reply_text('Exemplo: /set <seconds>')", "def _startbuttontimers(self):\n changetimes = {}\n for b in self._buttons:\n if not b._times:\n continue\n t0, t1 = b._times\n changetimes[t0] = changetimes[t1] = 1\n for t in changetimes.keys():\n mw_globals.toplevel.settimer(t, (self._window._buttonschanged, ()))", "def _mark_timer_complete(crawl_id, engine_redis):\n config_file = engine_redis.get(crawl_id)\n config = json.loads(config_file)\n start_time = config['time']\n stop_time = time.time()\n config['time'] = stop_time - start_time\n config_json = json.dumps(config)\n engine_redis.set(crawl_id, config_json)\n engine_redis.expire(crawl_id, 60*60)", "def record(self, key):\n return TimerCtxManager(timer=self, key=key)", "async def _init_timed_events(self, client: Bot):\n\n await self.bot.wait_until_ready() # Wait for the bot to launch first\n client.secs = 0\n\n secs = 0 # Count number secs\n while True:\n client.dispatch(\"timer_update\", secs)\n secs += 1\n client.secs = secs\n await sleep(1)", "def setTime(self, *args):\n return _osgAnimation.Keyframe_setTime(self, *args)", "def set_refresh_rate(self, refresh_rate : int) -> None:\n if self.stdscr:\n self.stdscr.timeout(int(1000 / refresh_rate))", "def pytest_timeout_set_timer(item, settings):", "def tick(self):\n if self.display_seconds:\n new_time = time.strftime('%I:%M:%S %p')\n else:\n new_time = time.strftime('%I:%M:%S %p').lstrip('0')\n if new_time != self.time:\n self.time = new_time\n self.display_time = self.time\n self.config(text=self.display_time)\n self.after(200, self.tick)", "def update_rec_timer(self, time_s):\n self._stop_section.ids.rec_time_lbl.text = format_time_str(int(round(time_s)))", "def update(self, surface, keys, current_time, dt, scale):\n self.anykey.update(current_time)\n self.draw(surface)", "def start_clock(self):\n pass", "def on_refreshTime(self, control):\n\n self.txtFecha.set_text(str(self.localTime))", "def register_timers(self, key, timers):\n if key in self.registry:\n [timer.stop() for timer in self.registry[key]]\n self.registry[key] = timers", "def on_timer(self):\n self.read_serial_data()\n # self.update_monitor()", "def setitimer(which, seconds, interval=None): # real signature unknown; restored from __doc__\n pass", "async def set_timer(self, ctx: commands.Context, seconds: int = None):\n if seconds is not None:\n await ctx.cfg_guild.autopostseconds.set(seconds)\n await 
ctx.send(\"Auto-post timer has been set to {}\".format(seconds))\n else:\n seconds = await ctx.cfg_guild.autopostseconds()\n await ctx.send(f\"Currently posting every {seconds} seconds.\")", "def set_time(self, sec):\n self.set_timed(round(sec * 10.0))", "def __enter__(self):\n self.start = timeit.default_timer()", "def caller(self,press,timer):\n self.set_numkey(press)\n self.alien_fire()\n self.moveBolts(timer)\n self.deleteBolts()\n self.change_PU(timer)\n self.col_detectorPU()\n self.col_detector()\n self.GameState()", "def update(self, dt):", "def update(self, dt):", "def pause_game_timer(self):\n self._pause_start_time = datetime.datetime.now()", "def _api_timer_expiration_handler(self):\n\n try:\n self._api_lock.release()\n except:\n pass", "def set_timer(bot, update, args, job_queue, chat_data):\n chat_id = update.message.chat_id\n try:\n # args[0] should contain the time for the timer in seconds\n due = int(args[0])\n if due < 0:\n update.message.reply_text('Sorry we can not go back to future!')\n return\n \n try:\n # args[1] should contain the name of the timer\n timer_name = args[1]\n except IndexError:\n timer_name = 'timer'\n if timer_name in chat_data:\n update.message.reply_text(f'Updating \\'{timer_name}\\' timer')\n timer = chat_data[timer_name]\n timer.schedule_removal()\n timer = job_queue.run_once(alarm, due, context=[chat_id, timer_name])\n chat_data[timer_name] = timer\n update.message.reply_text(f'Timer \\'{timer_name}\\' successfully set!')\n\n except (IndexError, ValueError):\n update.message.reply_text('Usage: /set <seconds> <timer_name>(optional)')", "def med_timer(self):\n self.start_button.config(text='Sit', state='disabled')\n self.start_button.update()\n if self.mins.get() == \"\":\n num_mins = 0\n else:\n num_mins = float(self.mins.get())\n time_in_seconds = num_mins * 60\n self.t = Timer(time_in_seconds, self.play_wav)\n self.t.start()", "def init_update_timer(self):\r\n\r\n self.update_timer = wx.Timer(self)\r\n self.Bind(wx.EVT_TIMER, self.check_updates, self.update_timer)", "def OnButton1(self):\n self.start_time = self.start_time.Minutes(DEFAULT_TIMER)\n self.timertext.SetLabel(self.start_time.Format(\"%M:%S\"))\n self.timerNotZero = True\n self.blinkPhase = 0\n self.timertext.SetForegroundColour('black')\n self.button1.SetBackgroundColour('white')", "def start_timer(self, secs):\r\n self.secs = secs\r\n self.countdownTimer.start(1000)", "def loop_run(self):\n super(TimerLoop, self).loop_run()\n self.timer = self.cothread.Timer(self.timeout,\n self.callback,\n retrigger=True)", "def timer():\r\n\r\n T = 0\r\n while True:\r\n print (term.white + term.move_xy(82,1) + 'TIMER : ', end='')\r\n print(T, end='\\r')\r\n time.sleep(1)\r\n T = T + 1", "def OnTimer(self, event):\n if self.timerNotZero: # When timer runs, subtract one second and update text\n self.start_time = self.start_time.Subtract(wx.TimeSpan(0, sec=1))\n self.timertext.SetLabel(self.start_time.Format(\"%M:%S\"))\n if self.start_time.GetMinutes() == 0 and self.start_time.GetSeconds() == 0: # Timer reached zero\n self.timerNotZero = False\n self.button1.SetBackgroundColour('red')\n else: # Once timer stops, makes the text blink red\n if self.blinkPhase == 0:\n self.timertext.SetForegroundColour('red')\n self.timertext.SetLabel(\"00:00\")\n self.blinkPhase = 1\n elif self.blinkPhase == 1:\n self.timertext.SetForegroundColour('black')\n self.timertext.SetLabel(\"00:00\")\n self.blinkPhase = 0", "def change_stopwatch(timez):\r\n\r\n m = timez // 60\r\n s2 = timez % 60\r\n s1 = 0 if s2 < 10 else 
\"\"\r\n now = f\"{m}:{s1}{s2}\"\r\n stopwatch.configure(text=now)", "def set_timer(self, update: Update, context: CallbackContext) -> None:\n chat_id = update.message.chat_id\n try:\n due = int(context.user_data[\"duration\"])\n if due < 0:\n return\n\n job_removed = self.remove_job_if_exists(str(chat_id), context)\n context.job_queue.run_once(\n self.command_stop_irrigation, due, context=chat_id, name=str(chat_id)\n )\n\n except (IndexError, ValueError):\n update.message.reply_text(\"Erro ao agendar o desligamento da irrigação 😞\")", "def start(self, title):\n if self.enabled:\n if title not in self.timers.keys():\n self.timers[title] = {'st' : time.time(),\n 'sum' : 0.0,\n 'count' : 0,\n 'max' : 0.0,\n 'last' : None,\n }\n else:\n self.timers[title]['st'] = time.time()\n\n self.last_start = title", "def watch(self, func, seconds=3600):\n func\n time.sleep(seconds)", "def tick(self):\n uh.rotation(270)\n while True:\n self.show_time()\n time.sleep(60)\n uh.off()", "def _timer_start(self) -> None:\n self.machine.events.post('ball_save_{}_timer_start'.format(self.name))\n '''event: ball_save_(name)_timer_start\n desc: The multiball ball save called (name) has just start its countdown timer.\n '''\n\n shoot_again_ms = self.config['shoot_again'].evaluate([])\n grace_period_ms = self.config['grace_period'].evaluate([])\n hurry_up_time_ms = self.config['hurry_up_time'].evaluate([])\n\n self._start_shoot_again(shoot_again_ms, grace_period_ms, hurry_up_time_ms)", "def set_timer_time(self, time: int) -> None:\n current_mode = self.get_mode()\n # Defining the time for the Timer program only has an effect\n # when first the Timer program is selected.\n if current_mode != 'Timer':\n self.set_mode('Timer')\n self.logger.info(f\"Switching program from '{current_mode}' to \"\n \"'Timer'.\")\n\n return self.send(self.cmd.SET_TIMER_TIME, time)", "def tmpDown(self, mSec):\n timer = QtCore.QTimer(self)\n timer.setSingleShot(True)\n self.connect(timer, QtCore.SIGNAL('timeout()'), self.timerUp)\n timer.start(mSec)\n self.setDown(True)", "def time_interval(key=\"default\", update=True):\n\n if not hasattr(time_interval, 'time_dict'):\n time_interval.time_dict = collections.defaultdict(\n datetime.datetime.now)\n\n now = datetime.datetime.now()\n ret = now - time_interval.time_dict[key]\n\n if update:\n time_interval.time_dict[key] = now\n\n return ret", "def test_timed_reset(self):\n time = 0.005\n cache = TimedCache(max_age=time)\n\n cache[1] = 1\n assert 1 in cache\n assert cache[1] == 1\n sleep(time / 2)\n assert 1 in cache\n assert cache[1] == 1\n cache[1] = 1\n sleep(time / 2)\n assert 1 in cache\n assert cache[1] == 1\n sleep(time / 2)\n assert 1 not in cache\n with pytest.raises(KeyError):\n assert cache[1]", "def run(self):\n last_time = time.time()\n while self.running:\n now_time = time.time()\n interval = now_time - last_time\n last_time = now_time\n self.update(interval)\n time.sleep(Options['update interval'])", "def set_update_interval (self, interval):\n\t\tif self.__timeout:\n\t\t\tgobject.source_remove(self.__timeout)\n #print \"update interval : %s min\" % interval\n\t\tself.__timeout = gobject.timeout_add(interval*60*1000, self.update)", "def tick():\n\n global time1\n # get the current local time from the PC\n time2 = time.strftime(\"%H:%M:%S\")\n # if time string has changed, update it\n if time2 != time1:\n time1 = time2\n timeLabel.config(text=time2)\n # calls itself every 200 milliseconds\n # to update the time display as needed\n # could use >200 ms, but display gets jerky\n 
timeLabel.after(200, tick)", "def addKey(self, time, name, value, view) -> None:\n ...", "def _start_clock(self):\n self._start = time.time()", "def _start_new_timer(self):\n if not self._running.is_set():\n return\n self._timer = threading.Timer(interval=self._interval,\n function=self._check_for_life_signs)\n self._timer.daemon = True\n self._timer.start()", "def measure(self, key, reset=False):\r\n timer = self.timers.get(key, None) if not reset else None\r\n if timer is None:\r\n timer = Timer()\r\n self.timers[key] = timer\r\n return timer" ]
[ "0.7022178", "0.66496783", "0.64718276", "0.64211446", "0.6270911", "0.62581867", "0.62419593", "0.62306786", "0.61925316", "0.6182545", "0.61779076", "0.6164358", "0.6159859", "0.6124073", "0.6121016", "0.6054378", "0.6042401", "0.6031246", "0.60209733", "0.6018492", "0.6016127", "0.60143936", "0.6011642", "0.60005885", "0.597821", "0.59394497", "0.5932187", "0.5919062", "0.5897376", "0.5877064", "0.58725923", "0.5870468", "0.5856246", "0.58365244", "0.5791468", "0.5785768", "0.5781282", "0.5774934", "0.5741016", "0.57387704", "0.57377064", "0.5736665", "0.573171", "0.5729084", "0.57281053", "0.57278025", "0.5723388", "0.5720574", "0.57146883", "0.5709198", "0.56939083", "0.5671521", "0.5667315", "0.5633474", "0.56295425", "0.5604531", "0.56025195", "0.5601712", "0.55994093", "0.55748737", "0.5573908", "0.5571518", "0.55640537", "0.5553574", "0.55447656", "0.5540069", "0.5538485", "0.5534956", "0.55326235", "0.5525097", "0.5522216", "0.5509801", "0.5509801", "0.5497596", "0.5493667", "0.5492665", "0.5474359", "0.5470881", "0.5464032", "0.546199", "0.5460638", "0.54454553", "0.54412967", "0.5436722", "0.54334486", "0.5412368", "0.54116124", "0.5402377", "0.5390302", "0.538805", "0.53764975", "0.5373395", "0.53728503", "0.53718936", "0.53698266", "0.53599423", "0.5357391", "0.53556406", "0.53419006", "0.53416574" ]
0.58821076
29
Set timer for key revocation
def _set_delete_timer(self, key_name, timeout): if key_name is not None: #print("(%d) _set_delete_timer:" % int(time.time()), key_name.hex()[:10], timeout) query_management.QueryEntry(expire_after=timeout, callback_expire=remove_old_key, data={KeyType.hint: key_name}, retry_count=0)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __updateElapsedTime(self):\n if self._keyCodeTime != 0.0 and \\\n (globalClock.getFrameTime() - self._keyCodeTime) >= self._timeout:\n self.notify.debug(\"Key code timed out. Resetting...\")\n self.reset()\n messenger.send(KeyCodes.CLEAR_CODE_EVENT)\n self._keyCodeTime = globalClock.getFrameTime()", "def _api_timer_expiration_handler(self):\n\n try:\n self._api_lock.release()\n except:\n pass", "def renewKey():\n while True:\n try:\n sleep(RENEW_KEY)\n mutex.acquire()\n key_dict.clear()\n mutex.release()\n except:\n print(\"error in renew key\")\n finally:\n if mutex.locked():\n mutex.release()", "def update_time(cls, key):\n key.put()", "def set_invoke_timer(self, timeout, retry_entry=False):\n if self.timer_entry is not None and self.timer_entry.active:\n self.timer_entry.deactivate()\n #print(\"(%d) set_invoke_timer:\" % int(time.time()), timeout)\n self.timer_entry = query_management.QueryEntry(expire_after=timeout,\n callback_expire=self._perform_key_exchange,\n retry_count=0)\n if retry_entry:\n self.timer_entry.data[KeyType.retry_timer] = True", "def revoke_refresh_token(cls, jti: str) -> None:\n redis = cls._conn_redis(cls)\n expired_time = int(timedelta(days=cls._REFRESH_TOKEN_EXPIRES).total_seconds())\n redis.setex(jti,expired_time,'true')", "def do_expire(self):\n # Deep copy to avoid RuntimeError: dictionary changed size during iteration\n _timeouts = deepcopy(self.timeouts)\n for key, value in _timeouts.items():\n if value - self.clock.now() < timedelta(0):\n del self.timeouts[key]\n # removing the expired key\n if key in self.redis:\n self.redis.pop(key, None)", "def _expire_item(self, key):\n (timeout, callback) = self._timeouts[key]\n now = time.time()\n if timeout <= now:\n item = dict.pop(self, key)\n del self._timeouts[key]\n if callback:\n try:\n callback(key, item)\n except TypeError:\n try:\n callback(key)\n except TypeError:\n callback()\n return None\n else:\n return timeout - now", "def clean_timer(sc):\n global prev_dict_\n # Cleaning the previous dictionary after 5 hours\n prev_dict_ = {}\n z.enter(18000, 1, clean_timer, (sc,))", "def test_rotate_expiration(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(10, 80, 20, 19)\n assert key.audit_state == 'old'", "def setDeactivationTime(*argv):", "def synictimer(self, synictimer):\n\n self._synictimer = synictimer", "async def _expire(self, key, ttl):\n return await self.client.touch(key, ttl)", "def set_timer(self, update: Update, context: CallbackContext) -> None:\n chat_id = update.message.chat_id\n try:\n due = int(context.user_data[\"duration\"])\n if due < 0:\n # update.message.reply_text('Sorry we can not go back to future!')\n return\n\n job_removed = self.remove_job_if_exists(str(chat_id), context)\n context.job_queue.run_once(\n self.command_stop_irrigation, due, context=chat_id, name=str(chat_id)\n )\n\n # text = 'Timer successfully set!'\n # if job_removed:\n # text += ' Old one was removed.'\n # update.message.reply_text(text)\n\n except (IndexError, ValueError):\n # update.message.reply_text('Usage: /set <seconds>')\n update.message.reply_text(\"Erro ao agendar o desligamento da irrigação 😞\")", "def reset_timer():\n resetTimer = time.time()\n target_time.clear()\n target_time.append(resetTimer)", "def revoke_access_token(cls, jti: str) -> None:\n redis = cls._conn_redis(cls)\n expired_time = 
int(timedelta(minutes=cls._ACCESS_TOKEN_EXPIRES).total_seconds())\n redis.setex(jti,expired_time,'true')", "def removeKey(self, timeOrHash) -> None:\n ...", "async def _expire(self, key, ttl):\n if key in SimpleMemoryBackend._cache:\n handle = SimpleMemoryBackend._handlers.pop(key, None)\n if handle:\n handle.cancel()\n if ttl:\n loop = asyncio.get_event_loop()\n SimpleMemoryBackend._handlers[key] = loop.call_later(ttl, self.__delete, key)\n return True\n\n return False", "async def _expire(self):\n # pylint: disable=protected-access\n await asyncio.sleep(const.DEFAULT_COMMAND_EXPIRATION)\n self.set(None)", "def expire(self):\n logging.debug(\"Expiring token as wanted...\")\n self.expiration = datetime.now() - timedelta(seconds=(10))", "def test_old_expiration(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(10, 11, 10, 8)\n assert key.audit_state == 'expire'", "def _expire(self):\n del self.map.addr[self.name]\n self.map.notify(\"addrmap_expired\", *[self.name], **{})", "def on_expire(self):\n pass", "def reset_timer(self):\r\n self.time_minutes = 0\r\n self.time_seconds = 0", "def set_timer(self, update: Update, context: CallbackContext) -> None:\n chat_id = update.message.chat_id\n try:\n due = int(context.user_data[\"duration\"])\n if due < 0:\n return\n\n job_removed = self.remove_job_if_exists(str(chat_id), context)\n context.job_queue.run_once(\n self.command_stop_irrigation, due, context=chat_id, name=str(chat_id)\n )\n\n except (IndexError, ValueError):\n update.message.reply_text(\"Erro ao agendar o desligamento da irrigação 😞\")", "def _rescheduleFromRun(self, newTime):\n if newTime is None:\n self.deleteFromStore()\n else:\n self.time = newTime", "def on_expiration_time(self, alarm) -> None:\r\n return", "async def _reset_time(self, request_id: int) -> int:\n raise NotImplementedError()", "def attempt(self, timer, context, phases):", "def stop_timer(self):\n self.end_time = datetime.now()", "def set_lock_time():\n\n pass", "def settimeout(self,timeout=10):\r\n # Update\r\n self.timeout = timeout", "def _mark_timer_complete(crawl_id, engine_redis):\n config_file = engine_redis.get(crawl_id)\n config = json.loads(config_file)\n start_time = config['time']\n stop_time = time.time()\n config['time'] = stop_time - start_time\n config_json = json.dumps(config)\n engine_redis.set(crawl_id, config_json)\n engine_redis.expire(crawl_id, 60*60)", "def reset_timeout (self, new_timeout):\n self.timer.cancel()\n self.timer = Timeout(new_timeout, TestIsTakingTooLong(new_timeout))", "def timerAction():\n timer = threading.Timer(30.0, timerAction)\n timer.daemon = True\n timer.start()\n save()", "def _do_expire(self):\n t = time.time()\n\n # Expire probes\n for ip, expire_at in self.outstanding_probes.items():\n if t > expire_at:\n self.outstanding_probes.pop(ip, None)\n if ip in self.live_servers:\n self.log.warn(\"Server %s down\", ip)\n del self.live_servers[ip]\n\n # Expire flow\n memory = self.memory.copy()\n self.memory.clear()\n for key, val in memory.items():\n ip = key[0]\n if ip in self.live_servers and val.is_expired:\n # Decrease total connection for that server\n self.total_connection[ip] -= 1\n if not val.is_expired:\n self.memory[key] = val", "def expire(self, key, timeout=None):\n try:\n if timeout == 0:\n # force the key to be non-volatile\n result = self._cache.get(key)\n self._cache.set(key, 
result)\n else:\n timeout = timeout or self.default_timeout\n # If the expiration command returns false, we need to reset the key\n # with the new expiration\n if not self._cache.expire(key, timeout):\n value = self.get(key)\n self.set(key, value, timeout)\n except Exception as err:\n return self.warn_or_error(err)", "def on_expiration_time(self, alarm):\r\n self.state.on_expiration_time(alarm)\r\n return", "def timer(self):\n self.time_remaining -= 1\n if self.time_remaining > 0:\n Timer(1, self.timer).start()", "def _api_release_lock_with_timer(self):\n\n if self._apt_timer.is_alive():\n self._apt_timer.cancel()\n\n if self._api_lock.locked():\n self._api_lock.release()", "def run(principal: str | None, keytab: str):\n if not keytab:\n log.warning(\"Keytab renewer not starting, no keytab configured\")\n sys.exit(0)\n\n while True:\n renew_from_kt(principal, keytab)\n time.sleep(conf.getint(\"kerberos\", \"reinit_frequency\"))", "def reset_stop_timer(self) -> None: \r\n self.stop_timer = 0", "def expire_at(self, key, timestamp):\n with self._lock:\n # pylint: disable=unused-variable\n _expire, value = self._values[key]\n self._values[key] = (timestamp, value)", "def remove_if_expired(self, key, now):\n with self.GLOB_LOCK:\n inst = self._request_sessions.get(key, None)\n if inst is not None and (inst.last_access + self.TIMEOUT < now):\n self._request_sessions.pop(key, None)\n return True\n\n return False", "def expire(self):\n if not self.has_connection():\n self.generate_connection()\n self.connection.expire_hit(self.mturk_id)\n self.update()", "def _ResetLastPassTime(self):\n util.StrictOp('updating last GC pass time',\n self.engine.Put,\n KEY_LAST_PASS_TIME, '%.3f' % time.time())", "def test_timed_reset(self):\n time = 0.005\n cache = TimedCache(max_age=time)\n\n cache[1] = 1\n assert 1 in cache\n assert cache[1] == 1\n sleep(time / 2)\n assert 1 in cache\n assert cache[1] == 1\n cache[1] = 1\n sleep(time / 2)\n assert 1 in cache\n assert cache[1] == 1\n sleep(time / 2)\n assert 1 not in cache\n with pytest.raises(KeyError):\n assert cache[1]", "def reset_timer(self, *_) -> \"ALL\":\n self.last = time.time()\n delta = time.time() - self.last\n if delta > 180:\n print(\n \"!!! 
Warning: Watchdog failure detected, spawning a fallback \"\n \"thread.\"\n )\n self.watchdog = FallbackWatchdog(self)\n self.watchdog.start()", "def set_ttl(self, key, ttl, now=None):\n if now is None:\n now = time.time()\n with self._lock:\n # pylint: disable=unused-variable\n _expire, value = self._values[key]\n self._values[key] = (now + ttl, value)", "def delete_expired(self):\n check_time = datetime.now()\n if self.can_expire and self.duration:\n exp_times = deepcopy(self.exp_times)\n for key in exp_times:\n if exp_times[key] < check_time:\n self.delete(key)", "def timer_handler():\r\n \r\n global elapsed_time\r\n elapsed_time += 1", "def set_cache_timeout(self, cache_key, timeout):\n self.cache_timeouts[cache_key.lower()] = timeout", "def toc(self, key):\n self.ends[key] = time()-self.starts[key]", "def set_ttl(self, ttl):", "def renew(self):\n remaining=self.time_left()\n if ( (remaining !=-1) and (self.update_frequency!=-1) and \n (remaining<self.update_frequency) ): \n self.create()", "def reset_timer():\r\n window.after_cancel(timer)\r\n canvas.itemconfig(timer_text, text=f\"00:00\")\r\n pomodoro_title.config(text=\"Timer\", fg=GREEN)\r\n check_marks.config(text=\"\")", "def ttl(self, key):\n return self._command(b'PTTL', key, handler=lambda ms: .001 * ms)", "def _set_token_expiration_time(self, expires_in):\n self.token_expiration_time = dt.datetime.utcnow() + dt.timedelta(0, expires_in) # timedelta(days, seconds)", "def test_rotate_access(self):\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('username', 'keyid', 'Active', created, last_used)\n key.audit(60, 80, 20, 10)\n assert key.audit_state == 'stagnant'", "def _addTiming(self, key, duration):\n pass", "def _ontimer(self, fun, t):\n if t == 0:\n self.cv.after_idle(fun)\n else:\n self.cv.after(t, fun)", "def addKey(self, time, value) -> None:\n ...", "def timeout_set(self, x):\n if not self._dtr_enabled:\n self._resume_time = self.__micros() + x", "def timeout(self):\n self.timeout_scan_flag=True\n self.timer.stop()\n self.status_sig.emit([\"Update_Status\",\"Timeout during acquisition\",'log'])\n self.status_sig.emit([\"Timeout\"])", "def _check_token_is_revoked(self, jti: str) -> None:\n redis = self._conn_redis()\n entry = redis.get(jti)\n if entry and entry == 'true':\n raise HTTPException(status_code=401,detail=\"Token has been revoked\")", "def decrease_timer(self):\n\n if self.queue_length() < 1:\n return\n self._start_time += 1\n if self._clients[0].get_time() == self._start_time:\n self._pop_customer()\n self._start_time = 0", "def pytest_timeout_cancel_timer(item):", "async def resettimer(self, ctx:commands.Context, member: Member = None):\r\n\r\n await self.config.member(member if not member == None else ctx.message.author).currently_fishing.set(False)\r\n await ctx.send('Fishing cooldown reset')", "def on_expire(self, *args):\n\t\traise NotImplementedError", "def record(self, key):\n return TimerCtxManager(timer=self, key=key)", "def test_get_ttl(self):\n self.now = time.time()\n with patch('time.time', self.fake_time):\n storage = Storage()\n keys_to_set = {'1': 'hello',\n '2': 'bye',\n '3': [1, 2, 'three'],\n '4': {1: 'one', 2: 'two'}}\n moes = {'1': time.time() + 5, '4': time.time() + 10}\n for key in keys_to_set.keys():\n storage.set(key, keys_to_set[key], moes.get(key))\n # test at moment t\n self.assertEqual(keys_to_set['1'], storage.get('1'), \"Key '1' should still exist.\")\n # test at moment t+6, 
one key should expire\n self.now += 6\n keys_to_set.pop('1')\n moes.pop('1')\n self.assertRaises(StorageKeyError, storage.get, '1')\n self.assertEqual(keys_to_set['4'], storage.get('4'), \"Key '4' should still exist.\")\n self.assertEqual(keys_to_set, storage._keys_dict, \"Remaining keys are wrong\")\n self.assertEqual(moes, storage._moe_dict, \"Remaining moes are wrong\")\n # test at moment t+11\n self.now += 5\n keys_to_set.pop('4')\n moes.pop('4')\n self.assertRaises(StorageKeyError, storage.get, '1')\n self.assertRaises(StorageKeyError, storage.get, '4')\n self.assertEqual(keys_to_set, storage._keys_dict, \"Remaining keys are wrong\")\n self.assertEqual(moes, storage._moe_dict, \"Remaining moes are wrong\")", "def time_automation_listener(now):\n action()", "async def _timein_refresh(self):\n\t\t\n\t\tawait self.refresh_cache()", "def DestroyTimer(self, obj, event):\n return 1", "def tmpDown(self, mSec):\n timer = QtCore.QTimer(self)\n timer.setSingleShot(True)\n self.connect(timer, QtCore.SIGNAL('timeout()'), self.timerUp)\n timer.start(mSec)\n self.setDown(True)", "def schedule_session_expiry(self, user_id, timeout):\n ukey = self.r_key('session', user_id)\n self.r_server.expire(ukey, timeout)", "def _extend_expiration_time(self, fname=None):\n if fname is None:\n fname = self.uniquefile\n future = datetime.now() + timedelta(seconds=self._lockduration)\n expires = time.mktime(future.timetuple())\n try:\n os.utime(fname, (expires, expires))\n except OSError as e:\n if e.errno not in self.NOT_EXIST_ERRORS:\n raise", "async def generate_new_refesh_key(payload: dict = Depends(get_jwt_payload)):\n if payload[\"type\"] != \"refresh\":\n raise HTTPException(\n status_code=status.HTTP_401_UNAUTHORIZED,\n detail=\"You gave the access key, but we need the refresh key\",\n headers={\"WWW-Authenticate\": \"Bearer\"},\n )\n\n # <- Your token revocation code should be here!\n\n access_token_data = jwt_claims.copy()\n access_token_data[\"sub\"] = payload[\"sub\"]\n access_token_data[\"exp\"] = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)\n access_token_data[\"jti\"] = str(uuid.uuid4())\n\n return AccessToken(access_token=jwt.encode(access_token_data, SECRET_KEY, algorithm=ALGORITHM))", "def test_no_disable(self, monkeypatch):\n monkeypatch.setenv('ENABLE_AUTO_EXPIRE', 'false')\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('user2', 'ldasfkk', 'Inactive', created, last_used)\n key.audit(10, 11, 10, 8)\n assert key.audit_state == 'expire'", "async def on_timer_update(self, secs: int):\n pass", "def pytest_timeout_set_timer(item, settings):", "def stop_timer(self):\r\n self.countdownTimer.stop()", "def testExpirationTime(self):\n\n bye = \"Good bye!\"\n memcache.add('bye', bye, 1)\n assert memcache.get('bye') == bye\n time.sleep(2)\n assert memcache.get('bye') == None", "def set_end_time(self):\n with self.redis_client.lock(\"time-end-lock\"):\n total_seconds = (\n datetime.now() - datetime.fromtimestamp(self.start_time)\n ).total_seconds()\n self.end_time = total_seconds", "def timer_setup(self):\n pass", "def renew_token(self, wts_server_name: str, server_access_token):\n token = wts_get_token(\n hostname=wts_server_name,\n idp=self.idp,\n access_token=server_access_token,\n )\n token_info = decode_token(token)\n # TODO: this would break if user is trying to download object from different commons\n # keep BRH token and wts sparate\n self.access_token = token\n 
self.expire = datetime.fromtimestamp(token_info[\"exp\"])", "def set(self, key, value, ttl=0):\n pass", "def test_issue_reset_time(self):\n pass", "def test_last_used(self, monkeypatch):\n monkeypatch.setenv('ENABLE_AUTO_EXPIRE', 'true')\n monkeypatch.setenv('INACTIVITY_AGE', '10')\n created = datetime.datetime(2019, 1, 1, tzinfo=datetime.timezone.utc)\n last_used = datetime.datetime(2019, 1, 2, tzinfo=datetime.timezone.utc)\n key = Key('user3', 'kljin', 'Active', created, last_used)\n key.audit(10, 11, 2, 1)\n assert key.audit_state == 'expire'\n key.audit(60, 80, 2, 1)\n assert key.audit_state == 'stagnant_expire'", "def rem(self, key):\n if self.dexists('ttl', key):\n self.dpop('ttl', key)\n return super(MyCache, self).rem(key)", "def set_retry_timeout(self, retry_timeout):", "def _SetCurveTime(self, duration):\n lastKey = self.controllerCurve.GetKeyCount() - 1\n timeDelta = self.controllerCurve.length - self.controllerCurve.GetKeyTime(lastKey)\n self.controllerCurve.length = duration\n self.controllerCurve.SetKeyTime(lastKey, duration - timeDelta)\n self.controllerCurve.Sort()", "def set_expiration(self):\n # This is an arbitrary decision setting the expiration time\n # to the current date + expires_in - 10 seconds\n self.expiration = datetime.now() + \\\n timedelta(seconds=(self.expires_in - 10))\n logging.debug('Token expiration set to %s' % self.expiration)", "def test_one_key(self):\n self.now = time.time()\n with patch('time.time', self.fake_time):\n self.storage.set('1', 'one', moe=self.now + 1)\n self.now += 2\n self.gc.expire_random()\n self.assertRaises(StorageKeyError, self.storage.get, '1')", "def on_timeout(self):\n pass", "def removeKey(self, time, attributeIndex, view) -> None:\n ...", "def timeout_change(self, timedelta):\n pass # pylint: disable=unnecessary-pass\n # For backward compatibility only.", "def reset():\n global counter, total_attempts, successful_stops\n timer.stop()\n counter = 0\n total_attempts = 0\n successful_stops = 0", "def tic(self, key):\n self.starts[key] = time()", "def update(self):\n if not self.exists:\n return\n if AT.TIME_TO_EXPIRE in self.attributes:\n if not self.calculate_time_left():\n self.fire_trigger(TR.TIME_EXPIRED)" ]
[ "0.6193708", "0.6188828", "0.61632437", "0.61356395", "0.6049411", "0.5949552", "0.5939341", "0.58892876", "0.5877896", "0.584273", "0.58422077", "0.5806897", "0.57887334", "0.57828623", "0.5779299", "0.5777707", "0.571499", "0.5553746", "0.55466074", "0.5524556", "0.5522834", "0.55197686", "0.55180275", "0.55080485", "0.5502418", "0.5501438", "0.5490628", "0.54829204", "0.54818016", "0.54720414", "0.5452073", "0.5433725", "0.54192233", "0.54075736", "0.5404759", "0.53869784", "0.53849137", "0.5366055", "0.53659093", "0.5342135", "0.53404117", "0.53401095", "0.5338364", "0.53246444", "0.5296316", "0.527842", "0.5270814", "0.5246089", "0.52425325", "0.52410704", "0.5239364", "0.5229163", "0.521959", "0.5212907", "0.52042913", "0.51958114", "0.5181658", "0.51794636", "0.51793873", "0.5176887", "0.51731", "0.51667374", "0.5165357", "0.51649404", "0.51449555", "0.51432407", "0.51419663", "0.5140555", "0.51200277", "0.511971", "0.5118689", "0.511653", "0.51152456", "0.5112949", "0.5109975", "0.5107065", "0.5101066", "0.50981134", "0.5097242", "0.50949395", "0.5080876", "0.50785947", "0.5078464", "0.50757825", "0.5066318", "0.50612783", "0.5057297", "0.5055", "0.50510246", "0.5048263", "0.5043966", "0.5037976", "0.50371695", "0.50320494", "0.50298226", "0.50272936", "0.50229144", "0.50225884", "0.502055", "0.5015503" ]
0.6783776
0
Perform ECDH key exchange to establish secure channel to the node
def _perform_key_exchange(self, query_entry): if KeyType.retry_timer in query_entry.data and query_entry.data[KeyType.retry_timer]: message_key_types.unset_cipher(self.pending_key_name) self.pending_key_name = None self._set_state(KeyExchangeManager.STATE_REQUESTING) #print("# (%d) _perform_key_exchange: to" % int(time.time()), self.counter_node_id.hex()) self.secret_key, self.peer_public_key, self.pending_key_name = message_key_types.get_ECDH_parameters() self.nonce = os.urandom(16) self.random = os.urandom(8) ret = self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, "request", self.peer_public_key, self.nonce, self.random, self.pending_key_name) if not ret: self._set_state(KeyExchangeManager.STATE_NONE) message_key_types.unset_cipher(self.pending_key_name) message_key_types.unset_cipher(self.key_name) self.secret_key = None self.peer_public_key = None self.pending_key_name = None self.nonce = None self.random = None return rand_time = KeyExchangeManager.KEY_EXCHANGE_RETRY_INTERVAL*random.uniform(0.5, 1.5) self.set_invoke_timer(rand_time, retry_entry=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def exchange_key(connection, pub_key):\r\n\r\n if main.diffe_key_exchange is False:\r\n # Get the server's public key\r\n server_pub_key_bytes = connection.recv(1024)\r\n\r\n # Send public key\r\n connection.sendall(rsa.PublicKey.save_pkcs1(pub_key))\r\n\r\n else:\r\n # Rounds of bit-shifting and XOR\r\n rounds = 64\r\n\r\n while True:\r\n\r\n # Generate 4096-bit keys (RFC 3526 Group 16)\r\n client_diffe_key = pyDHE.new(16)\r\n shared_secret = client_diffe_key.negotiate(connection)\r\n\r\n # Encrypt\r\n encrypted = int(binascii.hexlify(rsa.PublicKey.save_pkcs1(pub_key)).decode(), 16)\r\n for x in range(0, rounds):\r\n encrypted = encrypted ^ (shared_secret ** rounds)\r\n encrypted = encrypted << rounds\r\n encrypted = int(str(encrypted)[::-1])\r\n\r\n # Decrypt\r\n decrypted = encrypted\r\n decrypted = int(str(decrypted)[::-1])\r\n for x in range(rounds, 0, -1):\r\n decrypted = decrypted >> rounds\r\n decrypted = decrypted ^ (shared_secret ** rounds)\r\n\r\n # Check if able to decrypt\r\n try:\r\n binascii.unhexlify(hex(decrypted)[2:]).decode()\r\n client_success = True\r\n\r\n # Generate new keys upon failure and try again\r\n except UnicodeDecodeError:\r\n client_success = False\r\n pass\r\n except binascii.Error:\r\n client_success = False\r\n pass\r\n\r\n # Notify client about encryption status\r\n server_success = connection.recv(1024)\r\n if client_success is False:\r\n connection.send(b'DHE')\r\n else:\r\n connection.send(b'CONTINUE')\r\n\r\n # Get encryption status from client\r\n if client_success is False or server_success == b'DHE':\r\n pass\r\n elif server_success == b'CONTINUE':\r\n break\r\n\r\n # Hold encrypted server key\r\n server_encrypted = b''\r\n\r\n # Receive encrypted key from the server\r\n while True:\r\n data = connection.recv(8192)\r\n if data == b'ENDED':\r\n break\r\n elif data[-5:] == b'ENDED':\r\n server_encrypted += data[:-5]\r\n break\r\n server_encrypted += data\r\n\r\n # Send the encrypted key to the server\r\n connection.sendall(bytes(hex(encrypted).encode()))\r\n connection.send(b'ENDED')\r\n\r\n # Decrypt the client's public key\r\n decrypted = int(server_encrypted, 16)\r\n decrypted = int(str(int(decrypted))[::-1])\r\n for x in range(rounds, 0, -1):\r\n decrypted = decrypted >> rounds\r\n decrypted = decrypted ^ (shared_secret ** rounds)\r\n\r\n server_pub_key_bytes = binascii.unhexlify(hex(decrypted)[2:]).decode()\r\n\r\n server_pub_key = rsa.PublicKey.load_pkcs1(server_pub_key_bytes)\r\n # Determine max message size\r\n max_message_size = common.byte_size(server_pub_key.n) - 11\r\n\r\n # Return crypto key information\r\n return server_pub_key, server_pub_key_bytes, max_message_size", "def keyExchangeClient(port):\n try:\n with socket.socket() as sock:\n sock.connect((GW_IP, KEY_EXCHANGE_PORT))\n sock.send(create_msg(HELLO_MASSEGE + \" \" + str(port)).encode())\n valid, data = get_msg(sock)\n if valid:\n key = get_key(data)\n key_dict[port] = key\n print(key_dict)\n except Exception as e:\n print(e, \"error in keyExchangeClient\")", "def authenticate(self):\n # Receive public key from server\n message = self.receive()\n # Initialize RSA with public key of server\n self.secret.init_rsa(public_key=message)\n # Initialize AES\n self.secret.init_aes()\n # Encrypt AES key & nonce\n payload = self.secret.encrypt_rsa(self.secret.export_aes_key())\n # Send encrypted AES key & nonce pair to server\n self.send(payload)\n self.secret.ready = True", "def dh(self):\n self.cli.add_msg(\"Establishing Encryption Key...\")\n dh_message = 
self.connection.recv(DH_MSG_SIZE)\n # Unpack p, g, and server_key from the server's dh message\n p, g, server_key = DH.unpack(dh_message)\n # Generate a randomized private key\n private_key = DH.gen_private_key()\n # Send the server a public key which used the previously\n # Generated private key and both g and p\n public_key = DH.gen_public_key(g, private_key, p)\n self.connection.sendall(DH.package(public_key, LEN_PK))\n # Calculate shared key\n shared_key = DH.get_shared_key(server_key, private_key, p)\n # print(\"Shared Key: {}\".format(shared_key))\n self.cli.add_msg(\"Encryption Key: {}\".format(binascii.hexlify(shared_key).decode(\"utf-8\")))\n return shared_key", "def main():\n # key = random(1024)\n # ciphertexts = [encrypt(key, msg) for msg in MSGS]\n\n # Get key and secret message\n knownPlain2 = \"The nice thing about Keeyloq is now we cryptographers can drive a lot of fancy cars - Dan Boneh\"\n key = strxor(ciphertexts[2], knownPlain2)\n secret = strxor(target, key)\n\n print \"Key: \" + key\n print \"Key (Hex): \" + key.encode(\"hex\")\n print \"Secret: \" + secret", "def do_server(wrapping_key_public):\n secret = os.urandom(32)\n logging.info(f'secret: {hexlify(secret)}')\n\n ref_path = 'server-secret-for-reference.bin'\n logging.debug(f'creating {ref_path}')\n with open(ref_path, 'wb') as f:\n f.write(secret)\n\n # generate IV\n iv = os.urandom(12)\n logging.debug(f'iv: {hexlify(iv)}')\n\n # generate 256-bit AES encryption key\n ephemeral_key = os.urandom(32)\n logging.debug(f'ephemeral_key: {hexlify(ephemeral_key)}')\n\n # xor_mask = os.urandom(32)\n xor_mask = b'\\x00' * 32\n logging.debug(f'xor_mask: {hexlify(xor_mask)}')\n\n # xor with mask to get transportKey\n transport_key = bytes([ephemeral_key[i] ^ xor_mask[i] for i in range(32)])\n logging.debug(f'transport_key: {hexlify(transport_key)}')\n\n logging.debug(f'wrapping the transport key with the public RSA wrapping key')\n encrypted_transport_key = wrap(wrapping_key_public, transport_key)\n\n logging.debug(f'encrypting the secure secret with the AES ephermeral key')\n encrypted_secret, tag = encrypt(ephemeral_key, iv, secret)\n\n logging.debug(f'encrypted_secret: {hexlify(encrypted_secret)}')\n logging.debug(f'tag: {hexlify(tag)}')\n\n authorizationList = AuthorizationList()\n\n key_description = KeyDescription()\n key_description['keyFormat'] = KM_KEY_FORMAT_RAW\n key_description['keyParams'] = authorizationList\n\n secure_key_wrapper = SecureKeyWrapper()\n secure_key_wrapper['version'] = 0\n secure_key_wrapper['encryptedTransportKey'] = encrypted_transport_key\n secure_key_wrapper['initializationVector'] = iv\n secure_key_wrapper['keyDescription'] = key_description\n secure_key_wrapper['encryptedKey'] = encrypted_secret\n secure_key_wrapper['tag'] = tag\n\n encoded_secure_key_wrapper = encode_secure_key_wrapper(secure_key_wrapper)\n\n return encoded_secure_key_wrapper, xor_mask", "def keyExchangeServer():\n with socket.socket() as sock:\n try:\n # if the port number already taken, the following line will not work\n sock.bind((GW_ADRRESS, KEY_EXCHANGE_PORT))\n print(\"success in binding\")\n except:\n print(\"error in binding\")\n sys.exit()\n sock.listen(0)\n while True:\n client_socket, client_address = sock.accept()\n valid, data = get_msg(client_socket)\n if valid:\n data = data.split(\" \")\n if data[0] == HELLO_MASSEGE:\n if (client_address[0], int(data[1])) not in key_dic:\n key = int.from_bytes(Fernet.generate_key(), \"big\")\n else:\n key = key_dic[(client_address[0], int(data[1]))]\n 
client_socket.send(create_msg(get_key(key)).encode())\n key_dic[(client_address[0], int(data[1]))] = key\n print(key_dic)", "def _ecdsa_key(self,private_key):\n numbers = private_key.private_numbers()\n content = WriteMessage()\n\n public_key = private_key.public_key()\n serialized = public_key.public_bytes(\n encoding = serialization.Encoding .OpenSSH,\n format = serialization.PublicFormat.OpenSSH)\n\n\n # The SSH agent format somehow combines the elliptic curve's\n # `x` and `y` values (in `numbers.public_numbers`) into a single\n # `Q` value. I couldn't figure the specifics out exactly, but\n # the format is used exactly the same way int the OpenSSH\n # public key format, so we'll just reuse that one instead.\n\n pk_data = b64decode(serialized.split(None,2)[1])\n content.data.extend(pk_data)\n\n # nist = self._ecdsa_nists[private_key.curve.name]\n # content.write_string('ecdsa-sha2-{}'.format(nist))\n # content.write_string(nist)\n #\n # buffer = bytearray()\n # buffer.extend(b'0x04')\n #\n # x = numbers.public_numbers.x\n # y = numbers.public_numbers.y\n # for number in [x,y]:\n # tmp = WriteMessage()\n # tmp.write_mpint(number)\n # buffer.extend(tmp.data[4:])\n\n content.write_mpint(numbers.private_value)\n return content.data", "def main():\n # Create the socket\n server_sckt = socket(AF_INET, SOCK_STREAM)\n server_sckt.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)\n server_sckt.bind((HOST, PORT))\n server_sckt.listen()\n print(f\"Listening on {HOST}:{PORT}\")\n conn, client = server_sckt.accept()\n print(f\"New client: {client[0]}:{client[1]}\")\n\n # Negotiating the cipher\n print(\"Negotiating the cipher\")\n msg_in = conn.recv(4096).decode('utf-8')\n proposed = parse_proposal(msg_in)\n cipher_name, key_size = select_cipher(SUPPORTED_CIPHERS, proposed)\n print(f\"We are going to use {cipher_name}{key_size}\")\n msg_out = generate_cipher_response(cipher_name, key_size)\n conn.send(msg_out.encode())\n\n # Negotiating the key\n print(\"Negotiating the key\")\n dh = DiffieHellman()\n dh.generate_public_key()\n msg_in = conn.recv(4096).decode('utf-8')\n client_public_key = parse_dhm_request(msg_in)\n dh.generate_shared_secret(client_public_key)\n msg_out = generate_dhm_response(dh.public_key)\n conn.send(msg_out.encode())\n cipher, key, iv = get_key_and_iv(dh.shared_key, cipher_name, key_size)\n print(\"The key has been established\")\n\n print(\"Initializing cryptosystem\")\n crypto = cipher.new(key, cipher.MODE_CBC, iv)\n hashing = HMAC.new(key, digestmod=SHA256)\n print(\"All systems ready\")\n\n while True:\n msg_in = conn.recv(4096)\n if len(msg_in) < 1:\n conn.close()\n break\n msg, hmac = read_message(msg_in, crypto)\n validate_hmac(msg_in, hmac, hashing)\n print(f\"Received: {msg}\")\n msg_out = f\"Server says: {msg[::-1]}\"\n conn.send(msg_out.encode())", "def generate_encryption_key_epic(shared_secret):\n sha1_hasher = CryptCreateHash(CALG_SHA1)\n CryptHashData(sha1_hasher, shared_secret.encode('utf-8'))\n aes_key = CryptDeriveKey(sha1_hasher, CALG_AES_128)\n return aes_key", "def generate_encryption_key_epic(shared_secret):\n sha1_hasher = CryptCreateHash(CALG_SHA1)\n CryptHashData(sha1_hasher, shared_secret.encode('utf-8'))\n aes_key = CryptDeriveKey(sha1_hasher, CALG_AES_128)\n return aes_key", "def dh_get_key():\n G = EcGroup()\n priv_dec = G.order().random()\n pub_enc = priv_dec * G.generator()\n return (G, priv_dec, pub_enc)", "def create_key ():", "def private_key(self):", "def enable_encryption(self, output_key: bytes, input_key: bytes) -> None:\n self.chacha = 
chacha20.Chacha20Cipher(output_key, input_key)\n self.state.has_authenticated = True", "def post_key(self):\n # print(self.key)\n #Sending the key to the attacker.\n s.send(bytes(\"K\\n{}\".format(str(self.key,'utf-8')),'utf-8'))", "def test_private_key_ec(self):\n priv = \"\"\"-----BEGIN EC PARAMETERS-----\nBggqhkjOPQMBBw==\n-----END EC PARAMETERS-----\n-----BEGIN EC PRIVATE KEY-----\nMHcCAQEEIJZ57L6f6ywtZa7VhsvthAShxjdrL9EIrVwVgxnmD5b3oAoGCCqGSM49\nAwEHoUQDQgAEIg6eBOPv5M2z4ANtsJukbimKWX04lanEdALsbu2xNCDBXJ0IJ4Sd\n3u4G1qvrKX0mBHd7yUPGui+7bvp084mNag==\n-----END EC PRIVATE KEY-----\"\"\"\n cert = \"\"\"-----BEGIN CERTIFICATE-----\nMIIBiTCCAS+gAwIBAgIJAINtiwRC4eBJMAoGCCqGSM49BAMCMCExDzANBgNVBAMM\nBkVDIDI1NjEOMAwGA1UECgwFV2ViQ0EwHhcNMTgwNTI3MTAyNTIyWhcNMTgwNjI2\nMTAyNTIyWjAhMQ8wDQYDVQQDDAZFQyAyNTYxDjAMBgNVBAoMBVdlYkNBMFkwEwYH\nKoZIzj0CAQYIKoZIzj0DAQcDQgAEIg6eBOPv5M2z4ANtsJukbimKWX04lanEdALs\nbu2xNCDBXJ0IJ4Sd3u4G1qvrKX0mBHd7yUPGui+7bvp084mNaqNQME4wHQYDVR0O\nBBYEFEmE51rEUz4TuD8oEAw2lvMfvi6LMB8GA1UdIwQYMBaAFEmE51rEUz4TuD8o\nEAw2lvMfvi6LMAwGA1UdEwQFMAMBAf8wCgYIKoZIzj0EAwIDSAAwRQIgfiKDoHB3\nWzRO1juSMyVBuBw2p1o0ab+3fBNDvff8PXcCIQCUKIyzTnM7Wz6TkABfqOcmx7n4\nsbRvdOg3CepLGW3Ytw==\n-----END CERTIFICATE-----\"\"\"\n pkcs12 = _create_pkcs12(priv, cert)\n self.assertEqual(utils.private_key_type(pkcs12), c.KEY_EC)", "def _encrypt_aes_key(aes_key: bytes, receiver_public_key: RsaKey) -> bytes:\n cipher_rsa = PKCS1_OAEP.new(receiver_public_key)\n return cipher_rsa.encrypt(aes_key)", "def dh_encrypt(pub, message):\n \n Group, private, public = dh_get_key()#generate new DH pair for Alice\n #private key is an integer/scalar and public key is a point on the curve \n \n #check whether public key of Bob is valid and on curve \n assert Group.check_point(pub)\n \n #Alice obtains shared secret by multiplying her private key with bob's forwarded public key\n key = pub.pt_mul(private)#dA* qB\n print \"key from enc is\", key\n \n hashedKey=sha256(key.export()).digest()\n\n \n plaintext = message.encode(\"utf8\")#encode message\n aes = Cipher(\"aes-128-gcm\")#select cipher\n iv = urandom(16)#generate initialization vector \n cipher, tag = aes.quick_gcm_enc(hashedKey[:16], iv, plaintext)#encrypt using shared key \n ciphertext = [iv,cipher,tag,public]\n\n return ciphertext", "def request_dedkey(mybox, myport=22):\n # start Requests session\n sc = requests.Session()\n\n # set up auth & headers\n sc.headers.update({'User-Agent': \"Mozilla/5.0\"})\n sc.auth = (udata.userauth['user'], udata.userauth['passwd'])\n\n # send request\n cpj = sc.post('https://cpjump.inmotionhosting.com/dedtmpkeys/process-dedkey.php',\n data={'server': mybox, 'port': myport, 'submit': \"Submit\"}, verify=False)\n\n # check login\n check_cpjump_login(cpj)\n\n print(\"** Queued key placement on %s:%s\" % (mybox, myport))\n\n if not xopts['nowait']:\n print(\">> Awaiting response from eDesk...\")\n edok = log_wait(find_latest_log(udata.srcs['edesk']))\n if not re.search(r'success', edok, re.I|re.M):\n print(\"!! Key establish was unsuccessful. Aborting.\")\n sys.exit(101)\n else:\n print(\"** Key established. 
Connecting to %s:%s...\" % (mybox, myport))\n ssh_to(mybox, myport)", "def operate_cipher(self):", "def generate_key(self):\n\n self.key = Fernet.generate_key()\n self.cryptor = Fernet(self.key)", "def test_ec(self):\n key = c.KEY_EC\n usage = [\n c.KU_DIGITALSIGNATURE,\n c.KU_NONREPUDIATION,\n c.KU_KEYAGREEMENT,\n c.KU_ENCIPHERONLY,\n c.KU_DECIPHERONLY,\n ]\n self.assertTrue(utils.check_key_usage(key, usage))", "def ecdsa_key_gen():\n G = EcGroup()\n priv_sign = G.order().random()\n pub_verify = priv_sign * G.generator()\n return (G, priv_sign, pub_verify)", "def get_key_input():\n return get_input(message='Please enter your master key:',\n secure=True, check_timer=False)", "def ask_keys(self, update, context):\r\n update.message.reply_text('Введите новый ключ')\r\n return self.LISTEN", "def get_key(key):\n encrypt_key = pow(key, e, n)\n return encrypt_key", "def setup_key_encrypt(self):\r\n\t\tself.max_key = math.floor(len(self.message) / 2)\r\n\t\twhile True:\r\n\t\t\tkey = input(f\"Please enter a key value less than or equal to {self.max_key}. --> \")\r\n\t\t\ttry:\r\n\t\t\t\tself.key = int(key)\r\n\t\t\texcept ValueError:\r\n\t\t\t\tprint(\"Key needs to be a number.\")\r\n\t\t\t\tcontinue\r\n\t\t\tif self.key > self.max_key: \t\t\t\r\n\t\t\t\tprint(f\"{key} is too big of a number.\")\t\r\n\t\t\telif self.key == 0:\r\n\t\t\t\tprint(\"0 cannot be a key\")\t\t\t\r\n\t\t\telse:\t\t\t\r\n\t\t\t\tbreak", "def encrypt_aes(msg, key, iv):\r\n #start timer\r\n start = timeit.default_timer()\r\n\r\n #converting key to bytes from hex\r\n key = bytes.fromhex(key)\r\n msg = pad(msg)\r\n obj = AES.new(key, AES.MODE_CBC, iv)\r\n ciphertxt = obj.encrypt(msg)#ciphertxt will be in 'bytes'\r\n\r\n #converting ciphertxt into hexadecimal\r\n ciphertxt = ciphertxt.hex()\r\n\r\n print(\"Ciper is: \",ciphertxt)\r\n\r\n #stop timer\r\n stop = timeit.default_timer()\r\n print('Encryption Running Time: ', stop-start)\r\n \r\n return ciphertxt", "def generate_aes_key ( ) :\n import hashlib\n sr = Crypto.Random.random.StrongRandom( )\n key_bits = sr.getrandbits( 256 )\n sha_key = hashlib.sha256( str( key_bits ) ).digest( )\n return sha_key", "def private_key():\n return \"Toholampi summer festival 2017 has the most harcore rock bands\"", "def test_create_digital_access_key(self):\n pass", "def test_create_key():\n\n assert symmetric.create_key() != \"\"", "def DHencrypt(plaintext, symmetricKey, p, gen):\r\n \"Method was updated to use AES symetric decryption that was\"\r\n \"provided in the starter code as option of symetric encrytion using shared secret keys is generated.\"\r\n simplified_AES.keyExp(symmetricKey) # Generating round keys for AES.\r\n ciphertext = simplified_AES.encrypt(plaintext) # Running simplified AES.\r\n return ciphertext", "def test_encryption_cycle_aes_128_gcm_iv12_tag16_hkdf_sha256_ecdsa_p256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def public_key(self):", "def computeSessionKey(client_pub, server_secret, p):\r\n return expMod(client_pub, server_secret, p)", "def generate_symmetric_key():\n return Fernet.generate_key()", "def test_ec_no(self):\n key = c.KEY_EC\n usage = [\n 
c.KU_KEYENCIPHERMENT,\n c.KU_DATAENCIPHERMENT,\n ]\n self.assertFalse(utils.check_key_usage(key, usage))", "def download_key():\n data = check_args(('cloudProvider', ))\n provider = jobs.init_provider(data, True)\n key = encrypt_key(provider.get_key(), data['username'])\n return make_response(keyName=provider.keyname, key=key)", "def key():\n pass", "def key():\n pass", "def test_encryption(e, c):\n\n#\te = int(raw_input(\"\\nEnter e from public key\\n\"))\n#\tc = int(raw_input(\"\\nEnter c from public key\\n\"))\n\n string = raw_input(\"\\nEnter word to encrpyt\\n\")\n for i in range(0, len(string)):\n print endecrypt(ord(string[i]), e, c)", "def __init__(self, key, msg0503):\n enkey1 = map(ord, AES.new(key).encrypt(msg0503[:16]))\n self.cipher = AES.new(\"\".join(\n map(chr, (enkey1[i] ^ ord(msg0503[i + 16]) for i in range(16)))))\n self.encrypt_seq = random.randint(0, 0xffff)", "def test_encrypt_key(self):\n encrypted = encrypt('message', key=b'0' * 32)\n\n assert encrypted\n assert encrypted != 'message'", "def test_elliptic_parallel_transport(self):\n \n self._test_parallel_transport(k=1)", "def sshkey():\n with settings( hide( 'everything' ), warn_only=True ):\n print ( '\\rChecking %s... ' % env['host'] ),\n\n try:\n dsa = open( os.getenv('HOME') + '/.ssh/id_dsa.pub', 'r' ).readline().split()\n except IOError as e:\n sys.exit( 'SSH ID file not found' )\n run( 'if [ -d .ssh ]; then true; else mkdir .ssh; fi' )\n exists = run( 'grep \\'%s\\' ~/.ssh/authorized_keys' % dsa[1] )\n if not exists.succeeded:\n run ( 'echo %s %s %s >> ~/.ssh/authorized_keys' % (dsa[0], dsa[1], dsa[2]) )\n print 'SSH key added!'\n else:\n print 'SSH key already present, no update required'", "def test_set_tmp_ecdh(self):\n context = Context(SSLv23_METHOD)\n for curve in get_elliptic_curves():\n if curve.name.startswith(\"Oakley-\"):\n # Setting Oakley-EC2N-4 and Oakley-EC2N-3 adds\n # ('bignum routines', 'BN_mod_inverse', 'no inverse') to the\n # error queue on OpenSSL 1.0.2.\n continue\n # The only easily \"assertable\" thing is that it does not raise an\n # exception.\n context.set_tmp_ecdh(curve)", "def gk_handshake_1_2_aes( self , packet ):\n\t\ttry:\n\n\t\t\t# Decapsulate the TKIP packet, and rebuild the plaintext packet.\n\t\t\tplaintext\t= self.handleAES.decapsulate( packet , self.TK )\n\t\t\tpacket \t\t= LLC()/SNAP()/EAPOL()/EAPOL_Key()/EAPOL_WPAKey()\n\t\t\tnew_packet \t= packet.__class__( plaintext )\n\t\t\t\n\t\t\t# Assert on the flags in the Key Information to verify it is GKHS Message 1/2.\n\t\t\tkeyinfoReceived \t= new_packet.getlayer( EAPOL_WPAKey ).KeyInfo\n\t\t\tself.__setKeyIDFromFlaglist( self.__getFlaglist( keyinfoReceived ) )\n\t\t\tflaglist\t\t= ['HMAC_SHA1_AES','group','ack','mic','secure']\n\t\t\tflaglist.append( self.keyID ) # Copying the Key ID from the received packet.\n\t\t\tkeyinfoCalculated \t= self.__getKeyInformation( flaglist )\n\t\t\tassert( keyinfoReceived == keyinfoCalculated ), \\\n\t\t\t\t'The received packet is not Group Key Handshake Message 1/2.'\n\t\t\tself.logger.log( self.logger.RECEIVED , 'EAPOL Group Key Handshake Message 1/2 AES' )\n\t\t\t\n\t\t\t# Assert that the EAPoL WPA Key layer has a valid MIC.\n\t\t\tself.__assertWPAKeyMIC( new_packet , Crypto.Hash.SHA )\n\n\t\t\t# Update the Replay Counter.\n\t\t\tself.replayCounter\t= new_packet.getlayer( EAPOL_WPAKey ).ReplayCounter\n\t\t\t\n\t\t\t# Retrieve the Group Temporal key.\n\t\t\tself.GTK = self.handleAES.unwrapKey( new_packet.WPAKey , self.KEK ) # Resulting key of 16/32 octets.\n\t\t\tself.logger.logKey( 
'Group Temporal Key' , self.GTK )\n\t\t\t\n\t\texcept:\n\t\t\traise", "def generate_keystream(self):", "def key_request(self, user):\n\t\tclient_log.debug(f'Запрос публичного ключа для {user}')\n\t\treq = {\n\t\t\tACTION: PUBLIC_KEY_REQUEST,\n\t\t\tTIME: time.time(),\n\t\t\tACCOUNT_NAME: user\n\t\t}\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tif RESPONSE in ans and ans[RESPONSE] == 511:\n\t\t\treturn ans[DATA]\n\t\telse:\n\t\t\tclient_log.error(f'Не удалось получить ключ собеседника{user}.')", "def generate(self):\n if self.curvetype == KeyType.ECDSA_P256v1:\n self.private_key_obj = ec.generate_private_key(ec.SECP256R1(), default_backend())\n elif self.curvetype == KeyType.ECDSA_SECP256k1:\n self.private_key_obj = ec.generate_private_key(ec.SECP256K1(), default_backend())\n self.public_key_obj = self.private_key_obj.public_key()\n self._get_naive_private_key_bytes()\n self._get_naive_public_key_bytes()", "def generate(self, module):\n\n # If size is wrong, delete the key. A new key will be generated in the next step.\n if self.key_current_size != self.size and not self.ignore_size:\n self.remove()\n self.key_exists = False\n else:\n self.changed = False\n\n # If there is no key or user has set \"force\"\n if not self.key_exists or self.force:\n if self.type == \"RSA\":\n self.key = crypto_rsa.generate_private_key(public_exponent=65537, key_size=self.size, backend=crypto_default_backend())\n elif self.type == \"DSA\":\n self.key = crypto_dsa.generate_private_key(key_size=self.size, backend=crypto_default_backend())\n elif self.type == \"ECDSA\":\n if self.size == 256:\n self.curve = crypto_ec.SECP256R1()\n elif self.size == 384:\n self.curve = crypto_ec.SECP384R1()\n elif self.size == 521:\n self.curve = crypto_ec.SECP521R1()\n self.key = crypto_ec.generate_private_key(curve=self.curve, backend=crypto_default_backend())\n elif self.type == \"ED25519\":\n self.size = 128\n self.curve = \"EC25519\"\n else:\n raise HostkeyError(\"Unknown key type.\")\n\n if self.type != \"ED25519\":\n self.privkey = self.key.private_bytes(crypto_serialization.Encoding.PEM, crypto_serialization.PrivateFormat.PKCS8, crypto_serialization.NoEncryption())\n self.pubkey = self.key.public_key().public_bytes(crypto_serialization.Encoding.OpenSSH, crypto_serialization.PublicFormat.OpenSSH)\n\n try:\n privfile = os.open(self.fullpath, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)\n os.write(privfile, self.privkey)\n os.close(privfile)\n pubfile = os.open(self.fullpath + \".pub\", os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)\n os.write(pubfile, self.pubkey)\n os.close(pubfile)\n self.changed = True\n except IOError:\n self.remove()\n raise HostkeyError(get_exception())\n else:\n # use ssh-keygen to generate ED25519 Hostkeys\n # Keyfile must not exist, as there is no \"force-overwrite\" in ssh-keygen\n self.remove()\n retcode = subprocess.call([\"ssh-keygen\", \"-q\", \"-t\", \"ed25519\", \"-N\", '', \"-f\", self.fullpath])\n self.changed = True\n else:\n self.changed = False\n\n file_args = module.load_file_common_arguments(module.params)\n file_args['path'] = self.fullpath\n if module.set_fs_attributes_if_different(file_args, False):\n self.changed = True\n file_args['path'] = self.fullpath + \".pub\"\n file_args['mode'] = self.pubmode\n if module.set_fs_attributes_if_different(file_args, False):\n self.changed = True", "def init_connection(session):\n user = session.user\n sock = session.sock\n rnd = 
b64encode(get_random_bytes(config.SECURE_CHANNEL_KEY_SIZE_BYTES))\n common.send_msg(sock, {\n kk.typ: kk.init_conn,\n kk.user: user,\n kk.nonce: rnd.decode()\n })\n resp = common.recv_message(sock)\n if resp[kk.typ] != kk.init_key:\n print('Big bad happen.')\n exit(1)\n if messaging.common.check_msg_sig(session, resp, extra=rnd) != True:\n print('Invalid server signature while initiating connection!')\n exit(2)\n key = messaging.common.pkc_decrypt(\n b64decode(resp[kk.key]), session.encryption_key)\n session.symkey = key", "def generate_key():\n key = Fernet.generate_key()\n with open(\"Secret.key\",\"wb\")as key_file:\n key_file.write(key)", "def exchange_keys(self, client_sock, obtained_cell):\n\n private_key = ec.generate_private_key(\n ec.SECP384R1(), default_backend()) # elliptic curve\n public_key = private_key.public_key() # duh same.\n serialised_public_key = public_key.public_bytes(\n encoding=serialization.Encoding.PEM,\n format=serialization.PublicFormat.SubjectPublicKeyInfo\n )\n # serialise the public key that I'm going to send them\n their_key = serialization.load_pem_public_key(\n obtained_cell.payload, backend=default_backend())\n shared_key = private_key.exchange(ec.ECDH(), their_key)\n salty = os.urandom(8)\n derived_key = HKDF(\n algorithm=hashes.SHA256(),\n length=32,\n salt=salty,\n info=None,\n backend=default_backend()\n ).derive(shared_key)\n reply_cell = Cell(serialised_public_key,\n salt=salty, ctype=CellType.CONNECT_RESP)\n signature = self.sign(salty) # sign the random bytes\n reply_cell.signature = signature # assign the signature.\n if util.RELAY_DEBUG:\n print(\"reply cell\")\n print(pickle.dumps(reply_cell))\n # send them the serialised version.\n client_sock.send(pickle.dumps(reply_cell))\n return private_key, derived_key", "def recipient_public_key(self):", "def shared_key(private_key,public_key):\n\treturn private_key.exchange(public_key)", "def setup_key_decrypt(self):\r\n\t\tself.max_key = math.floor(len(self.message) / 2)\r\n\t\twhile True:\r\n\t\t\tkey = input(\"Please enter the key that was used to encrypt your message.--> \")\r\n\t\t\ttry:\r\n\t\t\t\tself.key = int(key)\r\n\t\t\texcept ValueError:\r\n\t\t\t\tprint(\"Key needs to be a number.\")\r\n\t\t\t\tcontinue\r\n\t\t\tif self.key > self.max_key: \t\t\t\r\n\t\t\t\tprint(f\"{key} is too big of a number.\")\r\n\t\t\telif self.key == 0:\r\n\t\t\t\tprint(\"0 cannot be a key.\")\t\t\t\t\r\n\t\t\telse:\t\t\t\r\n\t\t\t\tbreak", "def configure_enable_aes_encryption(device, master_key):\n dialog = Dialog(\n [\n Statement(\n pattern=r\".*New\\s*key.*\",\n action=f\"sendline({master_key})\",\n loop_continue=True,\n continue_timer=False,\n ),\n Statement(\n pattern=r\".*Confirm\\s*key.*\",\n action=f\"sendline({master_key})\",\n loop_continue=True,\n continue_timer=False,\n )\n ]\n )\n try:\n device.configure(\"key config-key password-encrypt\", reply=dialog)\n device.configure(\"password encryption aes\")\n except SubCommandFailure as e:\n raise SubCommandFailure(\n \"Could not enables aes password encryption on device {device}.\\nError:\"\n \" {e}\".format(device=device.name, e=str(e))\n )", "def aes_ecb(key):\n return AES.new(key, AES.MODE_ECB)", "def main():\n key, plain = get_key_plain()\n encode(key, plain)", "def gk_handshake_2_2_aes( self , eapolMIC = True , eapolMICFlag = True , wepMIC = True , customFlaglist = None , addNonce = None , customRC = None , addData = None ):\n\t\tparameterList = 'eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',wepMIC=' + str(wepMIC) + 
',customFlaglist=' + str(customFlaglist) + ',addNonce=' + str(addNonce) + ',customRC=' + str(customRC) + ',addData=' + str(addData)\n\t\tself.logger.log( self.logger.TRANSMIT , 'EAPOL Group Key Handshake Message 2/2 AES (' + parameterList + ')')\n\t\ttry:\n\t\t\t\n\t\t\t# Create an empty EAPOL WPA Key packet.\n\t\t\tpacket \t\t= EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()\n\t\t\tpacketKey \t= packet.getlayer( EAPOL_WPAKey )\n\t\t\tflaglist\t= ['HMAC_SHA1_AES','group','secure']\n\t\t\tflaglist.append( self.keyID )\n\t\t\tif eapolMICFlag is True:\n\t\t\t\tflaglist.append('mic')\n\t\t\t\n\t\t\t# Fill in the fields.\n\t\t\tif customFlaglist is not None:\n\t\t\t\tflaglist = customFlaglist\n\t\t\tpacketKey.KeyInfo = self.__getKeyInformation( flaglist )\n\t\t\tif customRC is not None:\n\t\t\t\tif customRC == 'lower':\t\n\t\t\t\t\tself.replayCounter -= 1\n\t\t\t\telif customRC == 'higher':\n\t\t\t\t\tself.replayCounter += 1\n\t\t\tpacketKey.ReplayCounter = self.replayCounter\n\t\t\tif addNonce is not None:\n\t\t\t\tif addNonce == 'supplicant':\n\t\t\t\t\tpacketKey.Nonce = self.SNonce\n\t\t\t\tif addNonce == 'authenticator':\n\t\t\t\t\tpacketKey.Nonce = self.ANonce\n\t\t\t\tif addNonce == 'random':\n\t\t\t\t\tpacketKey.Nonce = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )\n\t\t\tif addData is not None:\n\t\t\t\tif addData == 'data':\n\t\t\t\t\tpacketKey.WPAKeyLength \t= 32\n\t\t\t\t\tpacketKey.WPAKey \t= binascii.a2b_hex( os.urandom( 32 ).encode('hex') )\n\t\t\t\tif addData == 'dataNoLength':\n\t\t\t\t\tpacketKey.WPAKeyLength \t= 0\n\t\t\t\t\tpacketKey.WPAKey \t= binascii.a2b_hex( os.urandom( 32 ).encode('hex') )\n\t\t\t\tif addData == 'dataShortLength':\n\t\t\t\t\tpacketKey.WPAKeyLength \t= 16\n\t\t\t\t\tpacketKey.WPAKey \t= binascii.a2b_hex( os.urandom( 32 ).encode('hex') )\n\t\t\t\tif addData == 'dataLongLength':\n\t\t\t\t\tpacketKey.WPAKeyLength \t= 48\n\t\t\t\t\tpacketKey.WPAKey \t= binascii.a2b_hex( os.urandom( 32 ).encode('hex') )\n\t\t\t\t\t\n\t\t\t# Calculate and add the MIC.\n\t\t\tif eapolMIC is True:\n\t\t\t\tmic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.SHA )\n\t\t\t\tpacketKey.WPAKeyMIC = mic.digest()\n\t\t\t\n\t\t\t# Get the plaintext and generate the Logical-Link Control (LLC),\n\t\t\t# and Subnetwork Access Protocol (SNAP).\n\t\t\tplaintext \t= str( packet )\n\t\t\tllcSnap\t\t= LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )\n\t\t\tllcSnap\t /= SNAP( OUI=0x000000 , code=0x888e )\n\t\t\tplaintext\t= str( llcSnap ) + plaintext\n\t\t\t\n\t\t\t# Generate the dot11 header and request the encapsulated dot11wep message.\n\t\t\tdot11\t\t= Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , FCfield='wep+to-DS' , type='Data' , subtype=0 )\n\t\t\tdot11wep \t= self.handleAES.encapsulate( plaintext , self.TK , self.addr1 , self.addr2 , self.addr3 )\n\t\t\tif wepMIC is False:\n\t\t\t\tdot11wep.icv = 0 # NOTE/FIXME: This only clears part of the MIC, still making it incorrect though.\n\t\t\t\t\n\t\t\t# Transmit the packet.\n\t\t\tpacket\t= RadioTap()/dot11/dot11wep\n\t\t\tsendp( packet , iface=self.iface , verbose=False )\n\t\t\t\n\t\texcept:\n\t\t\traise", "def key():", "def generate_ephemeral_key(self, key):\n error = vscf_error_t()\n result = self._lib_vscf_ecc.vscf_ecc_generate_ephemeral_key(self.ctx, key.c_impl, error)\n VscfStatus.handle_status(error.status)\n instance = VscfImplTag.get_type(result)[0].take_c_ctx(cast(result, POINTER(VscfImplTag.get_type(result)[1])))\n return instance", "def _clientTLS13Handshake(self, settings, session, 
clientHello,\n clientCertChain, privateKey, serverHello):\n prfName, prf_size = self._getPRFParams(serverHello.cipher_suite)\n\n # we have client and server hello in TLS 1.3 so we have the necessary\n # key shares to derive the handshake receive key\n sr_kex = serverHello.getExtension(ExtensionType.key_share)\n sr_psk = serverHello.getExtension(ExtensionType.pre_shared_key)\n if not sr_kex and not sr_psk:\n raise TLSIllegalParameterException(\"Server did not select PSK nor \"\n \"an (EC)DH group\")\n if sr_kex:\n sr_kex = sr_kex.server_share\n self.ecdhCurve = sr_kex.group\n cl_key_share_ex = clientHello.getExtension(ExtensionType.key_share)\n cl_kex = next((i for i in cl_key_share_ex.client_shares\n if i.group == sr_kex.group), None)\n if cl_kex is None:\n raise TLSIllegalParameterException(\"Server selected not \"\n \"advertised group.\")\n kex = self._getKEX(sr_kex.group, self.version)\n\n shared_sec = kex.calc_shared_key(cl_kex.private,\n sr_kex.key_exchange)\n else:\n shared_sec = bytearray(prf_size)\n\n # if server agreed to perform resumption, find the matching secret key\n resuming = False\n if sr_psk:\n clPSK = clientHello.getExtension(ExtensionType.pre_shared_key)\n ident = clPSK.identities[sr_psk.selected]\n psk = [i[1] for i in settings.pskConfigs if i[0] == ident.identity]\n if psk:\n psk = psk[0]\n else:\n resuming = True\n psk = HandshakeHelpers.calc_res_binder_psk(\n ident, session.resumptionMasterSecret,\n session.tickets)\n else:\n psk = bytearray(prf_size)\n\n secret = bytearray(prf_size)\n # Early Secret\n secret = secureHMAC(secret, psk, prfName)\n\n # Handshake Secret\n secret = derive_secret(secret, bytearray(b'derived'),\n None, prfName)\n secret = secureHMAC(secret, shared_sec, prfName)\n\n sr_handshake_traffic_secret = derive_secret(secret,\n bytearray(b's hs traffic'),\n self._handshake_hash,\n prfName)\n cl_handshake_traffic_secret = derive_secret(secret,\n bytearray(b'c hs traffic'),\n self._handshake_hash,\n prfName)\n\n # prepare for reading encrypted messages\n self._recordLayer.calcTLS1_3PendingState(\n serverHello.cipher_suite,\n cl_handshake_traffic_secret,\n sr_handshake_traffic_secret,\n settings.cipherImplementations)\n\n self._changeReadState()\n\n for result in self._getMsg(ContentType.handshake,\n HandshakeType.encrypted_extensions):\n if result in (0, 1):\n yield result\n else:\n break\n encrypted_extensions = result\n assert isinstance(encrypted_extensions, EncryptedExtensions)\n\n size_limit_ext = encrypted_extensions.getExtension(\n ExtensionType.record_size_limit)\n if size_limit_ext and not settings.record_size_limit:\n for result in self._sendError(\n AlertDescription.illegal_parameter,\n \"Server sent record_size_limit extension despite us not \"\n \"advertising it\"):\n yield result\n if size_limit_ext:\n if size_limit_ext.record_size_limit is None:\n for result in self._sendError(\n AlertDescription.decode_error,\n \"Malformed record_size_limit extension\"):\n yield result\n if not 64 <= size_limit_ext.record_size_limit <= 2**14+1:\n for result in self._sendError(\n AlertDescription.illegal_parameter,\n \"Invalid valid in record_size_limit extension\"):\n yield result\n # the record layer code expects a limit that excludes content type\n # from the value while extension is defined including it\n self._send_record_limit = size_limit_ext.record_size_limit - 1\n self._recv_record_limit = min(2**14, settings.record_size_limit - 1)\n\n # if we negotiated PSK then Certificate is not sent\n certificate_request = None\n certificate = None\n if not 
sr_psk:\n for result in self._getMsg(ContentType.handshake,\n (HandshakeType.certificate_request,\n HandshakeType.certificate,\n HandshakeType.compressed_certificate),\n CertificateType.x509):\n if result in (0, 1):\n yield result\n else:\n break\n\n if isinstance(result, CertificateRequest):\n certificate_request = result\n\n # we got CertificateRequest so now we'll get Certificate\n for result in self._getMsg(ContentType.handshake,\n HandshakeType.certificate,\n CertificateType.x509):\n if result in (0, 1):\n yield result\n else:\n break\n\n certificate = result\n assert isinstance(certificate, Certificate)\n\n srv_cert_verify_hh = self._handshake_hash.copy()\n\n for result in self._getMsg(ContentType.handshake,\n HandshakeType.certificate_verify):\n if result in (0, 1):\n yield result\n else:\n break\n certificate_verify = result\n assert isinstance(certificate_verify, CertificateVerify)\n\n signature_scheme = certificate_verify.signatureAlgorithm\n self.serverSigAlg = signature_scheme\n\n signature_context = KeyExchange.calcVerifyBytes((3, 4),\n srv_cert_verify_hh,\n signature_scheme,\n None, None, None,\n prfName, b'server')\n\n for result in self._clientGetKeyFromChain(certificate, settings):\n if result in (0, 1):\n yield result\n else:\n break\n publicKey, serverCertChain, tackExt = result\n\n if signature_scheme in (SignatureScheme.ed25519,\n SignatureScheme.ed448):\n pad_type = None\n hash_name = \"intrinsic\"\n salt_len = None\n method = publicKey.hashAndVerify\n elif signature_scheme[1] == SignatureAlgorithm.ecdsa:\n pad_type = None\n hash_name = HashAlgorithm.toRepr(signature_scheme[0])\n matching_hash = self._curve_name_to_hash_name(\n publicKey.curve_name)\n if hash_name != matching_hash:\n raise TLSIllegalParameterException(\n \"server selected signature method invalid for the \"\n \"certificate it presented (curve mismatch)\")\n\n salt_len = None\n method = publicKey.verify\n else:\n scheme = SignatureScheme.toRepr(signature_scheme)\n pad_type = SignatureScheme.getPadding(scheme)\n hash_name = SignatureScheme.getHash(scheme)\n salt_len = getattr(hashlib, hash_name)().digest_size\n method = publicKey.verify\n\n if not method(certificate_verify.signature,\n signature_context,\n pad_type,\n hash_name,\n salt_len):\n raise TLSDecryptionFailed(\"server Certificate Verify \"\n \"signature \"\n \"verification failed\")\n\n transcript_hash = self._handshake_hash.digest(prfName)\n\n for result in self._getMsg(ContentType.handshake,\n HandshakeType.finished,\n prf_size):\n if result in (0, 1):\n yield result\n else:\n break\n finished = result\n\n server_finish_hs = self._handshake_hash.copy()\n\n assert isinstance(finished, Finished)\n\n finished_key = HKDF_expand_label(sr_handshake_traffic_secret,\n b\"finished\", b'', prf_size, prfName)\n verify_data = secureHMAC(finished_key, transcript_hash, prfName)\n\n if finished.verify_data != verify_data:\n raise TLSDecryptionFailed(\"Finished value is not valid\")\n\n # now send client set of messages\n self._changeWriteState()\n\n # Master secret\n secret = derive_secret(secret, bytearray(b'derived'), None, prfName)\n secret = secureHMAC(secret, bytearray(prf_size), prfName)\n\n cl_app_traffic = derive_secret(secret, bytearray(b'c ap traffic'),\n server_finish_hs, prfName)\n sr_app_traffic = derive_secret(secret, bytearray(b's ap traffic'),\n server_finish_hs, prfName)\n\n if certificate_request:\n client_certificate = Certificate(serverHello.certificate_type,\n self.version)\n if clientCertChain:\n # Check to make sure we have the same 
type of certificates the\n # server requested\n if serverHello.certificate_type == CertificateType.x509 \\\n and not isinstance(clientCertChain, X509CertChain):\n for result in self._sendError(\n AlertDescription.handshake_failure,\n \"Client certificate is of wrong type\"):\n yield result\n\n client_certificate.create(clientCertChain)\n # we need to send the message even if we don't have a certificate\n for result in self._sendMsg(client_certificate):\n yield result\n\n if clientCertChain and privateKey:\n valid_sig_algs = certificate_request.supported_signature_algs\n if not valid_sig_algs:\n for result in self._sendError(\n AlertDescription.missing_extension,\n \"No Signature Algorithms found\"):\n yield result\n\n availSigAlgs = self._sigHashesToList(settings, privateKey,\n clientCertChain,\n version=(3, 4))\n signature_scheme = getFirstMatching(availSigAlgs,\n valid_sig_algs)\n scheme = SignatureScheme.toRepr(signature_scheme)\n signature_scheme = getattr(SignatureScheme, scheme)\n\n signature_context = \\\n KeyExchange.calcVerifyBytes((3, 4), self._handshake_hash,\n signature_scheme, None, None,\n None, prfName, b'client')\n\n if signature_scheme in (SignatureScheme.ed25519,\n SignatureScheme.ed448):\n pad_type = None\n hash_name = \"intrinsic\"\n salt_len = None\n sig_func = privateKey.hashAndSign\n ver_func = privateKey.hashAndVerify\n elif signature_scheme[1] == SignatureAlgorithm.ecdsa:\n pad_type = None\n hash_name = HashAlgorithm.toRepr(signature_scheme[0])\n salt_len = None\n sig_func = privateKey.sign\n ver_func = privateKey.verify\n else:\n pad_type = SignatureScheme.getPadding(scheme)\n hash_name = SignatureScheme.getHash(scheme)\n salt_len = getattr(hashlib, hash_name)().digest_size\n sig_func = privateKey.sign\n ver_func = privateKey.verify\n\n signature = sig_func(signature_context,\n pad_type,\n hash_name,\n salt_len)\n if not ver_func(signature, signature_context,\n pad_type,\n hash_name,\n salt_len):\n for result in self._sendError(\n AlertDescription.internal_error,\n \"Certificate Verify signature failed\"):\n yield result\n\n certificate_verify = CertificateVerify(self.version)\n certificate_verify.create(signature, signature_scheme)\n\n for result in self._sendMsg(certificate_verify):\n yield result\n\n # Do after client cert and verify messages has been sent.\n exporter_master_secret = derive_secret(secret,\n bytearray(b'exp master'),\n self._handshake_hash, prfName)\n\n self._recordLayer.calcTLS1_3PendingState(\n serverHello.cipher_suite,\n cl_app_traffic,\n sr_app_traffic,\n settings.cipherImplementations)\n # be ready to process alert messages from the server, which\n # MUST be encrypted with ap traffic secret when they are sent after\n # Finished\n self._changeReadState()\n\n cl_finished_key = HKDF_expand_label(cl_handshake_traffic_secret,\n b\"finished\", b'',\n prf_size, prfName)\n cl_verify_data = secureHMAC(\n cl_finished_key,\n self._handshake_hash.digest(prfName),\n prfName)\n\n cl_finished = Finished(self.version, prf_size)\n cl_finished.create(cl_verify_data)\n\n if not self._ccs_sent and clientHello.session_id:\n ccs = ChangeCipherSpec().create()\n msgs = [ccs, cl_finished]\n else:\n msgs = [cl_finished]\n\n for result in self._sendMsgs(msgs):\n yield result\n\n # CCS messages are not allowed in post handshake authentication\n self._middlebox_compat_mode = False\n\n # fully switch to application data\n self._changeWriteState()\n\n self._first_handshake_hashes = self._handshake_hash.copy()\n\n resumption_master_secret = derive_secret(secret,\n 
bytearray(b'res master'),\n self._handshake_hash, prfName)\n\n self.session = Session()\n self.extendedMasterSecret = True\n\n serverName = None\n if clientHello.server_name:\n serverName = clientHello.server_name.decode(\"utf-8\")\n\n appProto = None\n alpnExt = encrypted_extensions.getExtension(ExtensionType.alpn)\n if alpnExt:\n appProto = alpnExt.protocol_names[0]\n\n heartbeat_ext = encrypted_extensions.getExtension(ExtensionType.heartbeat)\n if heartbeat_ext:\n if not settings.use_heartbeat_extension:\n for result in self._sendError(\n AlertDescription.unsupported_extension,\n \"Server sent Heartbeat extension without one in \"\n \"client hello\"):\n yield result\n if heartbeat_ext.mode == HeartbeatMode.PEER_ALLOWED_TO_SEND and \\\n settings.heartbeat_response_callback:\n self.heartbeat_can_send = True\n self.heartbeat_response_callback = settings.\\\n heartbeat_response_callback\n elif heartbeat_ext.mode == HeartbeatMode.\\\n PEER_NOT_ALLOWED_TO_SEND or not settings.\\\n heartbeat_response_callback:\n self.heartbeat_can_send = False\n else:\n for result in self._sendError(\n AlertDescription.illegal_parameter,\n \"Server responded with invalid Heartbeat extension\"):\n yield result\n self.heartbeat_supported = True\n\n self.session.create(secret,\n bytearray(b''), # no session_id in TLS 1.3\n serverHello.cipher_suite,\n None, # no SRP\n clientCertChain,\n certificate.cert_chain if certificate else None,\n None, # no TACK\n False, # no TACK in hello\n serverName,\n encryptThenMAC=False, # all ciphers are AEAD\n extendedMasterSecret=True, # all TLS1.3 are EMS\n appProto=appProto,\n cl_app_secret=cl_app_traffic,\n sr_app_secret=sr_app_traffic,\n exporterMasterSecret=exporter_master_secret,\n resumptionMasterSecret=resumption_master_secret,\n # NOTE it must be a reference, not a copy!\n tickets=self.tickets)\n\n yield \"finished\" if not resuming else \"resumed_and_finished\"", "def _program_key(self, slot, base32_key, require_button):\n try:\n config = self.parent.yk.init_config()\n except (AttributeError, yubico.yubikey_usb_hid.usb.USBError):\n return tkMessageBox.showerror(\n \"Error\",\n \"No YubiKey detected\"\n )\n config.extended_flag('SERIAL_API_VISIBLE', True)\n config.mode_challenge_response(\n 'h:' + _rzfill(_base32_to_hex(base32_key), 40),\n type='HMAC',\n variable=True,\n require_button=bool(require_button),\n )\n try:\n self.parent.yk.write_config(config, slot=slot)\n tkMessageBox.showinfo(\n \"Success\",\n \"Successfully programmed YubiKey in slot %s.\" % slot\n )\n except (\n yubico.yubico_exception.YubicoError,\n yubico.yubico_exception.InputError,\n yubico.yubikey_usb_hid.usb.USBError\n ) as e:\n tkMessageBox.showerror(\"Error\", e)\n self._program_cancel()", "def generate_key():\n key = ''.join([chr(random.randint(0, 0x10)) for _ in range(block_size)])\n return AES.new(second_key, AES.MODE_ECB).encrypt(pad((key.encode('ascii')), block_size))", "def sendToClient(plaintext):\n signature = userKeys.signUsingPrivateKey(plaintext)\n encryptedText = userKeys.encrypt(plaintext, contactKey)\n s.send(encryptedText)\n time.sleep(1)\n s.send(signature)", "def generate_key(self):\n self.key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(self.key)", "def write_key():\n key = fernet.Fernet.generate_key()\n keyfile = open(KEY_PATH,'wb')\n keyfile.write(key)\n keyfile.close()", "def __init__( self, masterKey ):\n\n assert len(masterKey) == const.MASTER_KEY_SIZE\n\n checkKeys()\n\n # The random name is used to recognize previously issued 
tickets.\n #self.keyName = mycrypto.weak_random(NAME_LENGTH)\n\n # Initialization vector for AES-CBC.\n self.IV = mycrypto.strong_random(IV_LENGTH)\n\n # The server's actual (encrypted) protocol state.\n self.state = ProtocolState(masterKey)\n\n # AES and HMAC key to protect the ticket.\n self.symmTicketKey = AESKey\n self.hmacTicketKey = HMACKey", "def test_encryption_cycle_aes_256_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_256_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def key_manager():\n key = DBKey(5, [], 2)\n key.receive_db_key()\n key.send_db_key()\n return key", "def fernet_encript(key,message):\n\tf = Fernet(key)\n\treturn f.encrypt(message)", "def __init__(self, key_info):\n if (key_info.type != client_pb2.KeyInfo.ECDSA):\n raise error.UnsupportedAlgorithmError(\n \"Expected ECDSA key, but got key type %d\" % key_info.type)\n\n # Will raise a PemError on invalid encoding\n self.__der, _ = pem.from_pem(key_info.pem_key, self.__READ_MARKERS)\n try:\n self.__key = ecdsa.VerifyingKey.from_der(self.__der)\n except ecdsa.der.UnexpectedDER as e:\n raise error.EncodingError(e)", "def test_encryption_cycle_aes_128_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_128_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def generate_key():\n key = Fernet.generate_key()\n with open(\"secret.key\", \"wb\") as key_file:\n key_file.write(key)", "def newKeyGenerate():\n generate()\n return '', 204", "def expand_key(master_key):\n #s_box = bytes2matrix(s_box1)\n # Round constants https://en.wikipedia.org/wiki/AES_key_schedule#Round_constants\n r_con = (\n 0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40,\n 0x80, 0x1B, 0x36, 0x6C, 0xD8, 0xAB, 0x4D, 0x9A,\n 0x2F, 0x5E, 0xBC, 0x63, 0xC6, 0x97, 0x35, 0x6A,\n 0xD4, 0xB3, 0x7D, 0xFA, 0xEF, 0xC5, 0x91, 0x39,\n )\n\n # Initialize round keys with raw key material.\n key_columns = bytes2matrix(master_key, 4)\n #print(key_columns)\n iteration_size = len(master_key) // 4\n\n\n # Each iteration has exactly as many columns as the key material.\n columns_per_iteration = len(key_columns)\n i = 1\n while len(key_columns) < (N_ROUNDS + 1) * 4:\n # Copy previous word.\n word = list(key_columns[-1])\n\n # Perform schedule_core once every \"row\".\n if len(key_columns) % iteration_size == 0:\n # Circular shift.\n word.append(word.pop(0))\n # Map to S-BOX.\n word = [s_box[b-1] for b in word]\n\n # XOR with first byte of R-CON, since the others bytes of R-CON are 0.\n word[0] ^= r_con[i]\n i += 1\n elif len(master_key) == 32 and len(key_columns) % iteration_size == 4:\n # Run word through S-box in the fourth iteration when using a\n # 256-bit key.\n word = [s_box[b] for b in word]\n\n # XOR with equivalent word from previous iteration.\n word = bytes(i^j for i, j in zip(word, key_columns[-iteration_size]))\n key_columns.append(word)\n\n # Group key words in 
4x4 byte matrices.\n return [key_columns[4*i : 4*(i+1)] for i in range(len(key_columns) // 4)]", "def genKey(self, otherKey):\n self.sharedSecret = self.genSecret(self.privateKey, otherKey)\n #print(\"Shared secret:\")\n #print(self.sharedSecret)\n s = hashlib.sha256()\n s.update(bytes(str(self.sharedSecret).encode()))\n self.key = s.digest()", "def sendToServer(plaintext):\n signature = keys.signUsingPrivateKey(plaintext)\n encryptedText = keys.encrypt(plaintext, serverSessionKey)\n s.send(encryptedText)\n time.sleep(1)\n s.send(signature)", "def test_encryption_cycle_aes_192_gcm_iv12_tag16_hkdf_sha256_non_framed(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=0,\n algorithm=Algorithm.AES_192_GCM_IV12_TAG16_HKDF_SHA256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def send(event=None): # event is passed by binders.\n msg = my_msg.get()\n my_msg.set(\"\") # Clears input field.\n if rsa.cont > 0:\n message = idea.get_encrypt_message(msg, idea.get_key()) \n \n client_socket.send(bytes(msg, \"utf8\"))\n\n if rsa.cont == 0:\n rsa.cont += 1\n print(\"chave publica \", rsa.get_public_key())\n print(\"chave privada\", rsa.get_private_key())\n client_socket.send(bytes(rsa.get_public_key(), \"utf8\"))\n\n if msg == \"{quit}\":\n cont = 0\n client_socket.close()\n top.quit()", "def sendInit(self, connID, key):\r\n assert len(connID) == 16\r\n assert len(key) == 16\r\n\r\n self.sendString(connID + key)", "def test_encryption_cycle_aes_128_gcm_iv12_tag16_hkdf_sha256_ecdsa_p256_single_frame(self):\n ciphertext, _ = aws_encryption_sdk.encrypt(\n source=VALUES[\"plaintext_128\"],\n key_provider=self.kms_master_key_provider,\n encryption_context=VALUES[\"encryption_context\"],\n frame_length=1024,\n algorithm=Algorithm.AES_128_GCM_IV12_TAG16_HKDF_SHA256_ECDSA_P256,\n )\n plaintext, _ = aws_encryption_sdk.decrypt(source=ciphertext, key_provider=self.kms_master_key_provider)\n assert plaintext == VALUES[\"plaintext_128\"]", "def from_key(self, public_id, key):\n otp = self.get_otp(key)\n from_key = modhex_encode(public_id.encode('hex')) + modhex_encode(otp.encode('hex'))\n return from_key", "def setup(self):\n self.nonce = generate_nonce()\n # print('nonce: ' + str(self.nonce))\n n, ciphertext, tag = aes_encode(self.aes, self.nonce)\n print(Colors.BOLD + 'N --> S: {N_N}K' + Colors.ENDC)\n print('\\t' + Colors.BOLD + 'N_N: ' + Colors.ENDC + str(self.nonce))\n print('\\t' + Colors.BOLD + 'K: ' + Colors.ENDC + str(self.aes))\n print('\\t' + Colors.BOLD + '{N_N}K : (n, c, t)' + Colors.ENDC)\n # print('sending encrypted, (n, c, t) : (' + str(n) + ', ' + str(ciphertext) + ', ' + str(tag) + ')')\n to_send = {'dest': 'setup', 'n': n, 'c': ciphertext, 't': tag} # dictionary to send to the server\n self.nodesocket.sendall(pickle.dumps(to_send))\n data = pickle.loads(self.nodesocket.recv(MAX_SIZE))\n self.id = data['id'] # set the given id from the server\n return data", "def test_encrypt_key_invalid(self):\n with pytest.raises(EncryptionError):\n encrypt('message', key=b'0' * 31)", "def prepare_env():\n if APPID is None or APIKEY is None or EHSM_IP is None:\n print(\"Please set environment variable APPID, APIKEY, ehsm_ip!\")\n exit(1)\n generate_primary_key(EHSM_IP, EHSM_PORT)\n global encrypted_primary_key_path\n encrypted_primary_key_path = 
\"./encrypted_primary_key\"\n generate_data_key(EHSM_IP, EHSM_PORT, encrypted_primary_key_path, 32)\n global encrypted_data_key_path\n encrypted_data_key_path = \"./encrypted_data_key\"\n patch_encryption()", "def gen_key(app):\n\tos.system('lxc-attach -n %s -- ssh-keygen -t rsa -N \"\" -f key' % app)", "def encryption_key(self) -> bytearray:\n # Handle if encryption is disabled.\n if self.aes_on == 0:\n return None\n # Encryption is enabled so read the key and return it.\n key = bytearray(16)\n self._read_into(_REG_AES_KEY1, key)\n return key", "def text(message):\n room = session.get('room')\n key = os.urandom(32)\n iv = os.urandom(16)\n print(key,iv)\n\n print(key[:2],key[:4])\n print(len(key),len(iv))\n print(type(key))\n data = 'hello world 1234' # <- 16 bytes\n\n enc = aes_encrypt(key,data,iv)\n dec = aes_decrypt(key,enc,iv)\n\n print('data:',data)\n print('cipher:', enc)\n print('plain:',dec)\n test = os.urandom(2)\n print('key:', int.from_bytes(test, byteorder='little'))\n print('key', test)\n \n emit('enc_msg', {'key': key.hex(),\n 'cipher': enc.hex(),\n 'iv' : iv.hex(),\n }, room=room)\n emit('message', {'msg': session.get('name') + ':' + message['msg']}, room=room)", "def main():\n\n f = sys.stdin\n\n # skip first message\n f.readline()\n\n m_frame = bitstring_to_bytes(f.readline()[-113:-17])\n c_frame = bitstring_to_bytes(f.readline()[-160:-16])\n\n tmp = xor(key, c_frame[11:17])\n aes = AES.new(tmp, AES.MODE_ECB)\n\n Pd_d = bytes.fromhex('00' * 6) + m_frame[1:11]\n Pd_ = aes.encrypt(Pd_d)\n\n parameters = m_frame[11:15]\n Pd = xor(Pd_, parameters)\n P = aes.encrypt(Pd)\n\n sys.stdout.write(bytes_to_bitstring(P))", "def test_ec_ca_no(self):\n key = c.KEY_EC\n usage = [\n c.KU_KEYENCIPHERMENT,\n c.KU_DATAENCIPHERMENT,\n ]\n self.assertFalse(utils.check_key_usage(key, usage, True))", "def get_otp(self, key):\n packed = self.pack()\n obj = AES.new(key, AES.MODE_ECB)\n ciphertext = obj.encrypt(packed)\n return ciphertext", "def main():\n challenge = ChristmasChallenge()\n\n # Opening flow.\n print challenge.RecvUntil('accessing sensitive services.\\n')\n\n # Join. 
Works fine on the server but not locally because I have no idea why.\n # challenge.Join()\n\n # Secure the connection.\n challenge.Secure()\n\n # Login.\n challenge.Login()\n\n # Fortran.\n challenge.Fortran()\n\n # Elevate.\n # challenge.Elevate()\n\n # Private.\n challenge.Private()", "def server_side_encryption_key(self, server_side_encryption_key):\n\n self._server_side_encryption_key = server_side_encryption_key", "def receive_exchange_request(self, pubkey, nonce, random_val, hint):\n if self.state != KeyExchangeManager.STATE_REQUESTING:\n #print(\"(%d) receive_exchange_request: processing\" % int(time.time()))\n self.peer_public_key = pubkey\n self.nonce = nonce\n self.random = random_val\n self.secret_key, self.peer_public_key, self.pending_key_name = message_key_types.get_ECDH_parameters()\n self.shared_key = message_key_types.derive_shared_key(self.secret_key, pubkey, random_val)\n self._set_state(KeyExchangeManager.STATE_CONFIRMING)\n self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, \"response\",\n self.peer_public_key, self.nonce, self.random,\n self.pending_key_name)\n self.set_cipher(self.pending_key_name, hint)\n else:\n #print(\"(%d) receive_exchange_request: ignoring\" % int(time.time()))\n message_key_types.unset_cipher(self.pending_key_name)\n self.pending_key_name = None\n if self.key_name is None:\n self._set_state(KeyExchangeManager.STATE_NONE)\n else:\n self._set_state(KeyExchangeManager.STATE_ESTABLISHED)\n rand_time = KeyExchangeManager.KEY_EXCHANGE_RETRY_INTERVAL * random.uniform(0.5, 1.5)\n if self.timer_entry is not None and self.timer_entry.active:\n self.timer_entry.update_expiration_time(rand_time)\n self.timer_entry.data[KeyType.retry_timer] = True\n else:\n self.set_invoke_timer(rand_time, retry_entry=True)", "def main():\n\n prime = 0xffffffffffffffffc90fdaa22168c234c4c6628b80dc1cd129024e088a67cc74020bbea63b139b22514a08798e3404ddef9519b3cd3a431b302b0a6df25f14374fe1356d6d51c245e485b576625e7ec6f44c42e9a637ed6b0bff5cb6f406b7edee386bfb5a899fa5ae9f24117c4b1fe649286651ece45b3dc2007cb8a163bf0598da48361c55d39a69163fa8fd24cf5f83655d23dca3ad961c62f356208552bb9ed529077096966d670c354e4abc9804f1746c08ca237327ffffffffffffffff\n base = 2\n \n connection = diffiehellman_mitm_sim(prime, base)\n\n # intercept alices public key\n prime, base , _ = next(connection)\n\n # send prime instead of alices public key to bob. Recieve Bobs public key, \n # which we forget as it is not needs. The shared kill will be 0.\n\n connection.send((prime, base, prime))\n\n #Send prime as bob's public key to alice. We have ensured that the shared\n #hared secret key is 0. Recieve Alice's ciphertext for bob\n ciphertext_a2b = connection.send(prime)\n\n # decrypt\n malcolm = AES_CBC(SHA1(bso.int_to_bytes(0)).digest()[:16], b'0'*16)\n messages = []\n messages.append(bso.remove_padding_pkcs7(malcolm.decrypt(ciphertext_a2b[:-16], ciphertext_a2b[-16:])))\n\n #Send the ciphertext to bob. 
Recieve his response\n ciphertext_b2a = connection.send(ciphertext_a2b)\n\n messages.append(bso.remove_padding_pkcs7(malcolm.decrypt(ciphertext_b2a[:-16], ciphertext_b2a[-16:])))\n\n assert messages[0] == b'Message to Bob'\n assert messages[1] == b'Message to Alice'\n\n \n return", "def get_cert_and_key(self, code):\n if self.data.get(code) is None:\n match = re.match(\"^[0-9a-zA-Z-]+$\", code)\n if not match:\n raise Exception(\"Invalid Code '%s'\" % code)\n match = re.match(\"^[0-9a-zA-Z.-]+$\", self.server)\n if not match:\n raise Exception(\"Invalid Server '%s'\" % self.server)\n # Race condition: if another process uses ps to see the code, it\n # could conceivably race to relay.globusonline.org to get \n # the cert/key. Not a dangerous situation, though, because then\n # this would fail, and we'd know something is wrong\n args = [\"gsissh\", \n \"-v\", \"-F\", \"/dev/null\", \n \"-o\", \"GSSApiTrustDns no\",\n \"-o\", \"ServerAliveInterval 15\",\n \"-o\", \"ServerAliveCountMax 8\",\n self.server, \"-p\", str(self.port),\n \"register\", code]\n self.__setup_x509_dirs()\n if self.debug:\n print \"Executing '\" + \"' '\" + join(args) + \"'\\n\"\n pipe = Popen(args, env = self.pipe_env,\n stdout=PIPE, stderr=PIPE, close_fds = True)\n (out, err) = pipe.communicate()\n returncode = pipe.returncode\n if returncode == 255:\n print \"Error: Could not connect to server\"\n print \"---\"\n print err\n return None\n elif returncode > 0:\n print \"Error: The server returned an error\" \n print \"---\"\n print out, err\n return None\n elif returncode < 0:\n print \"Error: Could not connect to server\" \n print \"---\"\n print \"Exited abnormaly: received signal \" + str(-returncode)\n print out, err\n return None\n self.data[code] = self.parse_config(out)\n\n return self.data[code]" ]
[ "0.6512742", "0.6338184", "0.63009053", "0.62812775", "0.62282526", "0.613101", "0.6050611", "0.60426515", "0.6037874", "0.5955243", "0.5955243", "0.5944734", "0.5941283", "0.58977014", "0.5856448", "0.5838825", "0.5827069", "0.5824423", "0.5812279", "0.5808559", "0.57647914", "0.57522196", "0.5745461", "0.5724081", "0.5685803", "0.56688005", "0.5666724", "0.56548184", "0.5649433", "0.56461996", "0.5621342", "0.55774105", "0.5566849", "0.5565285", "0.5544557", "0.553354", "0.5532449", "0.5529505", "0.55274165", "0.55272573", "0.5524541", "0.5524541", "0.5511124", "0.5504918", "0.54994833", "0.54914725", "0.5490835", "0.5489525", "0.5486731", "0.5474866", "0.54604167", "0.5460068", "0.545884", "0.5455172", "0.5454985", "0.54408026", "0.5437008", "0.5432049", "0.54315484", "0.5427671", "0.5422544", "0.5410627", "0.540394", "0.54029363", "0.5400359", "0.5391496", "0.53905696", "0.5372213", "0.53695935", "0.5369224", "0.53688926", "0.53630817", "0.5357812", "0.535567", "0.5353509", "0.5353267", "0.5347558", "0.533878", "0.53322196", "0.53230596", "0.5316897", "0.53141034", "0.5303956", "0.52952135", "0.5292009", "0.5290457", "0.5288786", "0.5272962", "0.52723545", "0.52695674", "0.5265824", "0.5259901", "0.5257738", "0.5256521", "0.5256263", "0.5255002", "0.52528876", "0.5251712", "0.5248801", "0.5247266", "0.5239446" ]
0.0
-1
Procedure when receiving a message with BBcNetwork.REQUEST_KEY_EXCHANGE
def receive_exchange_request(self, pubkey, nonce, random_val, hint):
    if self.state != KeyExchangeManager.STATE_REQUESTING:
        #print("(%d) receive_exchange_request: processing" % int(time.time()))
        self.peer_public_key = pubkey
        self.nonce = nonce
        self.random = random_val
        self.secret_key, self.peer_public_key, self.pending_key_name = message_key_types.get_ECDH_parameters()
        self.shared_key = message_key_types.derive_shared_key(self.secret_key, pubkey, random_val)
        self._set_state(KeyExchangeManager.STATE_CONFIRMING)
        self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, "response",
                                                  self.peer_public_key, self.nonce, self.random,
                                                  self.pending_key_name)
        self.set_cipher(self.pending_key_name, hint)
    else:
        #print("(%d) receive_exchange_request: ignoring" % int(time.time()))
        message_key_types.unset_cipher(self.pending_key_name)
        self.pending_key_name = None
        if self.key_name is None:
            self._set_state(KeyExchangeManager.STATE_NONE)
        else:
            self._set_state(KeyExchangeManager.STATE_ESTABLISHED)
        rand_time = KeyExchangeManager.KEY_EXCHANGE_RETRY_INTERVAL * random.uniform(0.5, 1.5)
        if self.timer_entry is not None and self.timer_entry.active:
            self.timer_entry.update_expiration_time(rand_time)
            self.timer_entry.data[KeyType.retry_timer] = True
        else:
            self.set_invoke_timer(rand_time, retry_entry=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def key_request(self, user):\n\t\tclient_log.debug(f'Запрос публичного ключа для {user}')\n\t\treq = {\n\t\t\tACTION: PUBLIC_KEY_REQUEST,\n\t\t\tTIME: time.time(),\n\t\t\tACCOUNT_NAME: user\n\t\t}\n\t\twith socket_lock:\n\t\t\tsend_message(self.transport, req)\n\t\t\tans = get_message(self.transport)\n\t\tif RESPONSE in ans and ans[RESPONSE] == 511:\n\t\t\treturn ans[DATA]\n\t\telse:\n\t\t\tclient_log.error(f'Не удалось получить ключ собеседника{user}.')", "def keyExchangeClient(port):\n try:\n with socket.socket() as sock:\n sock.connect((GW_IP, KEY_EXCHANGE_PORT))\n sock.send(create_msg(HELLO_MASSEGE + \" \" + str(port)).encode())\n valid, data = get_msg(sock)\n if valid:\n key = get_key(data)\n key_dict[port] = key\n print(key_dict)\n except Exception as e:\n print(e, \"error in keyExchangeClient\")", "def receive_exchange_response(self, pubkey, random_val, hint):\n #print(\"(%d) receive_exchange_response:\" % int(time.time()))\n #print(\" **> state:\", self.state)\n if self.state != KeyExchangeManager.STATE_REQUESTING:\n return\n rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))\n self.set_invoke_timer(rand_time)\n self.shared_key = message_key_types.derive_shared_key(self.secret_key, pubkey, random_val)\n self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)\n self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, \"confirm\", self.peer_public_key,\n self.nonce, self.random, self.pending_key_name)\n self.key_name = self.pending_key_name\n self.set_cipher(self.key_name, hint)\n self._set_state(KeyExchangeManager.STATE_ESTABLISHED)\n #print(\"*STATE_ESTABLISHED\")", "def request_cb(self, target_user, request, ctx):\n\n if request == None:\n return\n\n if not validate({'r': str,\n 'uid': lambda s: type(s) == str and valid_uid(s) and s != self.my_uid,\n 'param': str}, request):\n debug('Key management: Broken payload: %s\\n' %(' '.join(payload)))\n return\n\n cmd = request['r']\n uid = request['uid']\n param = request['param']\n\n if uid == self.my_uid:\n return\n\n debug('Key management: got answer %s from %s\\n' %(cmd, uid))\n user = self.community.get_user(uid)\n\n if self.current['user'] and user != self.current['user']:\n warning('keymanagement: Protocol violation from %s: Current uid is %s\\n' %(nick, self.current['uid'].get('uid')))\n return {'r': self.KM_PROTOCOL_VIOLATION, 'uid': self.my_uid}\n\n if not self.check_answer(cmd):\n warning('keymanagement: Protocol violation from %s: request was %s but answer was %s' %(uid, self.current['state'], cmd))\n self.send_request(user, self.KM_PROTOCOL_VIOLATION, '')\n return\n\n self.key_exchange_gui.plugin_to_gui(user, cmd, False)\n\n payload = ''\n if cmd == self.KM_REQUEST_ACK:\n self.temp_key_watcher = self.gen_temp_key()\n self.temp_passphrase = self.gen_passphrase()\n debug('Key management: passphrase is %s\\n' %(self.temp_passphrase))\n return\n if cmd == self.KM_REQUEST_ANSWER_ACK:\n self.gen_temp_key()\n return\n elif cmd == self.KM_TEMP_KEY1:\n # Received temporery key: save it and send our temporary key\n # encrypted with the symmetric cipher\n temp_key = self.sym_dec(param, self.temp_passphrase)\n if temp_key and self.save_key(user, pub=temp_key, temp=True):\n send_cmd = self.KM_TEMP_KEY2\n payload = self.sym_enc(self.load_pub_key(self.myself, temp=True),\n self.temp_passphrase)\n if not payload:\n send_cmd = self.KM_ERROR\n payload = ''\n else:\n send_cmd = self.KM_ERROR\n payload = ''\n elif cmd == self.KM_PERM_KEY1:\n # Received counterpartys 
permanent key, so let's save it and send ours\n perm_key = self.asym_dec(param, self.key_path(self.myself, temp=True))\n if perm_key and self.save_key(user, pub=perm_key):\n send_cmd = self.KM_PERM_KEY2\n payload = self.asym_enc(self.load_pub_key(self.myself),\n self.key_path(user, temp=True))\n if not payload:\n send_cmd = KM.ERROR\n payload = ''\n else:\n send_cmd = KM_ERROR\n payload = ''\n elif cmd == self.KM_PERM_KEY_ACK:\n send_cmd = self.KM_FINISHED\n elif cmd == self.KM_FINISHED:\n # Successful key exchange\n self.current = {'user': None, 'state': None}\n self.community.announce_user_change(user) # update user state\n return\n elif cmd == self.KM_CANCEL:\n self.current = {'user': None, 'state': None}\n return\n elif cmd == self.KM_ERROR:\n self.current = {'user': None, 'state': None}\n return\n elif cmd == self.KM_PROTOCOL_VIOLATION:\n self.current = {'user': None, 'state': None}\n return\n elif cmd == self.KM_REQUEST_NACK:\n self.current = {'user': None, 'state': None}\n return\n\n self.current['state'] = send_cmd\n self.send_request(user, send_cmd, payload)", "def receive_message(self, message):", "def receive(self, message):", "def keyExchangeServer():\n with socket.socket() as sock:\n try:\n # if the port number already taken, the following line will not work\n sock.bind((GW_ADRRESS, KEY_EXCHANGE_PORT))\n print(\"success in binding\")\n except:\n print(\"error in binding\")\n sys.exit()\n sock.listen(0)\n while True:\n client_socket, client_address = sock.accept()\n valid, data = get_msg(client_socket)\n if valid:\n data = data.split(\" \")\n if data[0] == HELLO_MASSEGE:\n if (client_address[0], int(data[1])) not in key_dic:\n key = int.from_bytes(Fernet.generate_key(), \"big\")\n else:\n key = key_dic[(client_address[0], int(data[1]))]\n client_socket.send(create_msg(get_key(key)).encode())\n key_dic[(client_address[0], int(data[1]))] = key\n print(key_dic)", "def send(event=None): # event is passed by binders.\n msg = my_msg.get()\n my_msg.set(\"\") # Clears input field.\n if rsa.cont > 0:\n message = idea.get_encrypt_message(msg, idea.get_key()) \n \n client_socket.send(bytes(msg, \"utf8\"))\n\n if rsa.cont == 0:\n rsa.cont += 1\n print(\"chave publica \", rsa.get_public_key())\n print(\"chave privada\", rsa.get_private_key())\n client_socket.send(bytes(rsa.get_public_key(), \"utf8\"))\n\n if msg == \"{quit}\":\n cont = 0\n client_socket.close()\n top.quit()", "def ask_keys(self, update, context):\r\n update.message.reply_text('Введите новый ключ')\r\n return self.LISTEN", "def receive_key(self, key):\n self.queue.put(key)", "def receive_message(self, message):\r\n return", "def receive():\n pass", "def receive(self, msg):\n pass", "def receive_confirmation(self):\n #print(\"(%d) receive_confirmation:\" % int(time.time()))\n #print(\" **> state:\", self.state)\n if self.state != KeyExchangeManager.STATE_CONFIRMING:\n return\n rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))\n self.set_invoke_timer(rand_time)\n self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)\n self.key_name = self.pending_key_name\n self._set_state(KeyExchangeManager.STATE_ESTABLISHED)\n #print(\"*STATE_ESTABLISHED\")", "def receive():\n while True:\n try:\n msg = client_socket.recv(BUFSIZ).decode(\"utf8\")\n \n\n if rsa.cont != 1:\n msg_list.insert(tkinter.END, msg)\n \n elif rsa.cont > 1:\n dec = idea.get_decrypt_message(msg, idea.get_key())\n msg_list.insert(tkinter.END, msg)\n\n elif rsa.cont == 1:\n print(\"chave do idea \", msg)\n idea_key = 
rsa.get_decrypt_message(rsa.get_private_key(), msg);\n print(\"chave do idea \", idea_key)\n rsa.cont += 1\n idea.set_key(idea_key)\n\n #idea.set_key(msg)\n \n except OSError: # Possibly client has left the chat.\n break", "def receive_key(self, key):\n try:\n self.queue.put(key)\n except:\n raise #Just collecting possible exceptions for now", "def receive(self):\n pass", "def messageReceived(self, source, message):\n if not self.myKey:\n self.myKey = open('keyfil').read().strip('\\n').strip()\n\n key = self.myKey \n rmesg = self.decodeMessage(key, message)\n\n if not rmesg:\n print \"CRC error - bailing out\"\n return\n \n messageId, message = self.getID(rmesg)\n #print \"THIVE IN\", messageId, message, self.messageDeferreds\n\n if messageId in self.messageDeferreds:\n self.messageDeferreds[messageId].callback(message)\n reactor.callLater(10, self.cleanDeferred, messageId)\n else:\n # Defer this action\n reactor.callLater(0.05, self.messageHandler, source, message, messageId)", "def handleReceived():\r\n global receivedAck\r\n receivedAck = True", "def handle_key_request(self, event):\n if event['sender'] != self.user_id:\n logger.info(\"Ignoring m.room_key_request event from %s.\", event['sender'])\n return\n\n content = event['content']\n device_id = content['requesting_device_id']\n if device_id == self.device_id:\n return\n try:\n self.olm_device.device_keys[self.user_id][device_id]\n except KeyError:\n logger.info(\"Ignoring m.room_key_request event from device %s, which \"\n \"we don't own.\", device_id)\n return\n\n # Build a queue of key requests as we don't want to tell client of each requests,\n # knowing that the canceling event might be coming right up next.\n request_id = content['request_id']\n if content['action'] == 'request':\n body = content['body']\n if body['algorithm'] != self.olm_device._megolm_algorithm:\n return\n if request_id not in self.queued_key_requests[device_id]:\n self.queued_key_requests[device_id][request_id] = body\n elif content['action'] == 'cancel_request':\n # This doesn't remove request_id from the dict, so we will never\n # add an event with this request ID again.\n self.queued_key_requests[device_id][request_id].clear()", "def handle_requests(self, from_user, request):\n if request == None:\n return\n if not validate({'t': str,\n 'uid': lambda s: type(s) == str and valid_uid(s) and s != self.my_uid,\n 'param': str}, request):\n debug('Key management: Broken request: %s\\n' %(request))\n return {'r': self.KM_PROTOCOL_VIOLATION, uid: self.my_uid}\n cmd = request['t']\n uid = request['uid']\n param = request['param']\n\n debug('Key management: handling request %s from %s\\n' %(cmd, uid))\n\n user = self.community.get_user(uid)\n if user != from_user:\n warning(\"keymanagement: Invalid uid from fetcher: %s\\n\" %(uid))\n return {'r': self.KM_PROTOCOL_VIOLATION, 'uid': self.my_uid}\n nick = user.get('nick')\n\n if self.current['user'] and user != self.current['user']:\n warning('keymanagement: Paraller request from %s: Current uid is %s\\n' %(nick, self.current['user'].get('uid')))\n return {'r': self.KM_REQUEST_NACK, 'uid': self.my_uid}\n\n if not self.check_request(cmd):\n warning('keymanagement: Protocol violation from %s: Current state is %s but received request %s\\n' %(nick, self.current['state'], cmd))\n return {'r': self.KM_PROTOCOL_VIOLATION, 'uid': self.my_uid}\n\n self.current['state'] = cmd\n\n self.key_exchange_gui.plugin_to_gui(user, cmd, True)\n\n payload = ''\n if cmd == self.KM_REQUEST_KEY:\n self.current['user'] = user\n result = 
self.KM_REQUEST_ACK\n elif cmd == self.KM_REQUEST_DENIED:\n debug('keymanagement: %s denied request for key exchange\\n' %(nick))\n self.current = {'user': None, 'state': None}\n result = self.KM_CANCEL\n elif cmd == self.KM_REQUEST_OK:\n debug('keymanagement: started key exchange with %s\\n' %(nick))\n result = self.KM_REQUEST_ANSWER_ACK\n elif cmd == self.KM_TEMP_KEY_ACK:\n # Other user has typed in the passphrase. We can now send the\n # temporary key encrypted with it.\n result = self.KM_TEMP_KEY1\n payload = self.sym_enc(self.load_pub_key(self.myself, temp=True),\n self.temp_passphrase)\n if not payload:\n result = self.KM_ERROR\n payload = ''\n elif cmd == self.KM_TEMP_KEY2:\n # Received other party's temporary key. Let's send our\n # permanent key encrypted with this temporary key.\n temp_key = self.sym_dec(param, self.temp_passphrase)\n if temp_key and self.save_key(user, pub=temp_key, temp=True):\n result = self.KM_PERM_KEY1\n payload = self.asym_enc(self.load_pub_key(self.myself),\n self.key_path(user, temp=True))\n if not payload:\n result = self.KM_ERROR\n payload = ''\n else:\n result = self.KM_ERROR\n payload = ''\n elif cmd == self.KM_PERM_KEY2:\n # Received permanent key. Save it and send \"finished\".\n perm_key = self.asym_dec(param, self.key_path(self.myself, temp=True))\n if perm_key and self.save_key(user, pub=perm_key):\n result = self.KM_PERM_KEY_ACK\n else:\n result = self.KM_ERROR\n elif cmd == self.KM_CANCEL:\n self.current = {'user': None, 'state': None}\n # Key exchange canceled\n result = self.KM_CANCEL\n elif cmd == self.KM_FINISHED:\n self.community.announce_user_change(user) # update user state\n self.current = {'user': None, 'state': None}\n # Successful key exchange\n result = self.KM_FINISHED\n elif cmd == self.KM_ERROR:\n self.current = {'user': None, 'state': None}\n result = self.KM_ERROR\n elif cmd == self.KM_PROTOCOL_VIOLATION:\n self.current = {'user': None, 'state': None}\n result = self.KM_PROTOCOL_VIOLATION\n\n debug('Key management: sending answer %s to %s\\n' %(result, nick))\n return {'r': result, 'uid': self.my_uid, 'param': payload}", "def on_idkey_received(self, data):\r\n self.send(json.dumps({\"op\":\"mtgox.subscribe\", \"key\":data}))", "def handleReceived(): \n global receivedAck\n receivedAck = True", "def receive_message(self, context, message):\r\n pass", "def _rceCB(self, msg):\r\n rosMsg = rospy.AnyMsg()\r\n\r\n if _GZIP_LVL:\r\n rosMsg._buff = zlib.decompress(msg.getvalue())\r\n else:\r\n rosMsg._buff = msg.getvalue()\r\n\r\n self._pub.publish(rosMsg)", "def callback(self, ks_path, msg):\n pass", "def get_key_input():\n return get_input(message='Please enter your master key:',\n secure=True, check_timer=False)", "def receive_request(self):\n try:\n payload = self.SUB_COMMAND.recv_string(flags=zmq.NOBLOCK)\n topic, command = payload.split()\n if (topic == zmq_socket_config.TOPIC_REQUEST):\n if (command == zmq_socket_config.COMMAND_START):\n logger.debug(\"Noxalarm receive COMMAND_START\")\n self.start_alarm()\n elif (command == zmq_socket_config.COMMAND_STOP):\n logger.debug(\"Noxalarm receive COMMAND_STOP\")\n self.stop_alarm()\n elif (command == zmq_socket_config.STATUS_UPDATE):\n logger.debug(\"Noxalarm receive REQUEST_STATUS_UPDATE\")\n self.push_socket_state()\n \n # Else if no command received, do nothing\n except zmq.error.Again:\n pass", "def post_key(self):\n # print(self.key)\n #Sending the key to the attacker.\n s.send(bytes(\"K\\n{}\".format(str(self.key,'utf-8')),'utf-8'))", "def handleMessage(msg):", "def 
onMessageBegin(self, isBinary):", "def test_incoming_k(self):\n m_interface = Mock()\n m_interface.callback.return_value = True\n m_interface.read.return_value = ''\n upb = UPB(m_interface)\n upb.onCommand(address=(22,255), callback=m_interface.callback)\n m_interface.read.return_value = \"PU07141610FF3090\\x0DPU07151610FF308F\\x0D\"\n# time.sleep(4000)\n time.sleep(2)\n m_interface.callback.assert_called_with(address=(22,255), command='status', source=upb)\n m_interface.read.return_value = ''", "def controls():\n\n context = zmq.Context()\n\n print(\"Transmitting commands to process.\")\n socket = context.socket(zmq.REQ)\n rc = socket.connect(\"ipc:///tmp/mail_queue_ipc\")\n #print(rc)\n\n\n for request in range(2):\n print(\"Sending request %s\" % request)\n socket.send(b\"insert\")\n\n message = socket.recv()\n print(\"Recieved reply %s [ %s ]\" % (request, message))\n time.sleep(1)", "def cmd_handler():\n context = zmq.Context()\n\n # socket to receive commands (a subscription to ELECTION_CODE channel)\n cmd_socket = context.socket(zmq.SUB)\n cmd_socket.connect (\"tcp://%s:5556\" % SERVER_HOST)\n topicfilter = \"politiche2013\"\n cmd_socket.setsockopt(zmq.SUBSCRIBE, topicfilter)\n\n # socket to send replies\n reply_sender = context.socket(zmq.PUSH)\n reply_sender.connect(\"tcp://%s:5557\" % SERVER_HOST)\n\n # main loop\n while True:\n print \"Aye sir, unit {0} ready for your commands ...\".format(computer_id)\n # wait for a command\n string = cmd_socket.recv()\n\n # action\n print \"Message received: '%s'\" % (string,)\n\n # send reply to server\n print \"Sending reply to server\"\n reply = { 'unit' : computer_id, 'status' : 'configured'}\n reply_sender.send_json(reply)", "def consumeMsg():\n\tosuser = 'osdev'\n\tospass = 'osdev'\n\toshost = '10.32.29.94'\n\tosport = '5672'\n\tosvhost = '/openstack'\n\tneutronExchange = Exchange('quantum', type='topic', durable=False)\n\tinfoQueue = Queue('exthook', exchange=neutronExchange , durable=False,\n\t\t\trouting_key='notifications.info')\n\twith Connection(\"\".join(['amqp://', osuser, ':', ospass, '@', \n\t\toshost, ':',osport, '/', osvhost])) as conn:\n\t\twith conn.Consumer(infoQueue, callbacks=[msgParse]):\n\t\t\twhile True:\n\t\t\t\ttry: \n\t\t\t\t\tconn.drain_events()\n\t\t\t\texcept Exception, e:\n\t\t\t\t\tlogging.exception('Draining events from AMQP stop')\n\t\t\t\t\tbreak", "def ServerSyncReceived(self,message):", "def request_dedkey(mybox, myport=22):\n # start Requests session\n sc = requests.Session()\n\n # set up auth & headers\n sc.headers.update({'User-Agent': \"Mozilla/5.0\"})\n sc.auth = (udata.userauth['user'], udata.userauth['passwd'])\n\n # send request\n cpj = sc.post('https://cpjump.inmotionhosting.com/dedtmpkeys/process-dedkey.php',\n data={'server': mybox, 'port': myport, 'submit': \"Submit\"}, verify=False)\n\n # check login\n check_cpjump_login(cpj)\n\n print(\"** Queued key placement on %s:%s\" % (mybox, myport))\n\n if not xopts['nowait']:\n print(\">> Awaiting response from eDesk...\")\n edok = log_wait(find_latest_log(udata.srcs['edesk']))\n if not re.search(r'success', edok, re.I|re.M):\n print(\"!! Key establish was unsuccessful. Aborting.\")\n sys.exit(101)\n else:\n print(\"** Key established. 
Connecting to %s:%s...\" % (mybox, myport))\n ssh_to(mybox, myport)", "def get(self, msg):\n\n # print(\"get\")\n self.q.put(msg)", "def handle(self, message):", "def recipient_public_key(self):", "def handle_message(self, message):", "def on_message_received(ch, method, properties, body):\n # the body contains the command flag followed by a colon ':' and the message for the drone\n # decode the body to utf8\n received_bytes = body.decode('utf-8')\n # split the received_bytes to get the command _flag and message\n recieved_message = received_bytes.split(':')\n # since rabbit mq body is a byte\n if (str(recieved_message[0]) == \"c01\"):\n # c01 - command center orders the drone to deliver a item\n print(\"Order Received from the command center to deliver an item to the following address \\n\", str(\n recieved_message[1]))\n time.sleep(2)\n # print in the drone's console that the item has been lift off\n print('\\nLifting off the Item to the delivery address.')\n print('\\nUpdating Status to the command centre ......')\n # Assume the drone has reached the delivery address . Now send a\n # message to the warehouse command center that it has reached the\n # delivery area\n time.sleep(5)\n rpc_sendback(\"c02\")\n # Assume the drone has delivered the item and issue the status message\n # to the command center\n time.sleep(5)\n rpc_sendback(\"c03\")\n # #Assume the drone has reached the parking spot and issue the message to the command center that is available for next instruction\n time.sleep(5)\n rpc_sendback(\"c04\")\n\n else:\n print(\"Received Instruction from Warehouse \" +\n str(recieved_message[1]))\n channel.basic_ack(delivery_tag=method.delivery_tag)\n # channel.start_consuming()", "def notify(plaintext_message, signature):", "def onMessage(self, payload, isBinary):", "def kbaction_callback(self, kb_event):\n evtype = kb_event.event_type\n keyname = kb_event.name\n self.lastmesg = db2_movement_convert(evtype=evtype, kname=keyname)\n if self.lastmesg is None:\n return\n self.sock.send(self.lastmesg.encode())", "def manage_read_request(self, client):\n\n # obtain the message\n message = client.recv()\n message = json.loads(message)\n msg = message[\"payload\"].strip()\n if msg.startswith(\"/\"):\n type = \"c2s\"\n elif msg.startswith(\"@\"):\n type = \"c2c\"\n else:\n type = \"c2g\"\n\n func = getattr(self, \"request_\"+type)\n func(client, message)\n # self.msg_map[message['type']](client, message)", "def HandleMessage(msg, conn, requester):\n\n print(\"\\nReceived a new message:\\n{}\".format(msg))\n if msg['__class__'] == 'ReqDecryption':\n msg = msg['__value__']\n C = msg['C']\n D = msg['D']\n C = parse_point(C)\n D = parse_point(D)\n\n print(\"\\nReceived a new tallied contribution:\")\n print(\"C = {}\\nD = {}\".format(C, D))\n out = requester.decrypt(C, D)\n\n req = RespDecryption(out[0], out[1], out[2])\n write_message(conn, req)\n print(\"\\nThe final outcome is:\\n{}\".format(out[0]))\n exit()", "def receiveMessage(self, user, message):\n pass", "def receiveMessage(self, currentTime, msg):\n super().receiveMessage(currentTime, msg)\n if self.state == 'AWAITING_SPREAD' and msg.body['msg'] == 'QUERY_SPREAD':\n bid, _, ask, _ = self.getKnownBidAsk(self.symbol)\n if bid and ask:\n self.mid_list.append((bid + ask) / 2)\n if len(self.mid_list) > self.window1: self.avg_win1_list.append(pd.Series(self.mid_list).ewm(span=self.window1).mean().values[-1].round(2))\n if len(self.mid_list) > self.window2: 
self.avg_win2_list.append(pd.Series(self.mid_list).ewm(span=self.window2).mean().values[-1].round(2))\n if len(self.avg_win1_list) > 0 and len(self.avg_win2_list) > 0:\n if self.avg_win1_list[-1] >= self.avg_win2_list[-1]:\n # Check that we have enough cash to place the order\n if self.holdings['CASH'] >= (self.size * ask):\n self.placeLimitOrder(self.symbol, quantity=self.size, is_buy_order=True, limit_price=ask)\n else:\n if self.symbol in self.holdings and self.holdings[self.symbol] > 0:\n self.placeLimitOrder(self.symbol, quantity=self.size, is_buy_order=False, limit_price=bid)\n self.setWakeup(currentTime + self.getWakeFrequency())\n self.state = 'AWAITING_WAKEUP'", "def request( key, server, node, netrc=os.getenv('NETRC', os.path.join(os.path.expanduser('~'), '.netrc')), verbose=False ):\n ### format and send the packet\n packet = Packet(server, node, ptype='request', key=key)\n if verbose:\n print( \"%s->%s : %s\"%(server, node, packet.dumps()) )\n send( packet, server, node, netrc, verbose=verbose )", "def listen_for_any_message(self, msg, match):\n question=\"{}\".format(msg)\n return self.cbmodel.get_response(question)", "def _perform_key_exchange(self, query_entry):\n if KeyType.retry_timer in query_entry.data and query_entry.data[KeyType.retry_timer]:\n message_key_types.unset_cipher(self.pending_key_name)\n self.pending_key_name = None\n self._set_state(KeyExchangeManager.STATE_REQUESTING)\n #print(\"# (%d) _perform_key_exchange: to\" % int(time.time()), self.counter_node_id.hex())\n self.secret_key, self.peer_public_key, self.pending_key_name = message_key_types.get_ECDH_parameters()\n self.nonce = os.urandom(16)\n self.random = os.urandom(8)\n ret = self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, \"request\",\n self.peer_public_key, self.nonce, self.random,\n self.pending_key_name)\n if not ret:\n self._set_state(KeyExchangeManager.STATE_NONE)\n message_key_types.unset_cipher(self.pending_key_name)\n message_key_types.unset_cipher(self.key_name)\n self.secret_key = None\n self.peer_public_key = None\n self.pending_key_name = None\n self.nonce = None\n self.random = None\n return\n rand_time = KeyExchangeManager.KEY_EXCHANGE_RETRY_INTERVAL*random.uniform(0.5, 1.5)\n self.set_invoke_timer(rand_time, retry_entry=True)", "def main():\n # Create the socket\n server_sckt = socket(AF_INET, SOCK_STREAM)\n server_sckt.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)\n server_sckt.bind((HOST, PORT))\n server_sckt.listen()\n print(f\"Listening on {HOST}:{PORT}\")\n conn, client = server_sckt.accept()\n print(f\"New client: {client[0]}:{client[1]}\")\n\n # Negotiating the cipher\n print(\"Negotiating the cipher\")\n msg_in = conn.recv(4096).decode('utf-8')\n proposed = parse_proposal(msg_in)\n cipher_name, key_size = select_cipher(SUPPORTED_CIPHERS, proposed)\n print(f\"We are going to use {cipher_name}{key_size}\")\n msg_out = generate_cipher_response(cipher_name, key_size)\n conn.send(msg_out.encode())\n\n # Negotiating the key\n print(\"Negotiating the key\")\n dh = DiffieHellman()\n dh.generate_public_key()\n msg_in = conn.recv(4096).decode('utf-8')\n client_public_key = parse_dhm_request(msg_in)\n dh.generate_shared_secret(client_public_key)\n msg_out = generate_dhm_response(dh.public_key)\n conn.send(msg_out.encode())\n cipher, key, iv = get_key_and_iv(dh.shared_key, cipher_name, key_size)\n print(\"The key has been established\")\n\n print(\"Initializing cryptosystem\")\n crypto = cipher.new(key, cipher.MODE_CBC, iv)\n hashing = HMAC.new(key, 
digestmod=SHA256)\n print(\"All systems ready\")\n\n while True:\n msg_in = conn.recv(4096)\n if len(msg_in) < 1:\n conn.close()\n break\n msg, hmac = read_message(msg_in, crypto)\n validate_hmac(msg_in, hmac, hashing)\n print(f\"Received: {msg}\")\n msg_out = f\"Server says: {msg[::-1]}\"\n conn.send(msg_out.encode())", "def recAMQPmsg(self,msg):\n self.cv['Vm'] = msg['Vm']\n self.cv['Va'] = msg['Va']\n if self.mirror.AMQPdebug: \n print('AMQP values set!')", "def notify(self, sender, key, key2=b'\\x00'):\r\n\r\n EventListener.notify(self, sender, KeyPressEventArgs(key, key2))", "def on_receive(self, msg):\n raise NotImplementedError", "def pm_handler(self, msg):\n\t\tif str(msg['from']).split('/')[0] == self.boundjid.bare:\n\t\t\tself.recipient = str(msg['to']).split('/')[0]\n\t\telse:\n\t\t\tself.recipient = str(msg['from']).split('/')[0]\n\t\t# For some reason carbons sent by you come twice (from gajim at least)\n\t\tif self.user().last_msg == msg:\n\t\t\treturn\n\t\tif msg['body'][0] == '!':\n\t\t\tself.parse(msg)\n\t\telif msg['body'].split()[0].lower() in self.qwords \\\n\t\t\t\tor msg['body'][-1] == '?' \\\n\t\t\t\tor self.user().force[str(msg['from']).split('/')[0]]:\n\t\t\tself.assist(msg)\n\t\tself.user().last_msg = msg", "def subscribe_order_book_request_packet(self, pair_id):\n pass", "def callback(self):\n server_addresses = self._address_book.list_by_key(key)\n for address in server_addresses:\n if self._client_logic.connection_error.is_set():\n try:\n connection = socket.create_connection((address[0], 9665))\n self.sident_verify(connection, v_event)\n except socket.error:\n continue\n else:\n return True\n neighbor_addresses = self._client_list.list()\n for address in neighbor_addresses:\n if self._client_logic.connection_error.is_set():\n try:\n connection = socket.create_connection((address[0], address[1]))\n \n\n def sident_verify(self, connection):\n \"\"\"Request the server send a signed verification of its identity with \n IP address, port and timestamp.\n\n sident stands for 'Server Identity'\n\n An sident_verify message is of the following form:\n\n {'type':'sident_verify'\n 'timestamp':<UNIX TIMESTAMP>}\n\n The server should reply with an sident_response message which is of\n the following form:\n\n {'type':'sident_response',\n 'ip_addr':<IP ADDRESS AS A STRING>,\n 'port':<PORT NUMBER AS AN INTEGER>,\n 'timestamp':<UNIX TIMESTAMP>,\n 'signature':<SIGNED DIGEST OF THE THREE PREVIOUS VALUES AS A UTF-8 STRING \n CONCATENATED TOGETHER WITH COMMA SEPERATORS>}\"\"\"\n sident_verify_msg = {'type':'sident_verify',\n 'timestamp':calendar.timegm(time.gmtime())}\n self._send_queue.put((sident_verify_msg, connection))\n return True\n\n def request_server_address(self, connection):\n \"\"\"Request the best guess at the current server address from a client\n peer. \n\n P2P nodes use the same JSON messaging style as the normal client and\n server. 
address_request messages are of the form:\n\n {'type':'address_request'\n 'timestamp':<UNIX TIMESTAMP>}\n\n And a server_address message is of the form:\n\n {'type':'server_address',\n 'key':<CRYPTOGRAPHIC KEY THAT UNIQUELY IDENTIFIES SERVER>,\n 'address':<SERVER ADDRESS>,\n 'port':<WHAT PORT THE SERVER LISTENS ON>,\n 'address_timestamp':<UNIX TIMESTAMP OF WHEN PEER RECEIVED ADDRESS>,\n 'signature':<VERIFICATION THAT INFORMATION CAME FROM SERVER ORIGINALLY>,\n 'timestamp':<UNIX TIMESTAMP OF WHEN MESSAGE WAS SENT>}\"\"\"\n address_request = {'type':'sident_verify',\n 'timestamp':calendar.timegm(time.gmtime())}\n self._send_queue.put((address_request, connection))\n return True\n \n\n def send_loop(self):\n \"\"\"Send loop that is meant to be started from a seperate thread of \n execution. The send loop pulls 'raw' python object messages from this \n objects send_queue attribute and converts them to json strings before \n encoding them as utf-8 to send across the wire. Sent along with the \n message is the connection to send it on.\n\n Responses are handled and received by the receive_loop method of this class\n which is ran in a seperate thread of execution.\"\"\"\n while not self._shutdown.is_set():\n message_tuple = self._send_queue.get()\n message = message_tuple[0]\n message_length = self._calculate_recursive_length(message)\n wrapped_message = [message_length, message]\n wire_message = (json.dumps(wrapped_message) + \"\\r\\n\\r\\n\").encode('utf-8')\n message_tuple[1].sendall(wire_message)\n return True\n\n def receive_loop(self):\n \"\"\"Receive loop that is meant to be started from a seperate thread of\n execution. The receive loop takes in 'raw' utf-8 json messages from the\n wire and decodes them, then interprets them to produce native python \n objects. The resulting objects are then handled by a method of this class\n of the form handle_<message_type>. 
For example if a message with the \n 'type' key 'test' came in like so:\n\n {'type':'test'}\n\n The method self.handle_test(message) would be called with the message\n dictionary object passed along.\n \"\"\"\n msg_buffer = bytes() # The message input buffer\n while not self._shutdown.is_set():\n if msg_buffer:\n try:\n msg_length = self.determine_length_of_json_msg(msg_buffer)\n except InvalidLengthHeader:\n msg_length = float(\"inf\")\n if len(msg_buffer) >= msg_length:\n message = self.extract_msg(msg_buffer, msg_length)\n try:\n handler = getattr(self, \"handle_\" + message['type'])\n except AttributeError:\n print(\"Can't handle message of type: \" +\n str(message['type']))\n continue\n handler(message)\n msg_buffer = msg_buffer[msg_length:]\n else:\n try:\n msg_buffer += connection.recv(1024)\n except socket.timeout:\n pass\n else:\n try:\n msg_buffer += connection.recv(1024)\n except socket.timeout:\n pass\n \n def handle_sident_response(message):\n \"\"\"Handle an sident_response type message of the form:\n \n {'type':'sident_response',\n 'ip_addr':<IP ADDRESS AS A STRING>,\n 'port':<PORT NUMBER AS AN INTEGER>,\n 'timestamp':<UNIX TIMESTAMP>,\n 'signature':<SIGNED DIGEST OF THE THREE PREVIOUS VALUES AS A UTF-8 STRING \n CONCATENATED TOGETHER WITH COMMA SEPERATORS>}\n \n The handler verifies that the information given by the server is properly\n signed, then adds the information to address books/etc, and finally \n resolves the issue using provided client logic methods and clears the \n error indicator.\"\"\"\n if self._client_logic.connection_error.is_set():\n try:\n ip_addr = message['ip_addr']\n port = message['port']\n timestamp = message['timestamp']\n signature = message['signature']\n except KeyError:\n return False\n sha_hash = SHA256.new(\n (ip_addr + \",\" + port + \",\" + timestamp).encode('utf-8'))\n if self._key.verify(sha_hash.digest(), signature):\n self._address_book.add_address(self._key, ip_addr, timestamp,\n signature, port=port)\n self._address_book.save()\n if self._client_logic.reconnect(ip_addr, port):\n self._client_logic.connection_error.clear()\n return True\n else:\n return False\n else:\n return False\n\n \n def determine_length_of_json_msg(self, message_bytes):\n \"\"\"Incrementally parse a JSON message to extract the length header.\n\n message_bytes: The bytes that represent the portion of the message \n recieved.\n \"\"\"\n # All messages must be written in utf-8\n message = message_bytes.decode('utf-8')\n # Check that the message we have been given looks like a valid length header\n if \",\" not in message:\n raise InvalidLengthHeader(message)\n length_portion = message.split(\",\")[0]\n left_bracket = length_portion[0] == \"[\"\n number_before_comma = length_portion[-1] in \"1234567890\"\n if left_bracket and number_before_comma:\n for character in enumerate(length_portion):\n if character[1] not in \"[ \\n\\t\\r1234567890,\":\n raise InvalidLengthHeader(length_portion)\n elif character[1] in \"1234567890\":\n length_start = character[0]\n return int(length_portion[length_start:])\n elif left_bracket:\n raise InvalidLengthHeader(length_portion)\n else:\n raise MissingLengthHeader(length_portion)\n return False\n\n def extract_msg(self, msg_buffer, length):\n message = msg_buffer[:length].decode()\n try:\n right_curly_bracket = message[-6] == \"}\" or message[-2] == \"}\"\n except IndexError:\n print(message, msg_buffer, length)\n valid_delimiter = message[-6:] == \"}]\\r\\n\\r\\n\"\n if right_curly_bracket and valid_delimiter:\n return message\n elif 
right_curly_bracket:\n raise InvalidMessageDelimiter(message)\n else:\n raise MissingMessageDelimiter(message)\n\n def _calculate_recursive_length(self, msg_dict):\n \"\"\"Calculate the length of a dictionary represented as JSON once a length\n field has been added as a key.\"\"\"\n delimiter = \"\\r\\n\\r\\n\"\n initial_length = len(\n json.dumps(msg_dict) + delimiter)\n initial_list = [initial_length, msg_dict]\n recursive_length = len(\n json.dumps(initial_list) + delimiter)\n recursive_list = [recursive_length, msg_dict]\n while len(json.dumps(recursive_list) + delimiter) != recursive_list[0]:\n recursive_length = len(\n json.dumps(recursive_list) + delimiter)\n recursive_list = [recursive_length, msg_dict]\n return recursive_list[0]", "async def run(self):\n\n self.connection = await aio_pika.connect(self.mq_connection_str, loop=asyncio.get_event_loop())\n self.channel = await self.connection.channel()\n\n # connect to exchanger market data\n # market data send with routing key format: message_type.data_type.exchange.pair[.time_frame]\n # message_type == update | starting, data_type == ticker | candles | depth,\n # exchange, pair, time_frame - sending by listing_info\n binding_mask = '*.*.*.#'\n topic_logs_exchange = await self.channel.declare_exchange(self.exchanger, aio_pika.ExchangeType.TOPIC)\n queue_topic = await self.channel.declare_queue('', auto_delete=True)\n await queue_topic.bind(topic_logs_exchange, routing_key=binding_mask)\n\n # listener queue for listing information\n queue_for_listing = await self.channel.declare_queue('', auto_delete=True)\n await queue_for_listing.bind(topic_logs_exchange, routing_key=self.name_queue_for_listing)\n\n # listener queue for error\n queue_for_error = await self.channel.declare_queue('', auto_delete=True)\n await queue_for_error.bind(topic_logs_exchange, routing_key=self.name_queue_for_error)\n\n def callback_crypto_currency_market_data(message):\n \"\"\"Callback for consume market data\"\"\"\n body = json.loads(message.body.decode('utf-8'))\n \n # routing_key have view: message_type.data_type.exchange.pair[.time_frame]\n # message_type == update | starting, data_type == ticker | candles | depth,\n # exchange, pair, time_frame - sending by listing_info\n # mask: *.*.*.#\n message_type = message.routing_key.split('.')[0]\n data_id = '.'.join(message.routing_key.split('.')[1:])\n\n if message_type == 'update':\n for observer in self.subscribers.get(data_id):\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n elif message_type == 'starting':\n # if exist waiters, send data and move waiters in subscribers\n if not self.waiters_first_msg.get(data_id):\n return\n\n new_subscribers = []\n while self.waiters_first_msg[data_id]:\n observer = self.waiters_first_msg[data_id].pop()\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n new_subscribers.append(observer)\n\n # if not subscribers on this data_id, init new dict-value, else append to exist array\n subscribers = self.subscribers.get(data_id, None)\n if not subscribers and new_subscribers:\n self.subscribers[data_id] = new_subscribers\n asyncio.get_event_loop().create_task(self._send_message_for_subscribe(data_id))\n else:\n for new_subscriber in new_subscribers:\n if new_subscriber not in self.subscribers[data_id]:\n self.subscribers[data_id].append(new_subscriber)\n\n def callback_crypto_currency_listing(message):\n \"\"\"Callback for consume information about 
access pairs, exchanges and timeframes\"\"\"\n body = json.loads(message.body.decode('utf-8'))\n data_id = TYPE_LISTING\n\n if not self.waiters_first_msg.get(data_id):\n return\n\n while self.waiters_first_msg[data_id]:\n observer = self.waiters_first_msg[data_id].pop()\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=data_id,\n data=body\n )\n ))\n\n def callback_crypto_currency_error(message):\n \"\"\"Callback for consume error queue\"\"\"\n logger.error(message.body.decode('utf-8'))\n\n body = json.loads(message.body.decode('utf-8'))\n\n # validation\n error_place = body.get('error_place')\n message = 'Sorry! Error on server'\n if not message or not error_place:\n return\n\n # send information to ws, that wait or subscribe on error_place\n waiters = self.waiters_first_msg.get(error_place, ())\n for observer in waiters:\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=error_place,\n error=message\n )\n ))\n\n subscribers = self.subscribers.get(error_place, ())\n for observer in subscribers:\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=error_place,\n data=message\n )\n ))\n\n await queue_topic.consume(callback_crypto_currency_market_data)\n await queue_for_listing.consume(callback_crypto_currency_listing)\n await queue_for_error.consume(callback_crypto_currency_error)", "def receiver():\r\n global data\r\n DW1000.newReceive()\r\n DW1000.receivePermanently()\r\n DW1000.startReceive()", "def processMsgs(s, msg, state):\r\n# s = socket\r\n# msg = initial message being processed\r\n \"\"\"You will need to complete this method \"\"\"\r\n\r\n global clientdice\r\n global clientbid\r\n global serverdice\r\n global serverbid\r\n \r\n#---------------------------------------------------------------------------\r\n status = -2\r\n gen = int(state['Gen']) # integer generator\r\n prime = int(state['prime']) # integer prime\r\n sKey = int(state['SecretKey']) # secret key\r\n rcvrPK = int(state['RcvrPubKey']) # receiver's public key\r\n nonce = int(state['Nonce'])\r\n symmetricKey = int(state['SymmetricKey']) # shared symmetric key\r\n \r\n strTest = clientHello()\r\n if strTest in msg and status == -2:\r\n print(\"Message received: \"+ msg)\r\n msg = clientHello()\r\n s.sendall(bytes(msg,'utf-8'))\r\n print ('Sent',msg)\r\n status = 1\r\n \r\n strTest = \"110 Generator:\"\r\n if strTest in msg and status == -2:\r\n print(\"Message received: \"+ msg)\r\n RcvdStr = msg.split(' ')\r\n gen = int(RcvdStr[2][0:-1])\r\n prime = int(RcvdStr[4])\r\n sKey = computeSecretKey(gen, prime) #Computes Shared key secretly using receiver public key, send secret key and prime\r\n msg = \"111 Generator and Prime Rcvd\"\r\n s.sendall(bytes(msg, 'utf-8'))\r\n print(\"Message sent: \"+ msg)\r\n state['Gen'] = gen\r\n state['prime'] = prime\r\n state['SecretKey'] = sKey\r\n status = 1\r\n\r\n strTest = \"120 PubKey\"\r\n if strTest in msg and status == -2:\r\n print(\"Message received: \" + msg)\r\n RcvdStr = msg.split(' ')\r\n rcvrPK = int(RcvdStr[2])\r\n #print('g: ', gen)\r\n #print('p: ', prime)\r\n print('Secret Key: ', sKey)\r\n msg = sendPublicKey(gen, prime, sKey) # Complete this\r\n print(\"Message sent: \" + str(msg))\r\n s.sendall(bytes(msg, 'utf-8'))\r\n state['RcvrPubKey'] = rcvrPK\r\n status = 1\r\n \r\n strTest = \"130 Ciphertext\"\r\n if strTest in msg and status == -2:\r\n print(\"Message received: \" + str(msg))\r\n Pub = rcvrPK \r\n RcvdStr = msg.split(' ')\r\n y1 = int(RcvdStr[2])\r\n clntCtxt = int(RcvdStr[2])\r\n SymmKey = 
computeSessionKey(rcvrPK, sKey, prime)\r\n state['SymmetricKey'] = SymmKey\r\n print('Server Secret', sKey)\r\n print('Client public', rcvrPK)\r\n print('SymmetricKey', SymmKey)\r\n dcryptedNonce = DHdecrypt(clntCtxt, SymmKey, gen, prime) #decrypt msg using shared secret key genarate using Diffie Hellman for AES encrytion\r\n print(\"Decrypted Ciphertext: \", dcryptedNonce)\r\n dcryptedNonce = dcryptedNonce-5\r\n msg = sendEncryptedMsg(dcryptedNonce,SymmKey, gen, prime) \r\n s.sendall(bytes(msg, 'utf-8'))\r\n print(\"Message sent: \" + msg)\r\n status = 1 # To terminate loop at server.\r\n print(\"Let's Start........... \" )\r\n\r\n strTest = \"150 OK\"\r\n if strTest in msg and status == -2:\r\n BsymmetricKey = '{0:015b}'.format(symmetricKey)\r\n \"\"\"Converts string s to a string containing only 0s or 1s, representing the original string.\"\"\"\r\n \"\".join(format(ord(x), 'b') for x in BsymmetricKey)\r\n \r\n \"\"\"Generates a random key of bits (with 0s or 1s) of length n\"\"\"\r\n k = []\r\n for i in range(len(BsymmetricKey)):\r\n k.append(choice([\"0\", \"1\"]))\r\n gen_random_key = \"\".join(k)\r\n cipher = xor(BsymmetricKey, gen_random_key)\r\n print(\"Plain Text(SymmKey) : \", BsymmetricKey)\r\n print(\"Generated Key(Binary) : \", gen_random_key)\r\n print(\"Generated Key(decimal): \", int(gen_random_key,2))\r\n print(\"Cipher Text : \", cipher)\r\n msg = \"140 One Time Pad: \" + cipher\r\n s.sendall(bytes(msg, 'utf-8'))\r\n print (\"Message sent: \", msg)\r\n status = 1 \r\n#---------------------------------------------------------------------------\r\n\r\n #process hello message\r\n strTest = \"155 OK\"\r\n if strTest in msg and status == -2:\r\n #if msg == \"105 OK\":\r\n print('Received: ',msg) \r\n\r\n hello = \"105 Hello message\" \r\n data=str.encode(hello)\r\n s.sendall(data) \r\n status = 1 \r\n\r\n #process roll dice message \r\n if msg == \"200 Roll Dice\":\r\n print('Received: ',msg)\r\n \r\n #Roll Client Die, assign to global variable and send to client\r\n clientDiceStr=rollDice(clientdice, toRoll=[0,1,2,3,4]) \r\n clientdice = strToDice(clientDiceStr, clientdice)#Collect dice roll for msg\r\n rDice = RollDiceACK(clientDiceStr)\r\n data=str.encode(rDice)\r\n s.sendall(data)\r\n\r\n #Roll Server Die and assign to global variable\r\n ServerDiceStr=rollDice(serverdice, toRoll=[0,1,2,3,4])\r\n serverdice=strToDice(ServerDiceStr, serverdice)#Collect dice roll for msg\r\n print('Server Roll: ', serverdice)\r\n \r\n status = 1\r\n\r\n #process bid message\r\n if \"300 Bid\" in msg: \r\n print('Received: ',msg)\r\n\r\n #store client bid for challenge phase (comparison)\r\n clientbid=strToBid(msg,clientbid)\r\n \r\n\r\n #Server Challenges or Bid\r\n query = input('Enter c to Challenge or b to Bid ')\r\n bidAck= bidACK(serverdice, query)\r\n data=str.encode(bidAck)\r\n s.sendall(data)\r\n \r\n if query == 'b' or query == 'B':\r\n bid=[0,0]\r\n bid=make_bid(bid, msg)\r\n data=str.encode(bid)\r\n s.sendall(data)\r\n serverbid=strToBid(bid,serverbid)\r\n #print('Please wait on client response ....')\r\n status = 1\r\n else:\r\n #Challenge Client\r\n chal=challenge(', '.join(str(e) for e in serverdice), ', '.join(str(e) for e in clientdice), msg)\r\n data=str.encode(chal)\r\n s.sendall(data)\r\n #print('Message sent: ',chal)\r\n status = 0\r\n #Test if info is stored\r\n #print (serverdice)\r\n #print (serverbid)\r\n #print (clientdice)\r\n #print (clientbid)\r\n status = 1\r\n \r\n if 'Winner' in msg:\r\n print ('Client challenge your bid. 
\\n'+ msg)\r\n print ('Server Roll: ' +', '.join(str(e) for e in serverdice))\r\n print ('Client Roll: ' +', '.join(str(e) for e in clientdice)) \r\n \r\n status = -1\r\n\r\n return status", "def on_message(data):\n pass", "def receiveData(self):\n self.context = zmq.Context()\n self.socket = self.context.socket(zmq.PAIR)\n #self.socket.connect(\"tcp://localhost:5556\")\n self.socket.connect(\"ipc:///tmp/mysocket\")\n print(\"Communication via IPC - Mac and Linux Only\")\n #Envia uma mensagem pedindo para comecar\n startstr = \"START\"\n self.socket.send(startstr.encode('utf-8'))\n time.sleep(1)\n #Recebe os dados\n while True:\n contents = self.socket.recv()\n self.commsqueue.put(contents)", "def testStreamKeying1(self):\n\n yield self.connect(self.get_body_node(connect=True, useKey=True))\n yield self.proxy.send(self.get_body_node(useKey=True))\n yield self.proxy.send(self.get_body_node(useKey=True))", "def onMessageReceived(self, inputString):\n return", "def pub(payload):\n print(payload)\n sys.stdout.flush()\n\n corr_id = pub.send_request(payload)\n r.lpushx(\"payload\", payload)\n\n\n while pub.queue[corr_id] is None:\n time.sleep(0.1)\n\n return pub.queue[corr_id]", "def receive_message(datagram, connection):", "def on_privmsg(self, raw_msg, msg, source, **kwargs):", "def run(self):\n alogger.info(\"Recieved message from %s, Message: (%d) %s\" % (self.client.getaddress(), self.action_type, self.message))\n \n #Try to call th function associated with this message type.\n #format = \"handle_<type>\" (eg: handle_100)\n fn = globals().get(\"handle_\" + str(self.action_type))\n if fn and callable(fn):\n fn(self.message, self.address, self.client)\n else:\n alogger.info(\"Received unknown message from %d, type: %d\" % (self.client.getaddress(), self.action_type))", "def start(self):\n self.kb_client.subscribe(self.kb_ID, {\"_data\": {\"tag\": TAG_ANSWER, \"text\": \"$input\", \"timestamp\": \"$time\", \"language\": \"$lang\"}}, self.add_emotion) # from the 'gnlp' module", "def send_message_to_server(self, key, value):\n if self.from_kivy_queue is None:\n return\n self.from_kivy_queue.put((key, value))", "def send_key(self, key):\n # Assert that code exists\n if(key in self.commands):\n try:\n # Open Socket\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect((self.host, self.port))\n command = str(self.commands[key]) + '\\n'\n sock.send(command)\n sock.close()\n except socket.error, e:\n raise TVError(e[1], 'send_key')\n finally:\n sock.close()\n sock = None\n else:\n raise TVError('Key received is note a valid code', 'send_key')", "def broker_connect_reply(self, data):\n\n print(\"Broker Connected\")\n #IDEA: Does this need a handler?", "def handle(self):\n arg = self.get_message()\n if \"AKG\" in arg:\n self.akg_builder.handle(self, arg)\n else:\n self.send_ack(False)\n self.exit()", "def __init__(self, routing_key):\n self.routing_key = routing_key\n self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=rabbitmq_hostname))\n self.channel = self.connection.channel()\n result = self.channel.queue_declare(queue='', exclusive=True, durable=True)\n self.callback_queue = result.method.queue\n\n self.channel.basic_consume(\n queue=self.callback_queue,\n on_message_callback=self.on_response,\n auto_ack=True\n )", "def ns_authentication(conn):\n # get RSA key of Bob for decrypting\n rsa_key = rsa.import_key(\"RsaKey.asc\")\n\n # A -- {N_A, A}(K_PB) --> B\n request = rsa.decrypt(rsa_key, conn.recv(1024))\n client_nonce, client_name = request.split(',')\n 
print(\"Bob: recieved nonce {} from client {}\".format(client_nonce, client_name))\n\n # get client's public key\n subprocess.Popen([sys.executable, \"..\\\\pks\\\\pks.py\", \"--extract\"])\n pks_address = (PKS_HOST, PKS_PORT)\n client_pkey = ns.get_public_key(pks_address, client_name, NAME, rsa_key)\n client_pkey = rsa.import_key(client_pkey)\n\n # Lowe's fix: A <-- {N_A, N_B, B} -- B\n bob_nonce = ns.generate_nonce()\n response = \"{},{},{}\".format(client_nonce, bob_nonce, NAME)\n response = rsa.encrypt(client_pkey, response)\n conn.sendall(response)\n print(\"Bob: sent nonces {}, {} to {}\".format(client_nonce, bob_nonce, client_name))\n\n # A -- {K, N_B} --> B\n request = conn.recv(1024)\n if request.isdigit() and int(request) == RESP_DENIED:\n return print(\"Bob: request to shutdown recieved, shutting down...\")\n request = rsa.decrypt(rsa_key, request)\n ssn_key, bob_resp_nonce = request.split(',')\n ssn_key = bytes(ssn_key, \"utf-8\")\n bob_resp_nonce = int(bob_resp_nonce)\n print(\"Bob: recieved session key {} and nonce {}\".format(ssn_key, bob_resp_nonce))\n\n # check if client did actually recieve Bob's nonce\n if bob_resp_nonce == bob_nonce:\n response = bytes(str(RESP_VERIFIED), \"utf-8\")\n conn.sendall(response)\n print(\"Bob: connection verified!\")\n return ssn_key, client_name\n else:\n print(\"Bob: nonces {} and {} do not match!\".format(bob_nonce, bob_resp_nonce))", "def exchange_key(connection, pub_key):\r\n\r\n if main.diffe_key_exchange is False:\r\n # Get the server's public key\r\n server_pub_key_bytes = connection.recv(1024)\r\n\r\n # Send public key\r\n connection.sendall(rsa.PublicKey.save_pkcs1(pub_key))\r\n\r\n else:\r\n # Rounds of bit-shifting and XOR\r\n rounds = 64\r\n\r\n while True:\r\n\r\n # Generate 4096-bit keys (RFC 3526 Group 16)\r\n client_diffe_key = pyDHE.new(16)\r\n shared_secret = client_diffe_key.negotiate(connection)\r\n\r\n # Encrypt\r\n encrypted = int(binascii.hexlify(rsa.PublicKey.save_pkcs1(pub_key)).decode(), 16)\r\n for x in range(0, rounds):\r\n encrypted = encrypted ^ (shared_secret ** rounds)\r\n encrypted = encrypted << rounds\r\n encrypted = int(str(encrypted)[::-1])\r\n\r\n # Decrypt\r\n decrypted = encrypted\r\n decrypted = int(str(decrypted)[::-1])\r\n for x in range(rounds, 0, -1):\r\n decrypted = decrypted >> rounds\r\n decrypted = decrypted ^ (shared_secret ** rounds)\r\n\r\n # Check if able to decrypt\r\n try:\r\n binascii.unhexlify(hex(decrypted)[2:]).decode()\r\n client_success = True\r\n\r\n # Generate new keys upon failure and try again\r\n except UnicodeDecodeError:\r\n client_success = False\r\n pass\r\n except binascii.Error:\r\n client_success = False\r\n pass\r\n\r\n # Notify client about encryption status\r\n server_success = connection.recv(1024)\r\n if client_success is False:\r\n connection.send(b'DHE')\r\n else:\r\n connection.send(b'CONTINUE')\r\n\r\n # Get encryption status from client\r\n if client_success is False or server_success == b'DHE':\r\n pass\r\n elif server_success == b'CONTINUE':\r\n break\r\n\r\n # Hold encrypted server key\r\n server_encrypted = b''\r\n\r\n # Receive encrypted key from the server\r\n while True:\r\n data = connection.recv(8192)\r\n if data == b'ENDED':\r\n break\r\n elif data[-5:] == b'ENDED':\r\n server_encrypted += data[:-5]\r\n break\r\n server_encrypted += data\r\n\r\n # Send the encrypted key to the server\r\n connection.sendall(bytes(hex(encrypted).encode()))\r\n connection.send(b'ENDED')\r\n\r\n # Decrypt the client's public key\r\n decrypted = int(server_encrypted, 
16)\r\n decrypted = int(str(int(decrypted))[::-1])\r\n for x in range(rounds, 0, -1):\r\n decrypted = decrypted >> rounds\r\n decrypted = decrypted ^ (shared_secret ** rounds)\r\n\r\n server_pub_key_bytes = binascii.unhexlify(hex(decrypted)[2:]).decode()\r\n\r\n server_pub_key = rsa.PublicKey.load_pkcs1(server_pub_key_bytes)\r\n # Determine max message size\r\n max_message_size = common.byte_size(server_pub_key.n) - 11\r\n\r\n # Return crypto key information\r\n return server_pub_key, server_pub_key_bytes, max_message_size", "def on_pub_rsa_request(self) -> Serialisable:\n msg = ServerMsgFactory().create(kind=ServerMsgFactory.KIND_HANDSHAKE_PKEY_RESP)\n with open(os.path.join(self.pki_path, self.KEY_PUBLIC)) as rsa_h:\n msg.internal[\"payload\"] = rsa_h.read()\n\n return msg", "def get_keys(self, update, context):\r\n self.SECRET_KEY = update.message.text\r\n update.message.reply_text(text=f'Новый ключ: {self.SECRET_KEY}')\r\n return ConversationHandler.END", "def request_idkey(self):\r\n if self.use_http():\r\n self.enqueue_http_request(\"money/idkey\", {}, \"idkey\")\r\n else:\r\n self.send_signed_call(\"private/idkey\", {}, \"idkey\")", "def gk_handshake_1_2_tkip( self , packet ):\n\t\ttry:\n\t\t\t\n\t\t\t# Decapsulate the TKIP packet, and rebuild the plaintext packet.\n\t\t\tplaintext \t\t= self.handleTKIP.decapsulate( packet , self.TK , self.MMICTxK )\n\t\t\tpacket \t\t\t= LLC()/SNAP()/EAPOL()/EAPOL_Key()/EAPOL_WPAKey()\n\t\t\tnew_packet \t\t= packet.__class__( plaintext )\n\t\t\t\n\t\t\t# Assert on the flags in the Key Information to verify it is GKHS Message 1/2.\n\t\t\tkeyinfoReceived \t= new_packet.getlayer( EAPOL_WPAKey ).KeyInfo\n\t\t\tself.__setKeyIDFromFlaglist( self.__getFlaglist( keyinfoReceived ) )\n\t\t\tflaglist\t\t= ['HMAC_MD5_RC4','group','ack','mic','secure']\n\t\t\tflaglist.append( self.keyID ) # Copying the Key ID from the received packet.\n\t\t\tkeyinfoCalculated \t= self.__getKeyInformation( flaglist )\n\t\t\tassert( keyinfoReceived == keyinfoCalculated ), \\\n\t\t\t\t'The received packet is not Group Key Handshake Message 1/2.'\n\t\t\tself.logger.log( self.logger.RECEIVED , 'EAPOL Group Key Handshake Message 1/2 TKIP' )\n\t\t\t\n\t\t\t# Assert that the EAPoL WPA Key layer has a valid MIC.\n\t\t\tself.__assertWPAKeyMIC( new_packet , Crypto.Hash.MD5 )\n\t\t\t\n\t\t\t# Update the Replay Counter.\n\t\t\tself.replayCounter\t= new_packet.getlayer( EAPOL_WPAKey ).ReplayCounter\n\t\t\t\n\t\t\t# Use ARC4 to decrypt the WPAKey-field, containing the Group Temporal Key.\n\t\t\t# First skip the first 256 bytes of ARC4, then decrypt the cipher.\n\t\t\t# Ref. 
IEEE 802.11i specification (2004); EAPOL-Key frames (Key Descriptor\n\t\t\t# Version 1).\n\t\t\tkey\t\t= new_packet.KeyIV + self.KEK\n\t\t\tarc4\t\t= ARC4.new( key )\n\t\t\tarc4.decrypt( '\\x00'*256 )\n\t\t\tself.GTK \t= arc4.decrypt( new_packet.WPAKey ) # Resulting key of 32 octets.\n\t\t\tself.logger.logKey( 'Group Temporal Key' , self.GTK )\n\t\t\t\n\t\texcept:\n\t\t\traise", "def incoming(self,message):\n #Convert to Dictionary, Whatever the input is\n if isinstance(message, str):\n message = json.loads(message)\n elif isinstance(message, bytes):\n message = self.deserialize(message)\n\n op = message.get(\"op\")\n if op == \"publish\":\n message[\"msg\"] = self.decompress(message[\"topic\"],message.get(\"msg\"))\n message[\"topic\"] = self.remap_topic(message[\"topic\"]) \n elif op == \"advertise\":\n message[\"topic\"] = self.remap_topic(message[\"topic\"])\n elif op == \"advertise_service\" or op == \"service_response\":\n message[\"service\"] = self.remap_service(message[\"service\"])\n\n\n message = json.dumps(message)\n #--------\n #replace JSON Null values in float32 types with infinity datatype (changed according to the error for LaserScan values)\n message = message.replace(\"null\", \"Infinity\")\n #--------\n self._protocol.incoming(message)", "def handle(self):\n try:\n # Wait for data\n data = json.loads(self.request.recv(1024).decode('UTF-8').strip())\n\n # Process data\n self.process_data(data)\n\n except Exception as e:\n print(\"Exception wile receiving message: \", e)\n self.request.sendall(\n bytes(json.dumps({'return': 'error'}), 'UTF-8'))", "def handle(self) -> None:\r\n\r\n if self.data.get(\"message-id\") != None:\r\n if self.data[\"status\"] == \"error\":\r\n print(self.data[\"error\"])\r\n return\r\n else:\r\n requestData = self.obs.pendingResponses.pop(self.data[\"message-id\"])\r\n request = requestData[\"request-type\"]\r\n #Requests as of version 4.8.0\r\n\r\n #General\r\n if request == \"GetVersion\":\r\n pass\r\n\r\n elif request == \"GetAuthRequired\":\r\n if self.data[\"authRequired\"]:\r\n secret_string: str = self.obs.password + self.data[\"salt\"]\r\n secret_hash: sha256 = sha256(secret_string.encode(\"utf-8\"))\r\n secret: bytes = b64encode(secret_hash.digest())\r\n\r\n response_string: str = secret.decode(\"utf-8\") + self.data[\"challenge\"]\r\n response_hash: sha256 = sha256(response_string.encode(\"utf-8\"))\r\n response: bytes = b64encode(response_hash.digest())\r\n\r\n self.obs.requests.append({\r\n \"type\": \"Authenticate\",\r\n \"auth\": response.decode(\"utf-8\")})\r\n\r\n else:\r\n self.obs.requests.append({\"type\": \"GetSceneList\"})\r\n\r\n elif request == \"Authenticate\":\r\n self.obs.requests.append({\"type\": \"GetSceneList\"})\r\n\r\n elif request == \"SetHeartbeat\":\r\n #To be removed in 5.0.0\r\n pass\r\n\r\n elif request == \"SetFilenameFormatting\":\r\n pass\r\n\r\n elif request == \"GetFilenameFormatting\":\r\n pass\r\n\r\n elif request == \"GetStats\":\r\n pass\r\n\r\n elif request == \"BroadcastCustomMessage\":\r\n pass\r\n\r\n elif request == \"GetVideoInfo\":\r\n pass\r\n\r\n elif request == \"OpenProjector\":\r\n pass\r\n\r\n elif request == \"TriggerHotkeyByName\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"TriggerHotkeyBySequence\":\r\n #Unreleased\r\n pass\r\n\r\n #Media Control\r\n elif request == \"PlayPauseMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"RestartMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StopMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == 
\"NextMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"PreviousMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaDuration\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaTime\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"SetMediaTime\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"ScrubMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaState\":\r\n #Unreleased\r\n pass\r\n\r\n #Sources\r\n\r\n elif request == \"GetMediaSourcesList\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetSourcesList\":\r\n pass\r\n\r\n elif request == \"GetSourceTypesList\":\r\n pass\r\n\r\n elif request == \"GetVolume\":\r\n pass\r\n\r\n elif request == \"SetVolume\":\r\n pass\r\n\r\n elif request == \"GetMute\":\r\n pass\r\n\r\n elif request == \"SetMute\":\r\n pass\r\n\r\n elif request == \"ToggleMute\":\r\n pass\r\n\r\n elif request == \"GetAudioActive\":\r\n pass\r\n\r\n elif request == \"SetSourceName\":\r\n pass\r\n\r\n elif request == \"SetSyncOffset\":\r\n pass\r\n\r\n elif request == \"GetSyncOffset\":\r\n pass\r\n\r\n elif request == \"GetSourceSettings\":\r\n pass\r\n\r\n elif request == \"SetSourceSettings\":\r\n pass\r\n\r\n elif request == \"GetTextGDIPlusProperties\":\r\n pass\r\n\r\n elif request == \"SetTextGDIPlusProperties\":\r\n pass\r\n\r\n elif request == \"GetTextFreetype2Properties\":\r\n pass\r\n\r\n elif request == \"SetTextFreetype2Properties\":\r\n pass\r\n\r\n elif request == \"GetBrowserSourceProperties\":\r\n pass\r\n\r\n elif request == \"SetBrowserSourceProperties\":\r\n pass\r\n\r\n elif request == \"GetSpecialSources\":\r\n pass\r\n\r\n elif request == \"GetSourceFilters\":\r\n source = self.obs.getSource(requestData[\"sourceName\"])\r\n if source != None:\r\n for _filter in self.data[\"filters\"]:\r\n source.addFilter(_filter) #type: ignore\r\n\r\n elif request == \"GetSourceFilterInfo\":\r\n pass\r\n\r\n elif request == \"AddFilterToSource\":\r\n pass\r\n\r\n elif request == \"RemoveFilterFromSource\":\r\n pass\r\n\r\n elif request == \"ReorderSourceFilter\":\r\n pass\r\n\r\n elif request == \"MoveSourceFilter\":\r\n pass\r\n\r\n elif request == \"SetSourceFilterSettings\":\r\n pass\r\n\r\n elif request == \"SetSourceFilterVisibility\":\r\n pass\r\n \r\n elif request == \"GetAudioMonitorType\":\r\n pass\r\n\r\n elif request == \"SetAudioMonitorType\":\r\n pass\r\n\r\n elif request == \"TakeSourceScreenshot\":\r\n pass\r\n\r\n #Outpute\r\n elif request == \"ListOutputs\":\r\n pass\r\n\r\n elif request == \"GetOutputInfo\":\r\n pass\r\n\r\n elif request == \"StartOutput\":\r\n pass\r\n\r\n elif request == \"StopOutput\":\r\n pass\r\n\r\n #Profiles\r\n elif request == \"SetCurrentProfile\":\r\n pass\r\n\r\n elif request == \"GetCurrentProfile\":\r\n pass\r\n\r\n elif request == \"ListProfiles\":\r\n pass\r\n\r\n #Recording\r\n elif request == \"GetRecordingStatus\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StartStopRecording\":\r\n pass\r\n\r\n elif request == \"StartRecording\":\r\n pass\r\n\r\n elif request == \"StopRecording\":\r\n pass\r\n\r\n elif request == \"PauseRecording\":\r\n pass\r\n\r\n elif request == \"ResumeRecording\":\r\n pass\r\n\r\n elif request == \"SetRecordingFolder\":\r\n pass\r\n\r\n elif request == \"GetRecordingFolder\":\r\n pass\r\n\r\n #Replay Buffer\r\n elif request == \"GetReplayBufferStatus\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StartStopReplayBuffer\":\r\n pass\r\n\r\n elif request == \"StartReplayBuffer\":\r\n pass\r\n\r\n elif request == 
\"StopReplayBuffer\":\r\n pass\r\n\r\n elif request == \"SaveReplayBuffer\":\r\n pass\r\n\r\n #Scene Collections\r\n elif request == \"SetCurrentSceneCollection\":\r\n pass\r\n\r\n elif request == \"GetCurrentSceneCollection\":\r\n pass\r\n\r\n elif request == \"ListSceneCollections\":\r\n pass\r\n\r\n #Scene Items\r\n elif request == \"GetSceneItemList\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetSceneItemProperties\":\r\n pass\r\n\r\n elif request == \"SetSceneItemProperties\":\r\n pass\r\n\r\n elif request == \"ResetSceneItem\":\r\n pass\r\n\r\n elif request == \"SetSceneItemRender\":\r\n pass\r\n\r\n elif request == \"SetSceneItemPosition\":\r\n pass\r\n\r\n elif request == \"SetSceneItemTransform\":\r\n pass\r\n\r\n elif request == \"SetSceneItemCrop\":\r\n pass\r\n\r\n elif request == \"DeleteSceneItem\":\r\n pass\r\n\r\n elif request == \"AddSceneItem\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"DuplicateSceneItem\":\r\n pass\r\n\r\n #Scenes\r\n elif request == \"SetCurrentScene\":\r\n pass\r\n\r\n elif request == \"GetCurrentScene\":\r\n self.obs.setCurrentScene(self.data[\"name\"])\r\n\r\n elif request == \"GetSceneList\":\r\n for scene in self.data[\"scenes\"]:\r\n self.obs.addScene(scene)\r\n self.obs.setCurrentScene(self.data[\"current-scene\"])\r\n\r\n elif request == \"CreateScene\":\r\n pass\r\n\r\n elif request == \"ReorderSceneItems\":\r\n pass\r\n\r\n elif request == \"SetSceneTransitionOverride\":\r\n pass\r\n\r\n elif request == \"RemoveSceneTransitionOverride\":\r\n pass\r\n\r\n elif request == \"GetSceneTransitionOverride\":\r\n pass\r\n\r\n #Streaming\r\n elif request == \"GetStreamingStatus\":\r\n pass\r\n\r\n elif request == \"StartStopStreaming\":\r\n pass\r\n\r\n elif request == \"StartStreaming\":\r\n pass\r\n\r\n elif request == \"StopStreaming\":\r\n pass\r\n\r\n elif request == \"SetStreamSettings\":\r\n pass\r\n\r\n elif request == \"GetStreamSettings\":\r\n pass\r\n\r\n elif request == \"SaveStreamSettings\":\r\n pass\r\n\r\n elif request == \"SendCaptions\":\r\n pass\r\n\r\n #Studio Mode\r\n elif request == \"GetStudioModeStatus\":\r\n pass\r\n\r\n elif request == \"GetPreviewScene\":\r\n pass\r\n\r\n elif request == \"SetPreviewScene\":\r\n pass\r\n\r\n elif request == \"TransitionToProgram\":\r\n pass\r\n\r\n elif request == \"EnableStudioMode\":\r\n pass\r\n\r\n elif request == \"DisableStudioMode\":\r\n pass\r\n\r\n elif request == \"ToggleStudioMode\":\r\n pass\r\n\r\n #Transitions\r\n elif request == \"GetTransitionList\":\r\n pass\r\n\r\n elif request == \"GetCurrentTransition\":\r\n pass\r\n\r\n elif request == \"SetCurrentTransition\":\r\n pass\r\n\r\n elif request == \"SetTransitionDuration\":\r\n pass\r\n\r\n elif request == \"GetTransitionDuration\":\r\n pass\r\n\r\n elif request == \"GetTransitionPosition\":\r\n pass\r\n\r\n else:\r\n print(f\"Unhandled response of type {request} and data {self.data}.\")\r\n\r\n \r\n\r\n else:\r\n event: str = self.data[\"update-type\"]\r\n #Events as of 4.8.0\r\n\r\n #Scenes\r\n if event == \"SwitchScenes\":\r\n self.obs.setCurrentScene(self.data[\"scene-name\"])\r\n\r\n elif event == \"ScenesChanged\":\r\n #self.obs.purgeScenes()\r\n pass\r\n\r\n elif event == \"SceneCollectionChanged\":\r\n pass\r\n\r\n elif event == \"SceneCollectionListChanged\":\r\n pass\r\n\r\n #Transitions\r\n elif event == \"SwitchTransition\":\r\n pass\r\n\r\n elif event == \"TransitionListChanged\":\r\n pass\r\n\r\n elif event == \"TransitionDurationChanged\":\r\n pass\r\n\r\n elif event == 
\"TransitionBegin\":\r\n pass\r\n\r\n elif event == \"TransitionEnd\":\r\n pass\r\n\r\n elif event == \"TransitionVideoEnd\":\r\n pass\r\n\r\n #Profiles\r\n elif event == \"ProfileChanged\":\r\n pass\r\n\r\n elif event == \"ProfileListChanged\":\r\n pass\r\n\r\n #Streaming\r\n elif event == \"StreamStarting\":\r\n pass\r\n\r\n elif event == \"StreamStarted\":\r\n pass\r\n\r\n elif event == \"StreamStopping\":\r\n pass\r\n\r\n elif event == \"StreamStopped\":\r\n pass\r\n\r\n elif event == \"StreamStatus\":\r\n pass\r\n\r\n #Recording\r\n elif event == \"RecordingStarting\":\r\n pass\r\n\r\n elif event == \"RecordingStarted\":\r\n pass\r\n\r\n elif event == \"RecordingStopping\":\r\n pass\r\n\r\n elif event == \"RecordingStopped\":\r\n pass\r\n\r\n elif event == \"RecordingPaused\":\r\n pass\r\n\r\n elif event == \"RecordingResumed\":\r\n pass\r\n\r\n #Replay Buffer\r\n elif event == \"ReplayStarting\":\r\n pass\r\n\r\n elif event == \"ReplayStarted\":\r\n pass\r\n\r\n elif event == \"ReplayStopping\":\r\n pass\r\n\r\n elif event == \"ReplayStopped\":\r\n pass\r\n\r\n #Other\r\n elif event == \"Exiting\":\r\n pass\r\n\r\n #General\r\n elif event == \"Heartbeat\":\r\n pass\r\n\r\n elif event == \"BroadcastCustomMessage\":\r\n pass\r\n\r\n #Sources\r\n elif event == \"SourceCreated\":\r\n pass\r\n\r\n elif event == \"SourceDestroyed\":\r\n pass\r\n\r\n elif event == \"SourceVolumeChanged\":\r\n pass\r\n\r\n elif event == \"SourceMuteStateChanged\":\r\n pass\r\n\r\n elif event == \"SourceAudioDeactivated\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"SourceAudioActivated\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"SourceAudioSyncOffsetChanged\":\r\n pass\r\n\r\n elif event == \"SourceAudioMixersChanged\":\r\n pass\r\n\r\n elif event == \"SourceRenamed\":\r\n pass\r\n\r\n elif event == \"SourceFilterAdded\":\r\n pass\r\n\r\n elif event == \"SourceFilterRemoved\":\r\n pass\r\n\r\n elif event == \"SourceFilterVisibilityChanged\":\r\n source = self.obs.getSource(self.data[\"sourceName\"])\r\n if source != None:\r\n _filter = source.getFilter(self.data[\"filterName\"]) #type: ignore\r\n if _filter != None:\r\n _filter.setVisible(self.data[\"filterEnabled\"]) #type: ignore\r\n\r\n elif event == \"SourceFiltersReordered\":\r\n pass\r\n\r\n #Media\r\n elif event == \"MediaPlaying\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaPaused\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaRestarted\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaStopped\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaNext\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaPrevious\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaStarted\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaEnded\":\r\n #Unreleased\r\n pass\r\n\r\n #Scene Items\r\n elif event == \"SceneItemOrderChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemAdded\":\r\n pass\r\n\r\n elif event == \"SceneItemRemoved\":\r\n pass\r\n\r\n elif event == \"SceneItemVisibilityChanged\":\r\n scene = self.obs.getScene(self.data[\"scene-name\"])\r\n if scene != None:\r\n source = scene.getSource(self.data[\"item-name\"]) #type: ignore\r\n if source != None:\r\n source.setVisible(self.data[\"item-visible\"]) #type: ignore\r\n \r\n\r\n elif event == \"SceneItemLockChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemTransformChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemSelected\":\r\n pass\r\n\r\n elif event == \"SceneItemDeselected\":\r\n pass\r\n\r\n #Studio Mode\r\n elif event == 
\"PreviewSceneChanged\":\r\n pass\r\n\r\n elif event == \"StudioModeSwitched\":\r\n pass\r\n\r\n #Unhandled Events\r\n else:\r\n print(\"Unhandled event with data: \" + str(self.data))", "def receive_call(self, private_key, sender_id, msg_id, message):\n return self._handle_call(private_key, sender_id, msg_id, message)", "def handle_msg(msg):\n if comm._msg_callback:\n comm._msg_callback(msg)", "async def ask_msg_packed(self, context):\n return await self.get_message_bytes(context, self.ask_msg(context))", "def receiver(): \n global data\n DW1000.newReceive()\n DW1000.receivePermanently()\n DW1000.startReceive()", "def handle_request(self, given_request: Request):\n key = des.DesKey(given_request.key.encode('utf-8'))\n if not request.data_input:\n en_message = key.encrypt(given_request.result.encode('utf-8'), padding=True)\n else:\n en_message = key.encrypt(given_request.data_input.encode('utf-8'), padding=True)\n if not self.next_handler:\n print(en_message)\n return True\n given_request.result = en_message\n return self.next_handler.handle_request(given_request)", "def gateway_receive(self, msg):\n\n if self.debug_api_messages:\n self.dump_input_message(msg)\n\n try:\n o = json.loads(msg)\n except Exception as e:\n return self.receive_exception(sys.exc_info()[0], e, msg)\n\n msg_type = o['type']\n msg_id = o['id']\n msg_data = o['data']\n\n if self.log_api_messages:\n self.output(f\"--> {msg_type} {msg_id} {msg_data}\")\n\n if msg_type == 'system':\n self.handle_system_message(msg_id, msg_data)\n else:\n if msg_id in self.active_cxn:\n c = self.active_cxn[msg_id].receive(msg_type, msg_data)\n else:\n self.error_handler(self.id, f\"Message Received on Unknown connection: {repr(msg)}\")\n\n return True", "def run(self):\n\n def callback(ch, method, properties, body):\n json_body = json.loads(body)\n self.buffer.append(Fvalue.fromdict(json_body))\n\n sleep(5) # We introduce a slight delay to let the RabbitMQ container to accept connections\n connection = pika.BlockingConnection(pika.ConnectionParameters(host=self.mq_host,port=self.mq_port))\n channel = connection.channel()\n channel.exchange_declare(exchange=self.mq_host + '_exchange', exchange_type='direct')\n result = channel.queue_declare(exclusive=True)\n queue_name = result.method.queue\n channel.queue_bind(exchange=self.mq_host + '_exchange',\n queue=queue_name,\n routing_key=self.routing_key)\n channel.basic_consume(callback,queue=queue_name,no_ack=True)\n channel.start_consuming()", "def dispatch(message,var=None):\r\n if message=='set_key':\r\n set_key(var)\r\n elif message=='empty_key':\r\n empty_key()\r\n elif message=='export_key':\r\n return export_key()\r\n elif message=='import_key':\r\n import_key(var)\r\n elif message=='encoding':\r\n return encoding(var)\r\n elif message=='decoding':\r\n return decoding(var)\r\n else:\r\n print(\"Unknown message\")", "def onMessage(self, msg, isBinary):\n\t\tif DEBUG:\n\t\t\tsys.stdout.write(\n\t\t\t\t\"Got {t}binary Message: '{m}'.\\n\".format(\n\t\t\t\t\tm=msg, t=\"\" if isBinary else \"non\"\n\t\t\t\t\t)\n\t\t\t\t)\n\t\tif not isBinary:\n\t\t\traise CommunicationError(\n\t\t\t\t\"Received non-binary Message: '{m}'!'\".format(m=msg)\n\t\t\t\t)\n\t\t\treturn\n\t\tif not self.did_handshake:\n\t\t\tif len(msg) != 1:\n\t\t\t\traise CommunicationError(\n\t\t\t\t\t\"Error during Handshake: Expected 1 Byte, got {n} Bytes!\".format(n=len(msg))\n\t\t\t\t\t)\n\t\t\tif msg == \"T\":\n\t\t\t\tself.did_handshake = True\n\t\t\t\treturn\n\t\t\telif msg == \"F\":\n\t\t\t\traise 
CommunicationError(\"Version Mismatch!\")\n\t\t\telse:\n\t\t\t\traise CommunicationError(\n\t\t\t\t\t\"Error during Handshake: Regeived Invalid Answer: '{a}'!\".format(a=msg)\n\t\t\t\t\t)\n\t\tidb = msg[0]\n\t\tpayload = msg[1:]\n\t\tif idb == ID_CTRL:\n\t\t\tif payload.startswith(\"I:JOIN\") and len(payload) == 7:\n\t\t\t\tself.cid = ord(payload[-1])\n\t\t\t\tself.joinstate = 2\n\t\t\telif payload == \"E:NOJOIN\":\n\t\t\t\tself.joinstate = 3\n\t\t\telif payload.startswith(\"I:CREATE\") and len(payload) == 9:\n\t\t\t\tself.cid = ord(payload[-1])\n\t\t\t\tself.createstate = 2\n\t\t\telif payload == \"E:NOCREATE\":\n\t\t\t\tself.createstate = 3\n\t\t\telif payload.startswith(\"I:RESERVE\"):\n\t\t\t\tself.reservestate = payload[9:]\n\t\t\telif payload == \"E:NORESERVE\":\n\t\t\t\tself.reservestate = 3\n\t\t\telif payload.startswith(\"I:EXTEND\"):\n\t\t\t\tself.extendstate = payload[8:]\n\t\t\telif payload == \"E:NOEXTEND\":\n\t\t\t\tself.extendstate = 3\n\t\t\telif payload.startswith(\"LEAVE\") and len(payload) == 6:\n\t\t\t\tpid = ord(payload[-1])\n\t\t\t\tself.AL.acquire()\n\t\t\t\tfor s in self.ls.keys():\n\t\t\t\t\tif self.ls[s][\"peer\"] == pid:\n\t\t\t\t\t\tself._close_s(s)\n\t\t\t\tfor s in self.s2i.keys():\n\t\t\t\t\ti = self.s2i[s]\n\t\t\t\t\tif i[\"peer\"] == pid:\n\t\t\t\t\t\t# port=i[\"local\"]\n\t\t\t\t\t\tself._close_s(s)\n\t\t\t\tself.AL.release()\n\t\telif idb == ID_PC:\n\t\t\tsender, payload = ord(payload[0]), payload[1:]\n\t\t\tif payload == \"PING\":\n\t\t\t\tself.send_to(sender, ID_PC + \"PINGANSW\")\n\t\t\telif payload == \"PINGANSW\":\n\t\t\t\tct = time.time()\n\t\t\t\tif sender not in self.pingstates.keys():\n\t\t\t\t\treturn\n\t\t\t\telse:\n\t\t\t\t\tst = self.pingstates[sender][1]\n\t\t\t\t\tres = ct - st\n\t\t\t\t\tself.pingstates[sender] = (True, res)\n\t\t\telif payload.startswith(\"BRIDGE\") and len(payload) == 10:\n\t\t\t\tlp, pp = struct.unpack(\"!HH\", payload[6:])\n\t\t\t\tai = (sender, pp)\n\t\t\t\tif isinstance(self.whitelist, list) or isinstance(self.whitelist, tuple):\n\t\t\t\t\tif lp not in self.whitelist:\n\t\t\t\t\t\tself.__send_close(ai)\n\t\t\t\t\t\treturn\n\t\t\t\ttry:\n\t\t\t\t\ts = socket.socket()\n\t\t\t\t\ts.connect((\"localhost\", lp))\n\t\t\t\t\tself.AL.acquire()\n\t\t\t\t\tself.s2i[s] = {\n\t\t\t\t\t\t\"socket\": s,\n\t\t\t\t\t\t\"recv\": 0,\n\t\t\t\t\t\t\"send\": 0,\n\t\t\t\t\t\t\"peer\": sender,\n\t\t\t\t\t\t\"local\": pp,\n\t\t\t\t\t\t\"port\": lp,\n\t\t\t\t\t\t\"creator\": sender\n\t\t\t\t\t\t}\n\t\t\t\t\tself.s2ai[s] = ai\n\t\t\t\t\tself.ai2s[ai] = s\n\t\t\t\t\tself.AL.release()\n\t\t\t\texcept:\n\t\t\t\t\tself._close_s(s)\n\t\t\telif payload.startswith(\"CLOSE\") and len(payload) == 8:\n\t\t\t\tc = ord(payload[5])\n\t\t\t\tlp = struct.unpack(\"!H\", payload[6:])[0]\n\t\t\t\tai = (c, lp)\n\t\t\t\tself.AL.acquire()\n\t\t\t\ttry:\n\t\t\t\t\ts = self.ai2s[ai]\n\t\t\t\t\tself._close_s(s, send_close=False)\n\t\t\t\texcept KeyError:\n\t\t\t\t\tpass\n\t\t\t\tself.AL.release()\n\t\telif idb == ID_MSG:\n\t\t\tsender, creator, pi, payload = (\n\t\t\t\tord(payload[0]), ord(payload[1]),\n\t\t\t\tpayload[2:4], payload[4:]\n\t\t\t\t)\n\t\t\tlp = struct.unpack(\"!H\", pi)[0]\n\t\t\tai = (creator, lp)\n\t\t\tcomp = decrypt(payload, self.__key)\n\t\t\tif COMPRESSION > 0:\n\t\t\t\ttosend = zlib.decompress(comp)\n\t\t\telse:\n\t\t\t\ttosend = comp\n\t\t\tlength = len(tosend)\n\t\t\tself.AL.acquire()\n\t\t\ttry:\n\t\t\t\ts = self.ai2s[ai]\n\t\t\texcept KeyError:\n\t\t\t\tself.AL.release()\n\t\t\t\treturn\n\t\t\tself.s2i[s][\"recv\"] += 
length\n\t\t\tself.AL.release()\n\t\t\ttry:\n\t\t\t\ts.send(tosend)\n\t\t\texcept:\n\t\t\t\tself.AL.acquire()\n\t\t\t\tself._close_s(s)\n\t\t\t\tself.AL.release()\n\t\telse:\n\t\t\traise CommunicationError(\n\t\t\t\t\"Received Message with invalid ID '{i}'!\".format(i=idb)\n\t\t\t\t)", "def request(self, msg):\n\t\tif msg.command in ('AUTH', 'EXIT', 'GET', 'SET', 'VERSION', 'COMMAND', 'UPLOAD'):\n\t\t\tmethod = 'handle_request_%s' % (msg.command.lower(),)\n\t\telse:\n\t\t\tmethod = 'handle_request_unknown'\n\n\t\tself.execute(method, msg)", "def handle(self):\r\n # self.request is the TCP socket connected to the client\r\n # read the incoming command\r\n request = self.request.recv(1024).strip()\r\n # write to the queue waiting to be processed by the server\r\n INPUT_QUEUE.put(request)\r\n # wait for the server answer in the output queue\r\n response = OUTPUT_QUEUE.get(timeout=5.0)\r\n # send back the answer\r\n self.request.send(response)", "def start(self, msg):\n self.jsock.encode(msg)", "def callback_crypto_currency_market_data(message):\n body = json.loads(message.body.decode('utf-8'))\n \n # routing_key have view: message_type.data_type.exchange.pair[.time_frame]\n # message_type == update | starting, data_type == ticker | candles | depth,\n # exchange, pair, time_frame - sending by listing_info\n # mask: *.*.*.#\n message_type = message.routing_key.split('.')[0]\n data_id = '.'.join(message.routing_key.split('.')[1:])\n\n if message_type == 'update':\n for observer in self.subscribers.get(data_id):\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n elif message_type == 'starting':\n # if exist waiters, send data and move waiters in subscribers\n if not self.waiters_first_msg.get(data_id):\n return\n\n new_subscribers = []\n while self.waiters_first_msg[data_id]:\n observer = self.waiters_first_msg[data_id].pop()\n asyncio.get_event_loop().create_task(observer.update(\n dict(\n data_id=message.routing_key,\n data=body\n )\n ))\n new_subscribers.append(observer)\n\n # if not subscribers on this data_id, init new dict-value, else append to exist array\n subscribers = self.subscribers.get(data_id, None)\n if not subscribers and new_subscribers:\n self.subscribers[data_id] = new_subscribers\n asyncio.get_event_loop().create_task(self._send_message_for_subscribe(data_id))\n else:\n for new_subscriber in new_subscribers:\n if new_subscriber not in self.subscribers[data_id]:\n self.subscribers[data_id].append(new_subscriber)", "def execute_message_received(self, message_received):\n pass", "def client(self,message):\n self.message = message\n self.run()" ]
[ "0.67223096", "0.6613165", "0.66030747", "0.65658045", "0.6505863", "0.64924264", "0.63855624", "0.63809603", "0.6345278", "0.6331297", "0.6206609", "0.6176792", "0.6141044", "0.61168796", "0.61084545", "0.5941246", "0.5910018", "0.5898104", "0.58852327", "0.5862589", "0.58563215", "0.58375853", "0.58262444", "0.57546586", "0.5743632", "0.5742693", "0.5728753", "0.5698905", "0.5676591", "0.56643736", "0.5658737", "0.564347", "0.5617709", "0.5573967", "0.5564153", "0.55490345", "0.5537438", "0.5531186", "0.55218047", "0.5516112", "0.55065393", "0.5502552", "0.5467344", "0.5458623", "0.54465425", "0.5439092", "0.54362714", "0.5434176", "0.542583", "0.5425582", "0.5424442", "0.54177386", "0.5416144", "0.5411927", "0.5408841", "0.5406821", "0.53836596", "0.5370283", "0.53700626", "0.5361465", "0.5359304", "0.5358707", "0.53518385", "0.534868", "0.53356856", "0.53299874", "0.5322232", "0.53173655", "0.5310375", "0.5304178", "0.5300898", "0.5297142", "0.52916795", "0.5288488", "0.5286447", "0.5284158", "0.5280599", "0.5270454", "0.5270161", "0.52672094", "0.52533185", "0.5253022", "0.5250909", "0.52502465", "0.525012", "0.52470624", "0.524279", "0.5241314", "0.5236331", "0.5234436", "0.5233856", "0.52200717", "0.52143776", "0.5209472", "0.5205086", "0.5205061", "0.52043736", "0.51998365", "0.5193311", "0.519183" ]
0.6525278
4
Process ECDH procedure (receiving response)
def receive_exchange_response(self, pubkey, random_val, hint):
    #print("(%d) receive_exchange_response:" % int(time.time()))
    #print(" **> state:", self.state)
    if self.state != KeyExchangeManager.STATE_REQUESTING:
        return
    rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))
    self.set_invoke_timer(rand_time)
    self.shared_key = message_key_types.derive_shared_key(self.secret_key, pubkey, random_val)
    self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)
    self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, "confirm", self.peer_public_key, self.nonce, self.random, self.pending_key_name)
    self.key_name = self.pending_key_name
    self.set_cipher(self.key_name, hint)
    self._set_state(KeyExchangeManager.STATE_ESTABLISHED)
    #print("*STATE_ESTABLISHED")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def response(self):\n return self._send(bytes([0xef,0xfe,0x02,0x0,0x0,0x0,0x0,0x0]))", "def process_eas (chan, eas):\n\n text = os.popen('./dsame.py --msg \"' + eas + '\"').read().split(\"\\n\")\n text2 = list(filter(None, text))\n n = len(text2)\n if n:\n print (\"Transmitting...\");\n for i in range(0,n):\n msg = aprs_msg (mycall, product_id, '', 'NWS', \"[\" + str(i+1) + \"/\" + str(n) + \"] \" + text2[i])\n print (msg)\n send_msg (xmit_chan, msg)\n #print (\"---\")", "def handle_execution_response(self, data, *, wait):\n ...", "def _rceCB(self, resp, event):\r\n rosResp = rospy.AnyMsg()\r\n\r\n if _GZIP_LVL:\r\n rosResp._buff = zlib.decompress(resp.getvalue())\r\n else:\r\n rosResp._buff = resp.getvalue()\r\n\r\n event.set(rosResp)", "def recv_eplus_msg(self, msg):\n self.rcvd = msg\n self.parse_eplus_msg(msg)\n # Call Agent callback to do whatever with the message\n if self.callback is not None:\n self.callback()", "def test_decode_failure(self):\n\n def handle(event):\n def test():\n pass\n\n return 0x0000, test\n\n self.ae = ae = AE()\n ae.add_requested_context(\n ModalityPerformedProcedureStepNotification, ExplicitVRLittleEndian\n )\n ae.add_supported_context(ModalityPerformedProcedureStepNotification)\n\n handlers = [(evt.EVT_N_EVENT_REPORT, handle)]\n scp = ae.start_server((\"localhost\", 11112), evt_handlers=handlers, block=False)\n\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n assoc = ae.associate(\"localhost\", 11112)\n\n class DummyReply:\n def getvalue(self):\n def test():\n pass\n\n return test\n\n class DummyMessage:\n is_valid_response = True\n EventReply = DummyReply()\n Status = 0x0000\n STATUS_OPTIONAL_KEYWORDS = []\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return 1, DummyMessage()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n assert assoc.is_established\n\n # Event Information\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_event_report(\n ds,\n 1,\n ModalityPerformedProcedureStepNotification,\n \"1.2.840.10008.5.1.1.40.1\",\n )\n\n assert status.Status == 0x0110\n assert ds is None\n\n assoc.release()\n scp.shutdown()", "def processReadback(resp):\n a = np.fromstring(resp, dtype='<u1')\n return {\n 'build': a[51],\n 'serDAC': a[56],\n 'noPllLatch': bool((a[58] & 0x80) > 0),\n 'ackoutI2C': a[61],\n 'I2Cbytes': a[69:61:-1],\n 'executionCounter': (a[53] << 8) + a[52]\n }", "def process_event(event, device_id):\n print(event)\n if event.type == EventType.ON_CONVERSATION_TURN_STARTED:\n adjustvolume('30')\n subprocess.Popen([\"aplay\", \"/opt/RPIGassistant/audio-files/Listening.wav\"], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n GPIO.output(5,GPIO.HIGH)\n led.ChangeDutyCycle(100)\n print()\n\n if (event.type == EventType.ON_RESPONDING_STARTED and event.args and not event.args['is_error_response']):\n GPIO.output(5,GPIO.LOW)\n GPIO.output(6,GPIO.HIGH)\n led.ChangeDutyCycle(50)\n\n if event.type == EventType.ON_RESPONDING_FINISHED:\n GPIO.output(6,GPIO.LOW)\n GPIO.output(5,GPIO.HIGH)\n led.ChangeDutyCycle(100)\n print()\n\n if (event.type == EventType.ON_CONVERSATION_TURN_TIMEOUT):\n say(random.choice(['sorry, i did not hear what you said', \n 'sorry, i did not hear anything', \n 'pardon', \n 'sorry, have you said something?']))\n restorevolume()\n print()\n\n if 
(event.type == EventType.ON_NO_RESPONSE):\n restorevolume()\n print()\n\n if (event.type == EventType.ON_CONVERSATION_TURN_FINISHED and\n event.args and not event.args['with_follow_on_turn']):\n restorevolume()\n GPIO.output(5,GPIO.LOW)\n led.ChangeDutyCycle(0)\n print()\n\n if event.type == EventType.ON_DEVICE_ACTION:\n for command, params in process_device_actions(event, device_id):\n print('Do command', command, 'with params', str(params))", "def handle(self):\n global log_th\n sent = 1\n msg_body = ''\n get_recv = True\n get_data = True\n empty_check = 0\n # Looping session requests\n while 1:\n try:\n # If enabled sleep feauture\n if self.sleep_between != 0:\n time.sleep(self.sleep_between)\n # If no answer feauture\n if self.no_answer != 0:\n time.sleep(1)\n continue\n # Changing receive size if receiving data part\n if sent == 3 or sent == 4:\n data = self.request.recv(self.data_recv_size)\n else:\n data = self.request.recv(self.std_recv_size)\n if sent != 5:\n self.command_w_th_inc.write_commands(\n data=bytes(data).decode().encode('ascii', 'ignore')\n .decode().rstrip(), qid=self.message_id)\n # To many empty line received, closed thread\n if self.func_empty_check(data):\n if empty_check >= 3:\n break\n else:\n empty_check += 1\n continue\n # Logging session requests if steps not equal to data section\n if sent != 5:\n log_th.log_info('{} - {} client executed : \"{}\"'.format(\n self.message_id, self.client_ip, bytes(data).decode().rstrip()))\n # Break the loop\n if self.func_quit(data):\n break\n except Exception as ae:\n log_th.log_warning('{} encounter an error from {} thread : {}'.format(\n self.client_ip, threading.current_thread().name, str(ae)))\n break\n else:\n try:\n # Checking the all steps\n if self.func_rset(data):\n sent = 2\n continue\n if self.func_auth(data):\n continue\n if self.func_auth_plain(data):\n continue\n if self.func_starttls(data):\n continue\n # Starting the sent steps\n # Ehlo/hello\n if sent == 1:\n if self.func_ehlo(data) or self.func_helo(data):\n sent += 1\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('command not found'))\n # Mail from, rcpt to, data\n elif sent == 2:\n if bytes(data).decode().encode('ascii', 'ignore').decode().rstrip().splitlines().__len__() > 2:\n get_data = False\n get_recv = False\n elif bytes(data).decode().encode('ascii',\n 'ignore').decode().rstrip().splitlines().__len__() > 1:\n get_recv = False\n if self.func_from(data, get_recv):\n sent += 1\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('mail from'))\n if not get_recv:\n if self.func_to(data, get_recv, get_data):\n sent += 1\n get_recv = True\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('rcpt to'))\n if not get_data:\n if self.func_data(data, get_recv, get_data):\n sent += 1\n get_data = True\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('data'))\n # rcpt to and data\n elif sent == 3:\n if bytes(data).decode().encode('ascii', 'ignore').decode().rstrip().splitlines().__len__() > 1:\n get_data = False\n if self.func_to(data, get_recv, get_data):\n sent += 1\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('rcpt to'))\n if not get_data:\n if self.func_data(data, get_recv, get_data):\n sent += 1\n get_data = True\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('data'))\n # data\n elif sent == 4:\n if self.func_to(data, get_recv, get_data):\n continue\n if self.func_data(data, get_recv, 
get_data):\n sent += 1\n else:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('data'))\n # content writing to file (if enabled) and quit statement\n elif sent == 5:\n data_list = bytes(data).decode().split('\\r\\n')\n for line in data_list:\n if str(line) == '.':\n if self.mail_save_enable != 0:\n out_file = open(self.mail_save_path + '/'\n + self.message_id + '.eml', 'w')\n out_file.write(msg_body)\n out_file.close()\n self.func_data_ok()\n sent = 1\n break\n else:\n msg_body += str(line) + '\\r\\n'\n except IndexError:\n if sent == 2:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('mail from'))\n elif sent == 3:\n self.func_denied(self.conf_th_ic.get_item(q_key='err-messages').get('rcpt to'))", "def process(self, data):\n\t\tif data['action'] == '0x40':\n\t\t\tself.authenticate(data)\n\t\telse:\n\t\t\t# Protocol error\n\t\t\tstack['clients'][self.client_ident].put(1,{'type':'0x000','status':'0x001'})", "def response(self, command_code, data):\n name, request_func, response_func = afpcommands.commands[command_code]\n return response_func(data)", "def _decode1(self, body, data):\r\n if \" \" in body:\r\n evtype,body = body.split(\" \",1)\r\n else:\r\n evtype,body = body,\"\"\r\n evtype = evtype.upper()\r\n if evtype == \"CIRC\":\r\n m = re.match(r\"(\\d+)\\s+(\\S+)(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"CIRC event misformatted.\")\r\n ident,status,path,purpose,reason,remote = m.groups()\r\n ident = int(ident)\r\n if path:\r\n if \"PURPOSE=\" in path:\r\n remote = reason\r\n reason = purpose\r\n purpose=path\r\n path=[]\r\n elif \"REASON=\" in path:\r\n remote = reason\r\n reason = path\r\n purpose = \"\"\r\n path=[]\r\n else:\r\n path_verb = path.strip().split(\",\")\r\n path = []\r\n for p in path_verb:\r\n path.append(p.replace(\"~\", \"=\").split(\"=\")[0])\r\n else:\r\n path = []\r\n\r\n if purpose and \"REASON=\" in purpose:\r\n remote=reason\r\n reason=purpose\r\n purpose=\"\"\r\n\r\n if purpose: purpose = purpose[9:]\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n event = CircuitEvent(evtype, ident, status, path, purpose, reason,\r\n remote, body)\r\n elif evtype == \"STREAM\":\r\n #plog(\"DEBUG\", \"STREAM: \"+body)\r\n m = re.match(r\"(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)?:(\\d+)(\\sREASON=\\S+)?(\\sREMOTE_REASON=\\S+)?(\\sSOURCE=\\S+)?(\\sSOURCE_ADDR=\\S+)?(\\s+PURPOSE=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM event misformatted.\")\r\n ident,status,circ,target_host,target_port,reason,remote,source,source_addr,purpose = m.groups()\r\n ident,circ = map(int, (ident,circ))\r\n if not target_host: # This can happen on SOCKS_PROTOCOL failures\r\n target_host = \"(none)\"\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n if source: source = source[8:]\r\n if source_addr: source_addr = source_addr[13:]\r\n if purpose:\r\n purpose = purpose.lstrip()\r\n purpose = purpose[8:]\r\n event = StreamEvent(evtype, ident, status, circ, target_host,\r\n int(target_port), reason, remote, source, source_addr,\r\n purpose, body)\r\n elif evtype == \"ORCONN\":\r\n m = re.match(r\"(\\S+)\\s+(\\S+)(\\sAGE=\\S+)?(\\sREAD=\\S+)?(\\sWRITTEN=\\S+)?(\\sREASON=\\S+)?(\\sNCIRCS=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"ORCONN event misformatted.\")\r\n target, status, age, read, wrote, reason, ncircs = m.groups()\r\n\r\n #plog(\"DEBUG\", \"ORCONN: \"+body)\r\n if ncircs: ncircs = int(ncircs[8:])\r\n else: ncircs = 0\r\n if reason: 
reason = reason[8:]\r\n if age: age = int(age[5:])\r\n else: age = 0\r\n if read: read = int(read[6:])\r\n else: read = 0\r\n if wrote: wrote = int(wrote[9:])\r\n else: wrote = 0\r\n event = ORConnEvent(evtype, status, target, age, read, wrote,\r\n reason, ncircs, body)\r\n elif evtype == \"STREAM_BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM_BW event misformatted.\")\r\n event = StreamBwEvent(evtype, body, *m.groups())\r\n elif evtype == \"BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"BANDWIDTH event misformatted.\")\r\n read, written = map(long, m.groups())\r\n event = BWEvent(evtype, read, written, body)\r\n elif evtype in (\"DEBUG\", \"INFO\", \"NOTICE\", \"WARN\", \"ERR\"):\r\n event = LogEvent(evtype, body)\r\n elif evtype == \"NEWDESC\":\r\n ids_verb = body.split(\" \")\r\n ids = []\r\n for i in ids_verb:\r\n ids.append(i.replace(\"~\", \"=\").split(\"=\")[0].replace(\"$\",\"\"))\r\n event = NewDescEvent(evtype, ids, body)\r\n elif evtype == \"ADDRMAP\":\r\n # TODO: Also parse errors and GMTExpiry\r\n m = re.match(r'(\\S+)\\s+(\\S+)\\s+(\\\"[^\"]+\\\"|\\w+)', body)\r\n if not m:\r\n raise ProtocolError(\"ADDRMAP event misformatted.\")\r\n fromaddr, toaddr, when = m.groups()\r\n if when.upper() == \"NEVER\": \r\n when = None\r\n else:\r\n when = time.strptime(when[1:-1], \"%Y-%m-%d %H:%M:%S\")\r\n event = AddrMapEvent(evtype, fromaddr, toaddr, when, body)\r\n elif evtype == \"NS\":\r\n event = NetworkStatusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"NEWCONSENSUS\":\r\n event = NewConsensusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"BUILDTIMEOUT_SET\":\r\n m = re.match(\r\n r\"(\\S+)\\sTOTAL_TIMES=(\\d+)\\sTIMEOUT_MS=(\\d+)\\sXM=(\\d+)\\sALPHA=(\\S+)\\sCUTOFF_QUANTILE=(\\S+)\",\r\n body)\r\n set_type, total_times, timeout_ms, xm, alpha, quantile = m.groups()\r\n event = BuildTimeoutSetEvent(evtype, set_type, int(total_times),\r\n int(timeout_ms), int(xm), float(alpha),\r\n float(quantile), body)\r\n elif evtype == \"GUARD\":\r\n m = re.match(r\"(\\S+)\\s(\\S+)\\s(\\S+)\", body)\r\n entry, guard, status = m.groups()\r\n event = GuardEvent(evtype, entry, guard, status, body)\r\n elif evtype == \"TORCTL_TIMER\":\r\n event = TimerEvent(evtype, data)\r\n else:\r\n event = UnknownEvent(evtype, body)\r\n\r\n return event", "def on_frame(self, frame: str) -> None:\n\n logger.debug(\"Frame: {}\".format(frame))\n try:\n message = json.loads(frame)\n except:\n logger.exception(\"Could not decode the JSON message\")\n self.transport.close()\n return\n\n mtype = message.get('type', None)\n self.log_state(mtype)\n if mtype == 'NEGOTIATION_RESPONSE':\n logger.debug(\"NEGOTIATION RESPONSE\")\n\n # Receive the chosen algorithms by the server \n self.process_negotiation_response(message)\n\n # Generate DH client private and public keys\n bytes_public_key,p,g,y=self.crypto.dh_client()\n \n message = {'type':'DH_PARAMETERS','parameters':{'p':p,'g':g,'public_key':str(bytes_public_key,'ISO-8859-1')}}\n self._send(message)\n self.state = STATE_DH\n \n return\n\n elif mtype == 'DH_PARAMETERS_RESPONSE':\n logger.debug('DH_PARAMETERS_RESPONSE')\n public_key=bytes(message['parameters']['public_key'],'ISO-8859-1')\n \n #Create shared key with the server public key\n self.crypto.create_shared_key(public_key)\n \n # Generate a symmetric key\n self.crypto.symmetric_key_gen()\n logger.debug(\"Key: {}\".format(self.crypto.symmetric_key))\n\n if self.state == 
STATE_ROTATION:\n self.state = STATE_OPEN\n self.send_file(self.file_name)\n \n elif self.state == STATE_DH:\n secure_message = self.encrypt_message({'type': 'OPEN', 'file_name': self.file_name})\n self._send(secure_message)\n self.send_mac()\n self.state = STATE_OPEN\n\n return\n\n elif mtype == 'INTEGRITY_CONTROL':\n flag = message['data']\n if flag == 'True':\n self._send(self.encrypt_message({'type': 'CLOSE'}))\n self.send_mac()\n logger.info(\"File transfer finished. Closing transport\")\n self.transport.close()\n\n elif mtype == 'OK': # Server replied OK. We can advance the state\n if self.state == STATE_OPEN:\n logger.info(\"Channel open\")\n self.send_file(self.file_name)\n elif self.state == STATE_DATA: # Got an OK during a message transfer.\n # Reserved for future use\n pass\n else:\n logger.warning(\"Ignoring message from server\")\n return\n\n elif mtype == 'ERROR':\n logger.warning(\"Got error from server: {}\".format(message.get('data', None)))\n \n else:\n logger.warning(\"Invalid message type\")\n\n logger.debug('Closing')\n self.transport.close()\n self.loop.stop()", "def test_rsp_success(self):\n\n def handle(event):\n return 0x0000, event.event_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStepNotification)\n scp = ae.start_server(\n (\"localhost\", 11112),\n block=False,\n evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)],\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStepNotification)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_event_report(\n ds,\n 1,\n ModalityPerformedProcedureStepNotification,\n \"1.2.840.10008.5.1.1.40.1\",\n )\n assert status.Status == 0x0000\n assert ds.PatientName == \"Test^test\"\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def handle_acsserver_response(self,message,conn):\n response=ResponseClientHandle.switch_msg_stream_type_str2dict(message)\n \n msg_type=response.get(event.KEY_MESSAGE)\n msg_group = int(msg_type) & 0xFF00\n \n #特殊处理AGENT 构建的给ACS的TIMOUT消息响应\n if self.msg_type == event.EV_RPC_AGENT_TIMEOUT_POST:\n if msg_type == event.EV_RPC_AGENT_TIMEOUT_RSP:\n log.debug_info(\"ACS server's response check agent timeout rpc request suc\")\n else:\n log.debug_info(\"ACS server's response check agent timeout rpc request fail\")\n \n return\n \n #检查消息的合法性\n #response message type error\n if not self.handle_response_message_type_verify(msg_group,msg_type,response):\n #check EV_RPC_CHECK_FAIL response\n if (msg_type==event.EV_RPC_CHECK_FAIL):\n\n tmp_obj = response.get(event.KEY_OBJECT)\n strio = StringIO(tmp_obj)\n tmp_msg_key_obj = pickle.load(strio)\n \n if not (isinstance(tmp_msg_key_obj,event.MsgUserRpcCheck)):\n err_info = \"ACS server's rpc response message type error\"\n log.debug_info(err_info)\n\n else:\n tmp_response_dict_ret=tmp_msg_key_obj.dict_ret\n if \"str_result\" in tmp_response_dict_ret:\n rc_str_result = tmp_response_dict_ret.get(\"str_result\")\n err_info = \"ACS server's rpc response check message fail, str_result: \" + rc_str_result\n log.debug_info(err_info)\n \n else:\n err_info = \"ACS server's rpc response message not found dict_ret data\"\n log.debug_info(err_info)\n \n else:\n err_info = \"ACS server's rpc response message type error\"\n log.debug_info(err_info)\n \n ResponseClientHandle.handle_except(self.msg,self.conn,err_info)\n return\n \n #response rpc post\n 
if (msg_group == event.EVENT_QUERY_GROUP or\n msg_group == event.EVENT_CONFIGURE_GROUP ): \n \n # send response to user or ACS\n ResponseClientHandle.handle_send_response(response,conn)\n \n elif (msg_group == event.EVENT_RPC_GROUP):\n \n if not DUTqueue.WAIT_RPC_RESPONSE_POST_FALG:\n # send response to user or ACS\n ResponseClientHandle.handle_send_response(response,conn)\n else:\n self.set_rpc_request_ACSServer_check_response(\"request_suc\")\n \n #response worklist build/bind/reserve/start/finish info post\n elif (msg_group == event.EVENT_WORKLIST_GROUP):\n\n self.handle_ACS_worklist_info_response(response,conn)\n \n else:\n err_info = \"Unsupport msg event group:%d\" % msg_group\n log.debug_info(err_info)\n ResponseClientHandle.handle_except(self.msg,self.conn,err_info)", "def on_eot(self, data):\n logger.debug('on_eot: %r', data)\n if self.in_transfer_state:\n # put the records together to a message\n if self.messages:\n message = b\"\".join(self.messages)\n self.queue.put_nowait(message)\n self.discard_env()\n else:\n raise InvalidState('Server is not ready to accept EOT message.')", "def _parse_dsdc_response(self, response, prompt):\n if prompt != SBE37Prompt.COMMAND:\n raise InstrumentProtocolException('dsdc command not recognized: %s.' % response)\n \n for line in response.split(SBE37_NEWLINE):\n self._param_dict.update(line)", "def process_cmd(self, cmd):\n\n resp = self.COMMANDS[cmd.cmd](cmd)\n\n logger.debug(\"Resp: %s\" % resp)\n # send to resp_queue\n # if type == G.CTRL_TYPE:\n #\n # response = json.dumps((corr_id, routing_key, resp))\n # logger.debug(\"Sending response: %s\" % response)\n # self.out_queue.put(response)\n\n response = cmd.make_response(resp)\n logger.debug(\"Sending response: %s\" % response)\n self.out_queue.put(str(response))", "def process_event(self, event):\n try:\n _LOGGER.debug(\"Hub: Process event: %s\", event)\n if self._encrypted:\n self._decrypt_string(event)\n _LOGGER.debug(\"Hub: Process event, after decrypt: %s\", event)\n self._update_states(event)\n except Exception as exc:\n _LOGGER.error(\"Hub: Process Event: %s gave error %s\", event, str(exc))\n\n # Even if decrypting or something else gives an error, create the acknowledgement message.\n return '\"ACK\"{}L0#{}[{}'.format(event.sequence, self._account_id, self._ending)", "def handle_data(handle, value):\n print(\"Received data: %s\" % hexlify(value))", "def TestResponse(port):\n\tcommandString = \"F\"\n\tport.write(commandString)\n\tcommandString = \"PM3,C,I1M500,I3M-500,I3M500,I1M-500,R\"\n\tport.write(commandString)\n\tWaitUntilReady(port)\n\tport.write(\"R\")\n\tresp=WaitUntilReady(port)\n\tcount=0\n\tprint(\"starting loop:\")\n\twhile('^' in resp):\n \tport.write(\"X\")\n\t\txpos=port.read(9)\n\t\tprint(xpos)\n\t\tport.write(\"R\")\n\t\ttime.sleep(5)\n\t\tresp=WaitUntilReady(port)\n\t\tcount = count+1\n\t\tprint(count)", "def handle_rpc(self):\n while True: # loop handling\n self.rbuf.seek(0)\n length_prefix = self.rbuf.read(4)\n if len(length_prefix) < 4: # half-package\n break\n\n try:\n length, = struct.unpack(\"I\", length_prefix.encode(\"utf-8\"))\n except Exception as e:\n print(e.__traceback__)\n body = self.rbuf.read(length)\n if len(body) < length: # half-package\n break\n\n request = json.loads(body)\n input = request[\"in\"]\n params = request[\"params\"]\n handler = self.handlers[input]\n handler(params)\n # cut read buffer\n left = self.rbuf.getvalue()[length + 4:]\n self.rbuf = StringIO()\n self.rbuf.write(left)\n # move position to EOF\n self.rbuf.seek(0, 2)", "def 
handle_helo(self, client, text):\n ip = socket.gethostbyname(socket.gethostname())\n reply = \"HELO \" + text + \"IP:\" + ip + \"\\nPort:\" + str(self.port) +\\\n \"\\nStudentID:13325878\\n\"\n client.send(reply.encode('utf-8'))", "def Echocallback(self, Frame_data):", "def parse_result(self, data):\n # typedef struct {\n # uint8_t nonce[YSM_AEAD_NONCE_SIZE]; // Nonce (publicId for Yubikey AEADs)\n # uint32_t keyHandle; // Key handle\n # YSM_STATUS status; // Status\n # uint8_t numBytes; // Number of bytes in AEAD block\n # uint8_t aead[YSM_AEAD_MAX_SIZE]; // AEAD block\n # } YSM_AEAD_GENERATE_RESP;\n\n nonce, \\\n key_handle, \\\n self.status, \\\n num_bytes = struct.unpack_from(\"< %is I B B\" % (pyhsm.defines.YSM_AEAD_NONCE_SIZE), data, 0)\n\n pyhsm.util.validate_cmd_response_hex('key_handle', key_handle, self.key_handle)\n\n if self.status == pyhsm.defines.YSM_STATUS_OK:\n pyhsm.util.validate_cmd_response_nonce(nonce, self.nonce)\n offset = pyhsm.defines.YSM_AEAD_NONCE_SIZE + 6\n aead = data[offset:offset + num_bytes]\n self.response = YHSM_GeneratedAEAD(nonce, key_handle, aead)\n return self.response\n else:\n raise pyhsm.exception.YHSM_CommandFailed(pyhsm.defines.cmd2str(self.command), self.status)", "def handle(self):\r\n # self.request is the TCP socket connected to the client\r\n # read the incoming command\r\n request = self.request.recv(1024).strip()\r\n # write to the queue waiting to be processed by the server\r\n INPUT_QUEUE.put(request)\r\n # wait for the server answer in the output queue\r\n response = OUTPUT_QUEUE.get(timeout=5.0)\r\n # send back the answer\r\n self.request.send(response)", "def read_and_response(self, vsr, address_h, address_l):\n # time.sleep(0.2)\n self.send_cmd([vsr, 0x41, address_h, address_l])\n # time.sleep(0.2)\n resp = self.read_response() # ie resp = [42, 144, 48, 49, 13]\n reply = resp[2:-1] # Omit start char, vsr address and end char\n reply = \"{}\".format(''.join([chr(x) for x in reply])) # Turn list of integers into ASCII string\n # print(\" RR. 
reply: {} (resp: {})\".format(reply, resp)) # ie reply = '01'\n return resp, reply", "def dataReceived(self,data):\n if DEBUG: print \"class CommandProtocol, function dataReceived\"\n if data[6:12] == \"status\":\n print self.server.xstatus()\n self.transport.write(\"<XML>\"+self.server.xstatus()+\"</XML>\")\n self.transport.loseConnection()\n return\n if DEBUG and len(data) < 10000: print \"data:\", data\n # on receipt of the first fragment determine message length, extract header info\n # NOTE: this can only handle header lengths smaller than the fragment size - \n # the header MUST arrive in the first fragment\n # append the new data \n self.alldata += data\n if u\"?console\" in data: self.provide_console()\n #requests = 0 #For use with priorities\n if not hasattr(self,'mlength'):\n # attempt to extract the header info with the current message subset\n try: \n self.dataHTTP = HTTPRequest(self.alldata)\n self.boundary = self.dataHTTP.headers['content-type'].split('boundary=')[-1]\n fb = data.find('--' + self.boundary) # find the first used boundary string\n if fb == -1:\n return # if there is none, the header must not be complete\n # if there is a boundary, header must be complete; get header data\n self.mlength = fb + int(self.dataHTTP.headers.dict['content-length'])\n headerItemsforCommand = ['host','origin','referer']\n self.request = {k: self.dataHTTP.headers[k] for k in headerItemsforCommand if k in self.dataHTTP.headers}\n self.request.update({'ctime':self.ctime,'protocol':self})\n # record where this request is coming from\n self.factory.connection_manager.elaborateLog(self,self.request)\n except: return # if unsuccessful, wait for next packet and try again\n \n # if we made it to here, the header has been received\n # if the entirety of message not yet received, append this fragment and continue\n if self.mlength > len(self.alldata):\n return\n # if we have made it here, this is last fragment of message \n # mark the 'all data received' time\n self.request.update({'timereceived':time.time()})\n # strip multipart data from incoming HTTP request\n kv = [datas.split('name=\"')[-1].split('\"\\n\\r\\n\\r') for datas in self.alldata.split('--'+self.boundary+'--')]\n self.params = {k:v.rstrip() for k,v in kv[:-1]}\n # insert request, if valid, into command queue (persistently resides in self.Factory) \n #pdb.set_trace()\n #SC=SocketCommand(self.params,self.request)\n SC=commands.SocketCommand(self.params,self.request, self.server.command_library)#CP 2014-10-28\n try:\n self.factory.connection_manager.server.command_queue.add(SC)\n #self.factory.commandQueue.add(SC)\n except AttributeError:\n if DEBUG: print 'Failed to insert SocketCommand in Queue, No Queue'\n raise\n #self.factory.commandQueue=CommandQueue(SC)\n except:\n if DEBUG: print \"Error No command included in request\", SC\n msg = {'Not_Command_text_message':'Failed to insert SocketCommand in Queue, reason unknown','terminator':'die'}\n self.transport.write(simplejson.dumps(msg, ensure_ascii = False).encode('utf8'))\n if DEBUG: print 'Failed to insert SocketCommand in Queue, reason unknown'\n self.transport.loseConnection()\n raise", "def process_rsvp():\n\n pass\n # needs to process rsvp", "def handle_request(self, query, request):\r\n request_pdu = None\r\n response_pdu = \"\"\r\n slave_id = None\r\n function_code = None\r\n func_code = None\r\n slave = None\r\n response = None\r\n\r\n try:\r\n # extract the pdu and the slave id\r\n slave_id, request_pdu = query.parse_request(request)\r\n if len(request_pdu) > 0:\r\n 
(func_code, ) = struct.unpack(\">B\", request_pdu[0])\r\n # 43 is Device Information\r\n if func_code == 43:\r\n # except will throw MissingKeyError\r\n slave = self.get_slave(slave_id)\r\n response_pdu = slave.handle_request(request_pdu)\r\n # make the full response\r\n response = query.build_response(response_pdu)\r\n # get the slave and let him execute the action\r\n elif slave_id == 0:\r\n # broadcast\r\n for key in self._slaves:\r\n response_pdu = self._slaves[key].handle_request(request_pdu, broadcast=True)\r\n response = query.build_response(response_pdu)\r\n elif slave_id == 255:\r\n r = struct.pack(\">BB\", func_code + 0x80, 0x0B)\r\n response = query.build_response(r)\r\n else:\r\n slave = self.get_slave(slave_id)\r\n response_pdu = slave.handle_request(request_pdu)\r\n # make the full response\r\n response = query.build_response(response_pdu)\r\n except (IOError, MissingKeyError) as e:\r\n # If the request was not handled correctly, return a server error response\r\n r = struct.pack(\">BB\", func_code + 0x80, defines.SLAVE_DEVICE_FAILURE)\r\n response = query.build_response(r)\r\n\r\n if slave:\r\n function_code = slave.function_code\r\n\r\n return (response, {'request': request_pdu.encode('hex'),\r\n 'slave_id': slave_id,\r\n 'function_code': function_code,\r\n 'response': response_pdu.encode('hex')})", "def hmVerifyMsgCRCOK(destination, protocol, source, expectedFunction, expectedLength, datal) :\r\n badresponse = 0\r\n if protocol == constants.HMV3_ID:\r\n checksum = datal[len(datal)-2:]\r\n rxmsg = datal[:len(datal)-2]\r\n crc = crc16() # Initialises the CRC\r\n expectedchecksum = crc.run(rxmsg)\r\n if expectedchecksum == checksum:\r\n print(\"CRC is correct\")\r\n else:\r\n print(\"CRC is INCORRECT\")\r\n s = \"Incorrect CRC: %s Expected: %s \\n\" % (datal, expectedchecksum)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n # Check the response\r\n dest_addr = datal[0]\r\n frame_len_l = datal[1]\r\n frame_len_h = datal[2]\r\n frame_len = (frame_len_h << 8) | frame_len_l\r\n source_addr = datal[3]\r\n func_code = datal[4]\r\n\r\n\r\n\r\n if (dest_addr != 129 and dest_addr != 160):\r\n print(\"dest_addr is ILLEGAL\")\r\n s = \"%s : Controller %s : Illegal Dest Addr: %s\\n\" % (localtime, loop, dest_addr)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (dest_addr != destination):\r\n print(\"dest_addr is INCORRECT\")\r\n s = \"%s : Controller %s : Incorrect Dest Addr: %s\\n\" % (localtime, loop, dest_addr)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (source_addr < 1 or source_addr > 32):\r\n print(\"source_addr is ILLEGAL\")\r\n s = \"%s : Controller %s : Illegal Src Addr: %s\\n\" % (localtime, loop, source_addr)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (source_addr != source):\r\n print(\"source addr is INCORRECT\")\r\n s = \"%s : Controller %s : Incorrect Src Addr: %s\\n\" % (localtime, loop, source_addr)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (func_code != constants.FUNC_WRITE and func_code != constants.FUNC_READ):\r\n print(\"Func Code is UNKNWON\")\r\n s = \"%s : Controller %s : Unknown Func Code: %s\\n\" % (localtime, loop, func_code)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (func_code != expectedFunction):\r\n print(\"Func Code is UNEXPECTED\")\r\n s = \"%s : Controller %s : Unexpected Func Code: %s\\n\" % (localtime, loop, func_code)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (func_code == constants.FUNC_WRITE and frame_len != 7):\r\n # Reply to Write is always 7 long\r\n 
print(\"response length is INCORRECT\")\r\n s = \"%s : Controller %s : Incorrect length: %s\\n\" % (localtime, loop, frame_len)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n if (len(datal) != frame_len):\r\n print(\"response length MISMATCHES header\")\r\n s = \"%s : Controller %s : Mismatch length: %s %s\\n\" % (localtime, loop, len(datal), frame_len)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\r\n \"\"\"if (func_code == constants.FUNC_READ and expectedLength !=len(datal) ):\r\n # Read response length is wrong\r\n print(\"response length not EXPECTED value\")\r\n print(len(datal))\r\n print(datal)\r\n s = \"%s : Controller %s : Incorrect length: %s\\n\" % (localtime, loop, frame_len)\r\n sys.stderr.write(s)\r\n badresponse += 1\r\n\"\"\"\r\n if (badresponse == 0):\r\n return True\r\n else:\r\n return False\r\n\r\n else:\r\n assert 0, \"Un-supported protocol found %s\" % protocol", "def test_rsp_invalid(self):\n\n def handle(event):\n return 0x0000, Dataset()\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStepNotification)\n scp = ae.start_server(\n (\"localhost\", 11112),\n block=False,\n evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)],\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStepNotification)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyResponse:\n is_valid_response = False\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return None, DummyResponse()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_event_report(\n ds,\n 1,\n ModalityPerformedProcedureStepNotification,\n \"1.2.840.10008.5.1.1.40.1\",\n )\n\n assert status == Dataset()\n assert ds is None\n assert assoc.is_aborted\n\n scp.shutdown()", "def _process(connection, process):\n try:\n command = connection.recv()\n except IOError as e:\n return \"Connection receive error: %s\" %(str(e))\n\n if command == __quit_command:\n try:\n connection.send(\"Exited server.\")\n finally:\n connection.close()\n return __quit_command\n\n #print \"Processing command\", command\n data = process(command)\n\n try:\n connection.send(data)\n except IOError as e:\n return \"Connection send error: %s\" %(str(e))\n\n connection.close()", "def process(self):\n\n try:\n self._read_buffer += self._socket.recv(4096)\n except socket.error as exc:\n if exc.errno not in [errno.EAGAIN,\n errno.EWOULDBLOCK,\n errno.WSAEWOULDBLOCK]:\n raise\n response, self._read_buffer = Message.decode(self._read_buffer)\n # Check if terminating RESPONSE_VALUE with body 00 01 00 00\n if (response.type == Message.SERVERDATA_RESPONSE_VALUE and\n response.body.encode(\"ascii\") == \"\\x00\\x01\\x00\\x00\"):\n response = Message(self._response[0].id,\n self._response[0].type,\n \"\".join([r.body for r in self._response]))\n self._active_requests[response.id].response = response\n self._response = []\n self._active_requests[response.id]\n elif response.type == Message.SERVERDATA_RESPONSE_VALUE:\n self._response.append(response)\n elif response.type == Message.SERVERDATA_AUTH_RESPONSE:\n self._active_requests[self._response[0].id].response = response\n # Clear empty SERVERDATA_RESPONSE_VALUE sent 
before\n # SERVERDATA_AUTH_RESPONSE\n self._response = []\n self._active_requests[response.id]", "def test_rsp_bad_dataset(self):\n\n def handle(event):\n def test():\n pass\n\n return 0x0000, test\n\n self.ae = ae = AE()\n ae.add_requested_context(ModalityPerformedProcedureStepRetrieve)\n ae.add_supported_context(ModalityPerformedProcedureStepRetrieve)\n\n handlers = [(evt.EVT_N_GET, handle)]\n scp = ae.start_server((\"localhost\", 11112), evt_handlers=handlers, block=False)\n\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n assoc = ae.associate(\"localhost\", 11112)\n\n assert assoc.is_established\n\n # Event Information\n attrs = [0x00100010, 0x00100020]\n status, ds = assoc.send_n_get(\n attrs, ModalityPerformedProcedureStepRetrieve, \"1.2.840.10008.5.1.1.40.1\"\n )\n\n assert status.Status == 0x0110\n assert ds is None\n\n assoc.release()\n scp.shutdown()", "def test_rsp_success(self):\n\n def handle(event):\n return 0x0000, event.attribute_list\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n\n handlers = [(evt.EVT_N_CREATE, handle)]\n scp = ae.start_server((\"localhost\", 11112), evt_handlers=handlers, block=False)\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_create(\n ds, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0000\n assert ds.PatientName == \"Test^test\"\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def _process_ue_ho_response(self, job: HandOverResponse, overhead: str):\n ue_id = job.ue_id\n prev_ap_id = job.ap_from\n new_ap_id = job.ap_to\n response = job.response\n logging.info(overhead + '%s--->%s: handover to %s response: %s' % (ue_id, self.ap_id, new_ap_id, response))\n assert prev_ap_id == self.ap_id\n assert self.ue_path.pop(ue_id) == self.UE_HANDOVER_TO\n assert self.ue_to_ho_to.pop(ue_id) == new_ap_id\n if not response:\n self.ue_path[ue_id] = self.UE_CONNECTED\n else:\n self._send_connected_ue_list()", "def test_rsp_failure(self):\n\n def handle(event):\n return 0x0112, None\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStepNotification)\n scp = ae.start_server(\n (\"localhost\", 11112),\n block=False,\n evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)],\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStepNotification)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_event_report(\n ds,\n 1,\n ModalityPerformedProcedureStepNotification,\n \"1.2.840.10008.5.1.1.40.1\",\n )\n assert status.Status == 0x0112\n assert ds is None\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def read_response():\n global rs485\n # Response in 7 bytes\n buf = bytearray( 7 )\n rs485.read( buf )\n #print( 'Buffer: ', hex(buf[0]), hex(buf[1]), hex(buf[2]), hex(buf[3]), hex(buf[4]), hex(buf[5]), hex(buf[6]) )\n\n # Decode the response\n # 0 & 1 are the slave addr + function code\n if not( (buf[0]==0x02) and (buf[1]==0x03) ):\n raise Exception( 'Invalid Slave/function' )\n if buf[2] != 0x02:\n raise Exception( 'Invalid response length' )\n # bytes 3 & 4 are the data. 
With value from 0 to 15, we do only need the\n # lower byte value (higher byte will always be 0)\n\n # print the direction label\n label = dir_as_text( buf[4] )\n print( 'Direction:', label )\n\n # bytes 5 & 6 are CRC (not checked here)", "def test_decode_failure(self):\n\n def handle(event):\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n return 0x0000, ds\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 0.4\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)]\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStep, ExplicitVRLittleEndian)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyReply:\n def getvalue(self):\n def test():\n pass\n\n return test\n\n class DummyMessage:\n is_valid_response = True\n AttributeList = DummyReply()\n Status = 0x0000\n STATUS_OPTIONAL_KEYWORDS = []\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return 1, DummyMessage()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n assert assoc.is_established\n mod_list = Dataset()\n mod_list.PatientName = \"Test^test\"\n status, ds = assoc.send_n_set(\n mod_list, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n\n assert status.Status == 0x0110\n assert ds is None\n\n scp.shutdown()", "def test_rsp_invalid(self):\n\n def handle(event):\n return 0x0000, Dataset()\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n\n handlers = [(evt.EVT_N_CREATE, handle)]\n scp = ae.start_server((\"localhost\", 11112), evt_handlers=handlers, block=False)\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyResponse:\n is_valid_response = False\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return None, DummyResponse()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_create(\n ds, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status == Dataset()\n assert ds is None\n assert assoc.is_aborted\n\n scp.shutdown()", "def receiveCallback(self, socket, stateMask):\n # read the PDU header\n pduHeader = self.recv(EGSE.EDENPDU.PDU_HEADER_BYTE_SIZE)\n if pduHeader == None:\n # failure handling was done automatically by derived logic\n return\n # consistency check\n pduHeaderLen = len(pduHeader)\n if pduHeaderLen != EGSE.EDENPDU.PDU_HEADER_BYTE_SIZE:\n LOG_ERROR(\"Read of PDU header failed: invalid size: \" + str(pduHeaderLen), \"EDEN\")\n self.disconnectFromServer()\n return\n pdu = EGSE.EDENPDU.PDU(pduHeader)\n # read the data field for the PDU\n dataFieldLength = pdu.dataFieldLength\n if dataFieldLength > 0:\n dataField = self.recv(dataFieldLength)\n if dataField == None:\n # failure handling was done automatically by derived logic\n return\n # consistency 
check\n remainingSizeRead = len(dataField)\n if remainingSizeRead != dataFieldLength:\n LOG_ERROR(\"Read of remaining PDU failed: invalid remaining size: \" + str(remainingSizeRead), \"EDEN\")\n self.disconnectFromServer()\n return\n pdu.setDataField(dataField)\n # dispatch depending on pduType and subType\n try:\n if pdu.pduType == EGSE.EDENPDU.PDU_TYPE_TC_A:\n if pdu.subType == EGSE.EDENPDU.SUB_TYPE_SPACE:\n # (TC_A,SPACE)\n LOG_INFO(\"EDEN.Client.receiveCallback(TC_A,SPACE)\", \"EDEN\")\n self.notifyTc_aSpace(pdu.field2, pdu.field3)\n elif pdu.subType == EGSE.EDENPDU.SUB_TYPE_SCOE:\n # (TC_A,SCOE)\n LOG_INFO(\"EDEN.Client.receiveCallback(TC_A,SCOE)\", \"EDEN\")\n self.notifyTc_aScoe(pdu.field2, pdu.field3)\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid subType: \" + str(pdu.subType), \"EDEN\")\n LOG(\"PDU = \" + str(pdu), \"EDEN\")\n self.disconnectFromServer()\n elif pdu.pduType == EGSE.EDENPDU.PDU_TYPE_TC_E:\n if pdu.subType == EGSE.EDENPDU.SUB_TYPE_SPACE:\n # (TC_E,SPACE)\n LOG_INFO(\"EDEN.Client.receiveCallback(TC_E,SPACE)\", \"EDEN\")\n tc_eSpacePDU = EGSE.EDENPDU.TC_Espace(pdu.buffer)\n self.notifyTc_eSpace(tc_eSpacePDU.getCCSDSpacket())\n elif pdu.subType == EGSE.EDENPDU.SUB_TYPE_SCOE:\n # (TC_E,SCOE)\n LOG_INFO(\"EDEN.Client.receiveCallback(TC_E,SCOE)\", \"EDEN\")\n tc_eScoePDU = EGSE.EDENPDU.TC_Escoe(pdu.buffer)\n self.notifyTc_eScoe(tc_eScoePDU.getCCSDSpacket())\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid subType: \" + str(pdu.subType), \"EDEN\")\n LOG(\"PDU = \" + str(pdu))\n self.disconnectFromServer()\n elif pdu.pduType == EGSE.EDENPDU.PDU_TYPE_TM:\n if pdu.subType == EGSE.EDENPDU.SUB_TYPE_SPACE:\n # (TM,SPACE)\n LOG_INFO(\"EDEN.Client.receiveCallback(TM,SPACE)\", \"EDEN\")\n tmSpacePDU = EGSE.EDENPDU.TMspace(pdu.buffer)\n self.notifyTmSpace(tmSpacePDU.getCCSDSpacket())\n elif pdu.subType == EGSE.EDENPDU.SUB_TYPE_SCOE:\n # (TM,SCOE)\n LOG_INFO(\"EDEN.Client.receiveCallback(TM,SCOE)\", \"EDEN\")\n tmScoePDU = EGSE.EDENPDU.TMscoe(pdu.buffer)\n self.notifyTmScoe(tmScoePDU.getCCSDSpacket())\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid subType: \" + str(pdu.subType), \"EDEN\")\n LOG(\"PDU = \" + str(pdu), \"EDEN\")\n self.disconnectFromServer()\n elif pdu.pduType == EGSE.EDENPDU.PDU_TYPE_CMD:\n if pdu.subType == EGSE.EDENPDU.SUB_TYPE_ANSW:\n # (CMD,ANSW)\n LOG_INFO(\"EDEN.Client.receiveCallback(CMD,ANSW)\", \"EDEN\")\n self.notifyCmdAnsw(pdu.getDataField().tostring())\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid subType: \" + str(pdu.subType), \"EDEN\")\n LOG(\"PDU = \" + str(pdu), \"EDEN\")\n self.disconnectFromServer()\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid pduType: \" + str(pdu.pduType), \"EDEN\")\n LOG(\"PDU = \" + str(pdu), \"EDEN\")\n self.disconnectFromServer()\n except Exception as ex:\n LOG_ERROR(\"Processing of received PDU failed: \" + str(ex), \"EDEN\")\n self.disconnectFromServer()", "def handle(self):\n for request in self._each_msg():\n r_len, r_type = struct.unpack_from('> I B', request)\n\n if r_type == self.SSH2_AGENTC_REQUEST_IDENTITIES:\n response = self._merge_identities(request)\n elif r_type == self.SSH2_AGENTC_SIGN_REQUEST:\n # Extract key blob from request\n key_blob_len = struct.unpack_from('> I', request, 5)[0]\n key_blob = request[9:9 + key_blob_len]\n hex_blob = ''.join('{:02x}'.format(b) for b in key_blob)\n\n agent = self._identity_map[hex_blob]\n\n if agent:\n if agent == self.server.alternate_agent:\n key_digest = self._key_digest(key_blob)\n LOG.info(\"identity %s used by %s: %s\", 
key_digest,\n self.username, self.process_info)\n\n response = agent.forward_request(request)\n else:\n response = \\\n self.server.default_agent.forward_request(request)\n else:\n response = self.server.default_agent.forward_request(request)\n\n self.request.sendall(response)", "def handle(self) -> None:\r\n\r\n if self.data.get(\"message-id\") != None:\r\n if self.data[\"status\"] == \"error\":\r\n print(self.data[\"error\"])\r\n return\r\n else:\r\n requestData = self.obs.pendingResponses.pop(self.data[\"message-id\"])\r\n request = requestData[\"request-type\"]\r\n #Requests as of version 4.8.0\r\n\r\n #General\r\n if request == \"GetVersion\":\r\n pass\r\n\r\n elif request == \"GetAuthRequired\":\r\n if self.data[\"authRequired\"]:\r\n secret_string: str = self.obs.password + self.data[\"salt\"]\r\n secret_hash: sha256 = sha256(secret_string.encode(\"utf-8\"))\r\n secret: bytes = b64encode(secret_hash.digest())\r\n\r\n response_string: str = secret.decode(\"utf-8\") + self.data[\"challenge\"]\r\n response_hash: sha256 = sha256(response_string.encode(\"utf-8\"))\r\n response: bytes = b64encode(response_hash.digest())\r\n\r\n self.obs.requests.append({\r\n \"type\": \"Authenticate\",\r\n \"auth\": response.decode(\"utf-8\")})\r\n\r\n else:\r\n self.obs.requests.append({\"type\": \"GetSceneList\"})\r\n\r\n elif request == \"Authenticate\":\r\n self.obs.requests.append({\"type\": \"GetSceneList\"})\r\n\r\n elif request == \"SetHeartbeat\":\r\n #To be removed in 5.0.0\r\n pass\r\n\r\n elif request == \"SetFilenameFormatting\":\r\n pass\r\n\r\n elif request == \"GetFilenameFormatting\":\r\n pass\r\n\r\n elif request == \"GetStats\":\r\n pass\r\n\r\n elif request == \"BroadcastCustomMessage\":\r\n pass\r\n\r\n elif request == \"GetVideoInfo\":\r\n pass\r\n\r\n elif request == \"OpenProjector\":\r\n pass\r\n\r\n elif request == \"TriggerHotkeyByName\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"TriggerHotkeyBySequence\":\r\n #Unreleased\r\n pass\r\n\r\n #Media Control\r\n elif request == \"PlayPauseMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"RestartMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StopMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"NextMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"PreviousMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaDuration\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaTime\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"SetMediaTime\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"ScrubMedia\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetMediaState\":\r\n #Unreleased\r\n pass\r\n\r\n #Sources\r\n\r\n elif request == \"GetMediaSourcesList\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetSourcesList\":\r\n pass\r\n\r\n elif request == \"GetSourceTypesList\":\r\n pass\r\n\r\n elif request == \"GetVolume\":\r\n pass\r\n\r\n elif request == \"SetVolume\":\r\n pass\r\n\r\n elif request == \"GetMute\":\r\n pass\r\n\r\n elif request == \"SetMute\":\r\n pass\r\n\r\n elif request == \"ToggleMute\":\r\n pass\r\n\r\n elif request == \"GetAudioActive\":\r\n pass\r\n\r\n elif request == \"SetSourceName\":\r\n pass\r\n\r\n elif request == \"SetSyncOffset\":\r\n pass\r\n\r\n elif request == \"GetSyncOffset\":\r\n pass\r\n\r\n elif request == \"GetSourceSettings\":\r\n pass\r\n\r\n elif request == \"SetSourceSettings\":\r\n pass\r\n\r\n elif request == \"GetTextGDIPlusProperties\":\r\n pass\r\n\r\n elif request == \"SetTextGDIPlusProperties\":\r\n 
pass\r\n\r\n elif request == \"GetTextFreetype2Properties\":\r\n pass\r\n\r\n elif request == \"SetTextFreetype2Properties\":\r\n pass\r\n\r\n elif request == \"GetBrowserSourceProperties\":\r\n pass\r\n\r\n elif request == \"SetBrowserSourceProperties\":\r\n pass\r\n\r\n elif request == \"GetSpecialSources\":\r\n pass\r\n\r\n elif request == \"GetSourceFilters\":\r\n source = self.obs.getSource(requestData[\"sourceName\"])\r\n if source != None:\r\n for _filter in self.data[\"filters\"]:\r\n source.addFilter(_filter) #type: ignore\r\n\r\n elif request == \"GetSourceFilterInfo\":\r\n pass\r\n\r\n elif request == \"AddFilterToSource\":\r\n pass\r\n\r\n elif request == \"RemoveFilterFromSource\":\r\n pass\r\n\r\n elif request == \"ReorderSourceFilter\":\r\n pass\r\n\r\n elif request == \"MoveSourceFilter\":\r\n pass\r\n\r\n elif request == \"SetSourceFilterSettings\":\r\n pass\r\n\r\n elif request == \"SetSourceFilterVisibility\":\r\n pass\r\n \r\n elif request == \"GetAudioMonitorType\":\r\n pass\r\n\r\n elif request == \"SetAudioMonitorType\":\r\n pass\r\n\r\n elif request == \"TakeSourceScreenshot\":\r\n pass\r\n\r\n #Outpute\r\n elif request == \"ListOutputs\":\r\n pass\r\n\r\n elif request == \"GetOutputInfo\":\r\n pass\r\n\r\n elif request == \"StartOutput\":\r\n pass\r\n\r\n elif request == \"StopOutput\":\r\n pass\r\n\r\n #Profiles\r\n elif request == \"SetCurrentProfile\":\r\n pass\r\n\r\n elif request == \"GetCurrentProfile\":\r\n pass\r\n\r\n elif request == \"ListProfiles\":\r\n pass\r\n\r\n #Recording\r\n elif request == \"GetRecordingStatus\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StartStopRecording\":\r\n pass\r\n\r\n elif request == \"StartRecording\":\r\n pass\r\n\r\n elif request == \"StopRecording\":\r\n pass\r\n\r\n elif request == \"PauseRecording\":\r\n pass\r\n\r\n elif request == \"ResumeRecording\":\r\n pass\r\n\r\n elif request == \"SetRecordingFolder\":\r\n pass\r\n\r\n elif request == \"GetRecordingFolder\":\r\n pass\r\n\r\n #Replay Buffer\r\n elif request == \"GetReplayBufferStatus\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"StartStopReplayBuffer\":\r\n pass\r\n\r\n elif request == \"StartReplayBuffer\":\r\n pass\r\n\r\n elif request == \"StopReplayBuffer\":\r\n pass\r\n\r\n elif request == \"SaveReplayBuffer\":\r\n pass\r\n\r\n #Scene Collections\r\n elif request == \"SetCurrentSceneCollection\":\r\n pass\r\n\r\n elif request == \"GetCurrentSceneCollection\":\r\n pass\r\n\r\n elif request == \"ListSceneCollections\":\r\n pass\r\n\r\n #Scene Items\r\n elif request == \"GetSceneItemList\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"GetSceneItemProperties\":\r\n pass\r\n\r\n elif request == \"SetSceneItemProperties\":\r\n pass\r\n\r\n elif request == \"ResetSceneItem\":\r\n pass\r\n\r\n elif request == \"SetSceneItemRender\":\r\n pass\r\n\r\n elif request == \"SetSceneItemPosition\":\r\n pass\r\n\r\n elif request == \"SetSceneItemTransform\":\r\n pass\r\n\r\n elif request == \"SetSceneItemCrop\":\r\n pass\r\n\r\n elif request == \"DeleteSceneItem\":\r\n pass\r\n\r\n elif request == \"AddSceneItem\":\r\n #Unreleased\r\n pass\r\n\r\n elif request == \"DuplicateSceneItem\":\r\n pass\r\n\r\n #Scenes\r\n elif request == \"SetCurrentScene\":\r\n pass\r\n\r\n elif request == \"GetCurrentScene\":\r\n self.obs.setCurrentScene(self.data[\"name\"])\r\n\r\n elif request == \"GetSceneList\":\r\n for scene in self.data[\"scenes\"]:\r\n self.obs.addScene(scene)\r\n self.obs.setCurrentScene(self.data[\"current-scene\"])\r\n\r\n elif request == 
\"CreateScene\":\r\n pass\r\n\r\n elif request == \"ReorderSceneItems\":\r\n pass\r\n\r\n elif request == \"SetSceneTransitionOverride\":\r\n pass\r\n\r\n elif request == \"RemoveSceneTransitionOverride\":\r\n pass\r\n\r\n elif request == \"GetSceneTransitionOverride\":\r\n pass\r\n\r\n #Streaming\r\n elif request == \"GetStreamingStatus\":\r\n pass\r\n\r\n elif request == \"StartStopStreaming\":\r\n pass\r\n\r\n elif request == \"StartStreaming\":\r\n pass\r\n\r\n elif request == \"StopStreaming\":\r\n pass\r\n\r\n elif request == \"SetStreamSettings\":\r\n pass\r\n\r\n elif request == \"GetStreamSettings\":\r\n pass\r\n\r\n elif request == \"SaveStreamSettings\":\r\n pass\r\n\r\n elif request == \"SendCaptions\":\r\n pass\r\n\r\n #Studio Mode\r\n elif request == \"GetStudioModeStatus\":\r\n pass\r\n\r\n elif request == \"GetPreviewScene\":\r\n pass\r\n\r\n elif request == \"SetPreviewScene\":\r\n pass\r\n\r\n elif request == \"TransitionToProgram\":\r\n pass\r\n\r\n elif request == \"EnableStudioMode\":\r\n pass\r\n\r\n elif request == \"DisableStudioMode\":\r\n pass\r\n\r\n elif request == \"ToggleStudioMode\":\r\n pass\r\n\r\n #Transitions\r\n elif request == \"GetTransitionList\":\r\n pass\r\n\r\n elif request == \"GetCurrentTransition\":\r\n pass\r\n\r\n elif request == \"SetCurrentTransition\":\r\n pass\r\n\r\n elif request == \"SetTransitionDuration\":\r\n pass\r\n\r\n elif request == \"GetTransitionDuration\":\r\n pass\r\n\r\n elif request == \"GetTransitionPosition\":\r\n pass\r\n\r\n else:\r\n print(f\"Unhandled response of type {request} and data {self.data}.\")\r\n\r\n \r\n\r\n else:\r\n event: str = self.data[\"update-type\"]\r\n #Events as of 4.8.0\r\n\r\n #Scenes\r\n if event == \"SwitchScenes\":\r\n self.obs.setCurrentScene(self.data[\"scene-name\"])\r\n\r\n elif event == \"ScenesChanged\":\r\n #self.obs.purgeScenes()\r\n pass\r\n\r\n elif event == \"SceneCollectionChanged\":\r\n pass\r\n\r\n elif event == \"SceneCollectionListChanged\":\r\n pass\r\n\r\n #Transitions\r\n elif event == \"SwitchTransition\":\r\n pass\r\n\r\n elif event == \"TransitionListChanged\":\r\n pass\r\n\r\n elif event == \"TransitionDurationChanged\":\r\n pass\r\n\r\n elif event == \"TransitionBegin\":\r\n pass\r\n\r\n elif event == \"TransitionEnd\":\r\n pass\r\n\r\n elif event == \"TransitionVideoEnd\":\r\n pass\r\n\r\n #Profiles\r\n elif event == \"ProfileChanged\":\r\n pass\r\n\r\n elif event == \"ProfileListChanged\":\r\n pass\r\n\r\n #Streaming\r\n elif event == \"StreamStarting\":\r\n pass\r\n\r\n elif event == \"StreamStarted\":\r\n pass\r\n\r\n elif event == \"StreamStopping\":\r\n pass\r\n\r\n elif event == \"StreamStopped\":\r\n pass\r\n\r\n elif event == \"StreamStatus\":\r\n pass\r\n\r\n #Recording\r\n elif event == \"RecordingStarting\":\r\n pass\r\n\r\n elif event == \"RecordingStarted\":\r\n pass\r\n\r\n elif event == \"RecordingStopping\":\r\n pass\r\n\r\n elif event == \"RecordingStopped\":\r\n pass\r\n\r\n elif event == \"RecordingPaused\":\r\n pass\r\n\r\n elif event == \"RecordingResumed\":\r\n pass\r\n\r\n #Replay Buffer\r\n elif event == \"ReplayStarting\":\r\n pass\r\n\r\n elif event == \"ReplayStarted\":\r\n pass\r\n\r\n elif event == \"ReplayStopping\":\r\n pass\r\n\r\n elif event == \"ReplayStopped\":\r\n pass\r\n\r\n #Other\r\n elif event == \"Exiting\":\r\n pass\r\n\r\n #General\r\n elif event == \"Heartbeat\":\r\n pass\r\n\r\n elif event == \"BroadcastCustomMessage\":\r\n pass\r\n\r\n #Sources\r\n elif event == \"SourceCreated\":\r\n pass\r\n\r\n elif event == 
\"SourceDestroyed\":\r\n pass\r\n\r\n elif event == \"SourceVolumeChanged\":\r\n pass\r\n\r\n elif event == \"SourceMuteStateChanged\":\r\n pass\r\n\r\n elif event == \"SourceAudioDeactivated\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"SourceAudioActivated\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"SourceAudioSyncOffsetChanged\":\r\n pass\r\n\r\n elif event == \"SourceAudioMixersChanged\":\r\n pass\r\n\r\n elif event == \"SourceRenamed\":\r\n pass\r\n\r\n elif event == \"SourceFilterAdded\":\r\n pass\r\n\r\n elif event == \"SourceFilterRemoved\":\r\n pass\r\n\r\n elif event == \"SourceFilterVisibilityChanged\":\r\n source = self.obs.getSource(self.data[\"sourceName\"])\r\n if source != None:\r\n _filter = source.getFilter(self.data[\"filterName\"]) #type: ignore\r\n if _filter != None:\r\n _filter.setVisible(self.data[\"filterEnabled\"]) #type: ignore\r\n\r\n elif event == \"SourceFiltersReordered\":\r\n pass\r\n\r\n #Media\r\n elif event == \"MediaPlaying\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaPaused\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaRestarted\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaStopped\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaNext\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaPrevious\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaStarted\":\r\n #Unreleased\r\n pass\r\n\r\n elif event == \"MediaEnded\":\r\n #Unreleased\r\n pass\r\n\r\n #Scene Items\r\n elif event == \"SceneItemOrderChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemAdded\":\r\n pass\r\n\r\n elif event == \"SceneItemRemoved\":\r\n pass\r\n\r\n elif event == \"SceneItemVisibilityChanged\":\r\n scene = self.obs.getScene(self.data[\"scene-name\"])\r\n if scene != None:\r\n source = scene.getSource(self.data[\"item-name\"]) #type: ignore\r\n if source != None:\r\n source.setVisible(self.data[\"item-visible\"]) #type: ignore\r\n \r\n\r\n elif event == \"SceneItemLockChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemTransformChanged\":\r\n pass\r\n\r\n elif event == \"SceneItemSelected\":\r\n pass\r\n\r\n elif event == \"SceneItemDeselected\":\r\n pass\r\n\r\n #Studio Mode\r\n elif event == \"PreviewSceneChanged\":\r\n pass\r\n\r\n elif event == \"StudioModeSwitched\":\r\n pass\r\n\r\n #Unhandled Events\r\n else:\r\n print(\"Unhandled event with data: \" + str(self.data))", "def _handleRequestEvseId(self, data):\r\n print(\"\\\"Request EVSE ID\\\" received\")\r\n message = self.whitebeet.v2gParseRequestEvseId(data)\r\n if message['format'] == 0:\r\n print(\"No EVSE ID available\")\r\n try:\r\n self.whitebeet.v2gSetEvseId(None)\r\n except Warning as e:\r\n print(\"Warning: {}\".format(e))\r\n except ConnectionError as e:\r\n print(\"ConnectionError: {}\".format(e))\r\n else:\r\n evseid = \"DE*ABC*E*00001*01\"\r\n print(\"Set EVSE ID: {}\".format(evseid))\r\n try:\r\n self.whitebeet.v2gSetEvseId(evseid)\r\n except Warning as e:\r\n print(\"Warning: {}\".format(e))\r\n except ConnectionError as e:\r\n print(\"ConnectionError: {}\".format(e))", "def handle(self):\n\t\ttry:\n\t\t\trequest_line = self.rfile.readline().decode(\"ascii\")\n\t\t\tassert request_line.endswith(\"\\r\\n\"), \"Request line must end in CRLF\"\n\t\t\tparts = request_line.strip().split()\n\t\t\tassert len(parts)==3, \"Invalid request line\"\n\t\t\thost, path, content_length = parts\n\t\t\tif (content_length:=int(content_length))>0:\n\t\t\t\tdata = self.rfile.read(content_length)\n\t\t\telse:\n\t\t\t\tdata = 
b''\n\t\t\tself.handle_request(host,path,data)\n\t\texcept AssertionError as e:\n\t\t\tself.response_code(4,e.args[0])", "def test_decode_failure(self):\n\n def handle(event):\n return 0x0000, event.attribute_list\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n\n handlers = [(evt.EVT_N_CREATE, handle)]\n scp = ae.start_server((\"localhost\", 11112), evt_handlers=handlers, block=False)\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyReply:\n def getvalue(self):\n def test():\n pass\n\n return test\n\n class DummyMessage:\n is_valid_response = True\n is_valid_request = False\n AttributeList = DummyReply()\n Status = 0x0000\n STATUS_OPTIONAL_KEYWORDS = []\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return 1, DummyMessage()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_create(\n ds, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0110\n assert ds is None\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def handle(self):\n try:\n # Wait for data\n data = json.loads(self.request.recv(1024).decode('UTF-8').strip())\n\n # Process data\n self.process_data(data)\n\n except Exception as e:\n print(\"Exception wile receiving message: \", e)\n self.request.sendall(\n bytes(json.dumps({'return': 'error'}), 'UTF-8'))", "def GetServerResponse( self, b_raise = True ):\r\n\r\n code = self._socket.read(1)\r\n\r\n\r\n if code == 'Z':\r\n\r\n return True\r\n\r\n elif code == 'F' : # an 'F' <error code> sequence \r\n\r\n error_info_length = self._fmt.format_length( code ) \r\n error_info = self._socket.read( error_info_length )\r\n\r\n if b_raise :\r\n\r\n err_msg = \"server returned an error : \" + repr( self._fmt.unpack(code, error_info) ) \r\n raise Eggog( err_msg ) \r\n \r\n else : \r\n return False \r\n \r\n elif code == 'I' : # a version byte should follow \r\n\r\n version_length = self._fmt.format_length( code ) \r\n version_info = self._socket.read( version_length )\r\n version = self._fmt.unpack( code, version_info ) \r\n\r\n ## # debug\r\n ## print version\r\n\r\n self._egi_protocol_version = version \r\n self._egi_protocol_version = version[0] \r\n\r\n return self._egi_protocol_version # just a bit more informative than 'None' \r\n \r\n else : # something completely unexpected \r\n\r\n if b_raise :\r\n \r\n raise Eggog( \"unexpected character code returned from server: '%s'\" % (code, ) ) \r\n\r\n else :\r\n\r\n return False", "def test_rsp_failure(self):\n\n def handle(event):\n return 0x0112, None\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)]\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_set(\n ds, ModalityPerformedProcedureStep, 
\"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0112\n assert ds is None\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def test_rsp_success(self):\n\n def handle(event):\n return 0x0000, event.modification_list\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)]\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_set(\n ds, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0000\n assert ds.PatientName == \"Test^test\"\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def process_AResponse(self) :\n while (1):\n str = self.recv(self.sock)\n if (len(str) > 0):\n response = amazon_pb2.AResponses()\n response.ParseFromString(str)\n print(response)\n # handle import new stock\n for arrive in response.arrived:\n things = arrive.things\n for thing in things:\n products = Whstock.objects.filter(pid = thing.id)\n if len(products) != 0:\n products[0].count = products[0].count + thing.count\n products[0].save()\n else :\n #need to specify world id\n whstock = Whstock()\n whstock.hid = arrive.whnum\n whstock.pid = thing.id\n whstock.dsc = thing.description\n whstock.count = thing.count\n whstock.save()\n # handle pack ready response\n #when ready send AU command to let UPS truck pickup,\n #use another thread for wait for UPS response\n #when receive response send ALoad command\n #when reveived loaded for Sim send AU command and let flag = 1;\n # tell UPS packages is ready and ask for trucks (provide destinaiton address)\n # tell warehouse to load when UPS trucks ready\n for currReady in response.ready:\n #save current state\n trans = Transaction.objects.get(ship_id = currReady)\n trans.ready = True\n trans.save()\n #connect to UPS\n ups_handler = threading.Thread(target=self.process_Uresponse, args=(trans,))\n ups_handler.start()\n self.AUCommand(trans, 0)\n print(\"first msg for UPS sent(to pickup)\")\n ups_handler.join()\n\n #load info from sim\n for load in response.loaded:\n #save current state\n trans = Transaction.objects.get(ship_id = load)\n trans.loaded = True\n trans.save()\n #connect to UPS\n self.AUCommand(trans, 1)\n print(\"second msg for UPS sent(get load success from sim world)\")", "def handleRequest(self, access_id, msg):\n log.msg('handling engine request for %s' % access_id)\n try:\n engine_client = yield self.backend.getEngine(access_id)\n log.msg('got engine Client %s' % str(engine_client))\n except InvalidAccessId:\n err = {'status':'ERR', 'response':'InvalidAccessId'}\n log.err('InvalidAccessId %s' % access_id)\n defer.returnValue(err)\n\n result = yield engine_client.send(msg)\n sucs = {'status':'OK', 'response':result}\n defer.returnValue(sucs)", "def test_rsp_invalid(self):\n\n def handle(event):\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n return 0x0000, ds\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 0.4\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_SET, handle)]\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = 
ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyResponse:\n is_valid_response = False\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return None, DummyResponse()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n mod_list = Dataset()\n mod_list.PatientName = \"Test^test\"\n status, ds = assoc.send_n_set(\n mod_list, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n\n assert status == Dataset()\n assert ds is None\n assert assoc.is_aborted\n\n scp.shutdown()", "async def handle_client(reader, writer):\n\n request = None\n \n # loop to continually handle incoming data\n while request != 'quit': \n request = (await reader.read(255)).decode('utf8')\n print(request.encode('utf8'))\n #log.info('COMMAND = '+request)\n writer.write(('COMMAND = '+request.upper()+'\\n').encode('utf8')) \n\n response = 'BAD'\n # check if data is empty, a status query, or potential command\n dataDec = request\n if dataDec == '':\n break\n elif 'status' in dataDec.lower():\n response = 'OK'\n # check if the command thread is running\n try:\n if exposureState() > 0:\n response = response + '\\nBUSY'\n else:\n response = response + '\\nIDLE'\n except:\n response = response + '\\nIDLE'\n\n if ccd_frame[0].s == PyIndi.ISS_ON:\n frameType = 'LIGHT'\n elif ccd_frame[1].s == PyIndi.ISS_ON:\n frameType = 'BIAS'\n elif ccd_frame[2].s == PyIndi.ISS_ON:\n frameType = 'DARK'\n elif ccd_frame[3].s == PyIndi.ISS_ON:\n frameType = 'FLAT'\n\n response = response+\\\n '\\nBIN MODE = '+str(ccd_bin[0].value)+'x'+str(ccd_bin[1].value)+\\\n '\\nCCD TEMP = '+str(ccd_temp[0].value)+\\\n 'C\\nLAST FRAME TYPE = '+str(frameType)+\\\n '\\nFILE DIR = '+str(fileDir)+\\\n '\\nLAST IMAGE = '+str(imgName)\n\n # send current status to open connection & log it\n #log.info('RESPONSE: '+response)\n writer.write((response+'\\nDONE\\n').encode('utf-8'))\n \n elif 'stop' in dataDec.lower():\n # check if the command thread is running\n try:\n if comThread.is_alive():\n response = 'OK: aborting exposure'\n ccd_abort[0].s=PyIndi.ISS_ON \n indiclient.sendNewSwitch(ccd_abort)\n blobEvent.set() #Ends the currently running thread.\n response = response+'\\nExposure Aborted'\n else:\n response = 'OK: idle'\n except:\n response = 'OK: idle'\n\n # send current status to open connection & log it\n #log.info('RESPONSE = '+response)\n writer.write((response+'\\nDONE\\n').encode('utf-8'))\n\n else:\n # check if the command thread is running, may fail if not created yet, hence try/except\n try:\n if comThread.is_alive():\n response = 'BAD: busy'\n # send current status to open connection & log it\n #log.info('RESPONSE = '+response)\n writer.write((response+'\\nDONE\\n').encode('utf-8'))\n else:\n # create a new thread for the command\n comThread = threading.Thread(target=handle_command, args=(log, writer, dataDec,))\n comThread.start()\n except:\n # create a new thread for the command\n comThread = threading.Thread(target=handle_command, args=(log, writer, dataDec,))\n comThread.start()\n\n await writer.drain()\n writer.close()", "def command_processor(self, command_dict):\n current_command = command_dict.get('CMD')\n\n if current_command == 'PING':\n return {'CMD': 'PONG'}\n\n elif current_command == 'NEW_PEER_JOIN':\n peer_id = command_dict.get('peer_id')\n peer_host = 
command_dict.get('peer_host')\n port = command_dict.get('peer_port')\n protocol_logger('NEW_PEER_JOIN recieved from {} at {}:{}'.format(peer_id, peer_host, port))\n\n protocol_logger('Adding new peer with id {} at {}:{}'.format(peer_id, peer_host, port))\n self.add_new_peer(peer_id, peer_host, port)\n\n protocol_logger('Peer added successfully. Initiating upload node data to remote peer')\n self.data_upload(peer_id)\n\n elif current_command == 'NEW_JOIN_DATA_UPLOAD':\n protocol_logger('NEW_PEER_JOIN_DATA_UPLOAD recieved')\n\n self.handle_data_upload(command_dict.get('peer_list'), command_dict.get('data_list'))\n\n elif current_command == 'ADD_PEER':\n\n peer_id = command_dict.get('peer_id')\n peer_host = command_dict.get('peer_host')\n port = command_dict.get('peer_port')\n protocol_logger('ADD_PEER recieved')\n\n self.add_new_peer(peer_id, peer_host, port)\n protocol_logger('New peer {} at {}:{} added to peer list successfully'.format(peer_id, peer_host, port))\n\n elif current_command == 'VALIDATE_BLOCK':\n protocol_logger('VALIDATE_BLOCK recieved')\n block_data = command_dict.get('block_data')\n\n protocol_logger('Initiaiting block validation')\n if not self.chain_instance.validate_block(block_data):\n protocol_logger('Block validation failed')\n return 'KCA'\n\n elif current_command == 'ADD_BLOCK':\n protocol_logger('ADD_BLOCK recieved')\n block_data = command_dict.get('data')\n protocol_logger('Adding data block')\n self.chain_instance.add_block(block_data)\n protocol_logger('Block added successfully')\n\n return 'ACK'", "def handleMessage(self, e):\n if len(self.messages) == 0:\n return\n messageInfo = self.messages[0]\n requestInfo = self.helpers.analyzeRequest(messageInfo)\n # run cheker\n self.run_replace_param_with_a_previously_valid_one(requestInfo, messageInfo)\n self.run_replace_param_with_modifying_with_one_different_character(requestInfo, messageInfo)\n self.run_replace_param_with_radamsa_output(requestInfo, messageInfo)\n self.run_remove_param(requestInfo, messageInfo)\n self.run_replace_body_with_radamsa_output(requestInfo, messageInfo)\n self.updatedb(messageInfo.getHttpService().getHost(), requestInfo.getParameters())", "def process_response(response):\n # Print it and exit with 1 if operation wasn't successful\n print(response['message'])\n if response['status'] != 'success':\n sys.exit(1)", "def _receive(self, what, address='localhost:44818', **kwargs):\n\n tag_string = ''\n tag_string = EnipProtocol._tuple_to_cpppo_tag(what)\n\n # print(\"DEBUG \" + tag_string)\n\n cmd = shlex.split(\n self._client_cmd +\n '--log ' + self._client_log +\n ' --print --address ' + address +\n ' ' + tag_string\n )\n # print 'DEBUG enip _receive cmd shlex list: ', cmd\n\n try:\n client = subprocess.Popen(cmd, shell=False,\n stdout=subprocess.PIPE)\n\n # client.communicate is blocking\n raw_out = client.communicate()\n # print('DEBUG1 ', raw_out)\n\n # value is stored as first tuple element\n # between a pair of square brackets\n\n raw_string = raw_out[0]\n # print(\"DEBUG2 \" + str(raw_string))\n raw_string = str(raw_string)\n out = raw_string[(raw_string.find('[') + 1):raw_string.find(']')]\n # print(\"DEBUG4 \" + out)\n return out\n\n except Exception as error:\n print('ERROR enip _receive: ', error)", "def handle_response(response):\n try:\n #Cancel any active timeout for this HTTPS call.\n if timeoutCall.active():\n timeoutCall.cancel()\n def cbBody(bodystring):\n \"\"\"Process response body for JSON-RPC batch query invocation.\"\"\"\n try:\n results = None\n #The bosy SHOULD be JSON, 
it not always is.\n try:\n results = json.loads(bodystring)\n except Exception as ex:\n #If the result is NON-JSON, may want to move to the next node in the node list\n self.log.error(\"Non-JSON response from server {node!r}\", node = self.nodes[self.node_index])\n self._next_node()\n #Add the failed sub-queue back to the command queue, we shall try again soon.\n self.queue = subqueue + self.queue\n if results != None:\n ok = False\n if isinstance(results, dict):\n #Running in legacy single JSON-RPC call mode (no batches), process the result of the single call.\n process_one_result(results)\n ok = True\n else:\n if isinstance(results, list):\n #Running in batch mode, process the batch result, one response at a time\n for reply in results:\n process_one_result(reply)\n ok = True\n else:\n #Completely unexpected result type, may want to move to the next node in the node list.\n self.log.error(\"Error: Invalid JSON-RPC response, expecting list as response on batch. {node!r}\",node = self.nodes[self.node_index])\n self._next_node()\n #Add the failed sub-queue back to the command queue, we shall try again soon.\n self.queue = subqueue + self.queue\n if ok == True:\n #Clean up the entries dict by removing all fully processed commands that now are no longer in the queu.\n for request_id in subqueue:\n if request_id in self.entries:\n del self.entries[request_id]\n else:\n self.log.error(\"Error: No response entry for request entry in result: {rid!r}. {node!r}\",rid=request_id, node = self.nodes[self.node_index])\n except Exception as ex:\n self.log.failure(\"Error in cbBody {err!r}. {node!r}\",err=str(ex), node = self.nodes[self.node_index])\n #This HTTPS POST is now fully processed.\n self.active_call_count = self.active_call_count - 1\n #Invoke self, possibly sending new queues RPC calls to the current node\n self()\n deferred2 = readBody(response)\n deferred2.addCallback(cbBody)\n return deferred2\n except Exception as ex:\n self.log.failure(\"Error in handle_response {err!r}. 
{node!r}\",err=str(ex),node = self.nodes[self.node_index])\n #If something went wrong, the HTTPS POST isn't active anymore.\n self.active_call_count = self.active_call_count - 1\n #Invoke self, possibly sending new queues RPC calls to the current node\n self()", "def HandleMessage(msg, conn, requester):\n\n print(\"\\nReceived a new message:\\n{}\".format(msg))\n if msg['__class__'] == 'ReqDecryption':\n msg = msg['__value__']\n C = msg['C']\n D = msg['D']\n C = parse_point(C)\n D = parse_point(D)\n\n print(\"\\nReceived a new tallied contribution:\")\n print(\"C = {}\\nD = {}\".format(C, D))\n out = requester.decrypt(C, D)\n\n req = RespDecryption(out[0], out[1], out[2])\n write_message(conn, req)\n print(\"\\nThe final outcome is:\\n{}\".format(out[0]))\n exit()", "def _response(self, *lines):\n for line in lines:\n self.client.dataReceived(line + b'\\r\\n')\n self.client.dataReceived(\n b'0001 OK [READ-ONLY] ' + self.command + b' completed\\r\\n')", "def process(self, msg):\n print \"HANDLER: received a msg: %s\" % msg", "def respond(self, frame):\n try:\n self.reply_ack(frame)\n time.sleep(0.1)\n cmd = readable_value(frame[ZWaveSwitchBin], Z_CMD)\n home_id = text_id(frame.homeid)\n\n if cmd == CMD_SET:\n if self.decoys[home_id][str(frame.dst)][DEC_STATE] != DEC_STATE_CONTROLLER:\n self.reply_report(frame)\n self.logger.debug('Responding ACK, REPORT')\n\n elif cmd == CMD_GET:\n if self.decoys[home_id][str(frame.dst)][DEC_STATE] != DEC_STATE_CONTROLLER:\n self.reply_report(frame)\n self.logger.debug('Responding ACK, REPORT')\n\n elif cmd == CMD_REPORT:\n self.logger.debug('Responding ACK')\n\n except Exception as e:\n pass", "def receiveCallback(self, socket, stateMask):\n # read the PDU header\n pduHeader = self.recv(EGSE.EDENPDU.PDU_HEADER_BYTE_SIZE)\n if pduHeader == None:\n # failure handling was done automatically by derived logic\n return\n # consistency check\n pduHeaderLen = len(pduHeader)\n if pduHeaderLen != EGSE.EDENPDU.PDU_HEADER_BYTE_SIZE:\n LOG_ERROR(\"Read of PDU header failed: invalid size: \" + str(pduHeaderLen))\n self.disconnectClient()\n return\n pdu = EGSE.EDENPDU.PDU(pduHeader)\n # read the data field for the PDU\n dataFieldLength = pdu.dataFieldLength\n if dataFieldLength > 0:\n dataField = self.recv(dataFieldLength)\n if dataField == None:\n # failure handling was done automatically by derived logic\n return\n # consistency check\n remainingSizeRead = len(dataField)\n if remainingSizeRead != dataFieldLength:\n LOG_ERROR(\"Read of remaining PDU failed: invalid remaining size: \" + str(remainingSizeRead))\n self.disconnectClient()\n return\n pdu.setDataField(dataField)\n # dispatch depending on pduType and subType\n try:\n if pdu.pduType == EGSE.EDENPDU.PDU_TYPE_TC:\n if pdu.subType == EGSE.EDENPDU.SUB_TYPE_SPACE:\n # (TC,SPACE)\n LOG_INFO(\"EDEN.Server.receiveCallback(TC,SPACE)\")\n tcSpacePDU = EGSE.EDENPDU.TCspace(pdu.buffer)\n if self.notifyTcSpace(tcSpacePDU.getCCSDSpacket()):\n # forwarding OK\n self.sendTc_eSpace(tcSpacePDU, 0)\n self.sendTc_aSpace(0, tcSpacePDU.tcIdentificationWord)\n else:\n # forwarding failed\n self.sendTc_eSpace(tcSpacePDU, 1)\n self.sendTc_aSpace(1, tcSpacePDU.tcIdentificationWord)\n elif pdu.subType == EGSE.EDENPDU.SUB_TYPE_SCOE:\n # (TC,SCOE)\n LOG_INFO(\"EDEN.Server.receiveCallback(TC,SCOE)\")\n tcScoePDU = EGSE.EDENPDU.TCscoe(pdu.buffer)\n if self.notifyTcScoe(tcScoePDU.getCCSDSpacket()):\n # forwarding OK\n self.sendTc_eScoe(tcScoePDU, 0)\n self.sendTc_aScoe(0, tcScoePDU.tcIdentificationWord)\n else:\n # forwarding failed\n 
self.sendTc_eScoe(tcScoePDU, 1)\n self.sendTc_aScoe(1, tcScoePDU.tcIdentificationWord)\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid subType: \" + str(pdu.subType))\n LOG(\"PDU = \" + str(pdu))\n self.disconnectClient()\n elif pdu.pduType == EGSE.EDENPDU.PDU_TYPE_CMD:\n if pdu.subType == EGSE.EDENPDU.SUB_TYPE_EXEC:\n # (CMD,EXEC)\n LOG_INFO(\"EDEN.Server.receiveCallback(CMD,EXEC)\")\n self.notifyCmdExec(pdu.getDataField().tostring())\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid subType: \" + str(pdu.subType))\n LOG(\"PDU = \" + str(pdu))\n self.disconnectClient()\n else:\n LOG_ERROR(\"Read of PDU header failed: invalid pduType: \" + str(pdu.pduType))\n LOG(\"PDU = \" + str(pdu))\n self.disconnectClient()\n except Exception as ex:\n LOG_ERROR(\"Processing of received PDU failed: \" + str(ex))\n self.disconnectClient()", "def ProcessRemoteCommandsRequest(self):\n return (200, '')", "def exec_handler(self, body = None):\n h = self.MyTestHandler()\n h.request = Request.blank('/test_rpc/')\n h.response = Response()\n h.request.body = body\n h.post()\n return (h.response._Response__status[0], h.response.out.getvalue())", "def _sendVersion_result (self, (code, data)) :\n\n assert code == \"REPLY_HELLO\"\n\n return data", "def test_rsp_failure(self):\n\n def handle(event):\n return 0x0112, Dataset()\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStep)\n\n handlers = [(evt.EVT_N_CREATE, handle)]\n scp = ae.start_server((\"localhost\", 11112), evt_handlers=handlers, block=False)\n\n ae.add_requested_context(ModalityPerformedProcedureStep)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_create(\n ds, ModalityPerformedProcedureStep, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0112\n assert ds is None\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def handle(self):\n req_lines = self._read_lines()\n if not req_lines:\n self.cleanup()\n for req in req_lines:\n log.debug('%s => %s', self.client, req)\n req = req.split()\n cmd = req.pop(0)\n try:\n self.get_command(cmd)(req)\n result = [OK]\n except Exception as error:\n result = [ERROR, error.message]\n self.send_line(' '.join(result))\n self.flush()", "def process_frame():\n return \"OK\"", "def set_emissivity_resp(self):\n return_mess = b''\n while True:\n return_mess += self.recv_resp_simple(1)\n if return_mess[-2:] == self.end_mess_bytes:\n # Sometimes we have overflow of data when attempting to stop stream - probably extra scan data we\n # haven't picked up yet. 
So we try to decode it all, but if we hit an error we just decode the last\n # few bytes which contain the message we are interested in\n # try:\n # mess_ascii = return_mess.decode(self.encoding)\n # except UnicodeDecodeError as e:\n # mess_ascii = return_mess[-7:].decode(self.encoding)\n start_idx = return_mess.rfind(b'REP') # Find start of response\n mess_ascii = return_mess[start_idx:] # Extract response\n return_code = int(mess_ascii[4:-2]) # Extract return code and convert to integer\n\n if self.gui_message is not None:\n if return_code == 0:\n self.gui_message.message('[LSP] Response: emissivity set!')\n else:\n self.gui_message.message('[LSP] Set emissivity error code: %i' % return_code)\n else:\n if return_code == 0:\n print('[LSP] Response: emissivity set!')\n else:\n print('[LSP] Set emissivity error code: %i' % return_code)\n return return_code", "def test_rsp_none(self):\n\n def handle(event):\n time.sleep(5)\n return 0x0000, Dataset()\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 0.2\n ae.network_timeout = 5\n ae.add_supported_context(ModalityPerformedProcedureStepNotification)\n scp = ae.start_server(\n (\"localhost\", 11112),\n block=False,\n evt_handlers=[(evt.EVT_N_EVENT_REPORT, handle)],\n )\n\n ae.add_requested_context(ModalityPerformedProcedureStepNotification)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_event_report(\n ds,\n 1,\n ModalityPerformedProcedureStepNotification,\n \"1.2.840.10008.5.1.1.40.1\",\n )\n assert status == Dataset()\n assert ds is None\n assert assoc.is_aborted\n\n scp.shutdown()", "def handle_ACS_worklist_info_response(self,message,conn):\n response=ResponseClientHandle.switch_msg_stream_type_str2dict(message)\n \n msg_type=response.get(event.KEY_MESSAGE)\n msg_group = int(msg_type) & 0xFF00\n \n if (msg_group == event.EVENT_WORKLIST_GROUP):\n \n # check worklist reseve response\n if(msg_type == event.EV_WORKLIST_RESERVE_RSP):\n log.debug_info(\"ACS server's response worklist reserve suc\")\n\n # call worklist execute start request \n DUTqueue.ResponseWLexecHandle.handle_WLexec_start_request(self.msg,response,None)\n \n elif(msg_type == event.EV_WORKLIST_RESERVE_FAIL):\n log.debug_info(\"ACS server's response worklist reserve fail\")\n\n ResponseClientHandle.handle_send_response(response,conn)\n \n # check worklist start response \n elif(msg_type == event.EV_WORKLIST_EXEC_START_RSP):\n log.debug_info(\"ACS server's response worklist execute start suc\")\n \n # call worklist execute request\n DUTqueue.ResponseWLexecHandle.handle_WLexec_request(self.dut_obj_handle,self.msg,response,conn)\n\n elif(msg_type == event.EV_WORKLIST_EXEC_START_FAIL):\n log.debug_info(\"ACS server's response worklist execute start fail\")\n \n ResponseClientHandle.handle_send_response(response,conn)\n\n # check worklist finish response \n elif(msg_type == event.EV_WORKLIST_EXEC_FINISH_RSP):\n log.debug_info(\"ACS server's response worklist execute finish suc\")\n\n elif(msg_type == event.EV_WORKLIST_EXEC_FINISH_FAIL):\n log.debug_info(\"ACS server's response worklist execute finish fail\")\n \n # check worklist build/bind/download response\n else:\n ResponseClientHandle.handle_send_response(response,conn)\n \n else:\n err_info = \"Unsupport msg event group:%d\" % msg_group\n log.debug_info(err_info)\n ResponseClientHandle.handle_except(self.msg,self.conn,err_info)", "def _process_error_response(self, toc, buf):\n\n\t\terrorSev = 
None\n\t\terrorMsg = None\n\t\terrorDet = None\n\n\t\tif toc != 'E' and toc != 'N':\n\t\t\treturn\n\n\t\tparts = buf.split(b'\\0')\n\n\t\tfor part in parts:\n\t\t\tpart = part.decode()\n\t\t\tif len(part) < 1:\n\t\t\t\tcontinue\n\t\t\t_type = part[0]\n\t\t\tif _type == 'M':\n\t\t\t\terrorMsg = part[1:]\n\t\t\telif _type == 'S':\n\t\t\t\terrorSev = part[1:]\n\t\t\telif _type == 'D':\n\t\t\t\terrorDet = part[1:]\n\t\t\n\t\tif not errorSev and not errorMsg:\n\t\t\treturn\n\n\t\tif toc != 'E':\t\t\t\t# This is not an error report it as debug\n\t\t\tif self.Pfdebug:\n\t\t\t\tself.Pfdebug.write(f'BACKEND {errorSev}: {errorMsg}\\n')\n\t\t\t\tif errorDet:\n\t\t\t\t\tself.Pfdebug.write(f'DETAIL: {errorDet}\\n')\n\t\telse:\n\t\t\tif errorDet:\n\t\t\t\tself.pcp_internal_error(f'{errorSev}: {errorMsg}\\nDETAIL: {errorDet}\\n')\n\t\t\telse:\n\t\t\t\tself.pcp_internal_error(f'{errorSev}: {errorMsg}\\n')\n\t\t\tself._setResultStatus(ResultStateType.BACKEND_ERROR)", "def test_rsp_success(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_action(\n ds, 1, ProceduralEventLogging, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0000\n assert ds.PatientName == \"Test^test\"\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "def _parseData(self, payload):\n out=[]\n bytesParsed = 0\n while bytesParsed < len(payload):\n\n #check for the extended Code Level, code and length\n #count the number of EXCODE_BYTE\n #extendedCodeLevel = sum([1 for x in data if x == EXCODE_BYTE] )\n #bytesParsed += extendedCodeLevel\n\n #identify the length of the expected bytes in the payload\n code = payload[bytesParsed]\n bytesParsed +=1\n if code > 0x7F:\n # multi-byte code, length > 1\n length = payload[bytesParsed]\n bytesParsed +=1\n else:\n length = 1\n\n if code == SENSOR_STATUS:\n # value of 0==no contact, 200==contact\n #print \"leadoff: %i\" % payload[bytesParsed]\n out.append( {'timestamp': self.curtime, 'leadoff': payload[bytesParsed] } )\n bytesParsed +=1\n\n elif code == HEART_RATE:\n #print \"HR: %i\" % payload[bytesParsed]\n out.append( {'timestamp': self.curtime, 'HR': payload[bytesParsed:] } )\n bytesParsed +=1\n\n elif code == CONFIG_BYTE:\n #print \"config: %i\" % payload[bytesParsed]\n out.append( {'timestamp': self.curtime, 'config': payload[bytesParsed:] } )\n bytesParsed +=1\n\n elif code == RAW_ECG:\n # raw value is between -32768 and 32767, in twos compliment form\n # if the raw value is higher than 32768, it should be rolled around to allow for negative values\n raw = payload[bytesParsed]*256 + payload[bytesParsed]\n if raw >= 32768: \n raw = raw - 65536\n #print \"ecg: %i\" % ecg\n\n # create the timestamp on each ECG sample, starting from the first\n if self.starttime is None:\n self.starttime = time.time()\n self.curtime = self.starttime\n else:\n self.curtime = self.curtime + 1./self.Fs\n\n out.append( {'timestamp': self.curtime, 'ecg_raw': raw } )\n bytesParsed += length\n\n elif code == DEBUG_1:\n #print \"debug1: \" + str(payload[bytesParsed:]).strip('[]')\n out.append( {'timestamp': self.curtime, 
'debug1': payload[bytesParsed:] } )\n bytesParsed += length\n\n elif code == DEBUG_2:\n #print \"debug2: \" + str(payload[bytesParsed:]).strip('[]')\n out.append( {'timestamp': self.curtime, 'debug2': payload[bytesParsed:] } )\n bytesParsed += length\n\n else:\n print \"unknown code: %i\" % code\n\n return out", "def cmd_handler():\n context = zmq.Context()\n\n # socket to receive commands (a subscription to ELECTION_CODE channel)\n cmd_socket = context.socket(zmq.SUB)\n cmd_socket.connect (\"tcp://%s:5556\" % SERVER_HOST)\n topicfilter = \"politiche2013\"\n cmd_socket.setsockopt(zmq.SUBSCRIBE, topicfilter)\n\n # socket to send replies\n reply_sender = context.socket(zmq.PUSH)\n reply_sender.connect(\"tcp://%s:5557\" % SERVER_HOST)\n\n # main loop\n while True:\n print \"Aye sir, unit {0} ready for your commands ...\".format(computer_id)\n # wait for a command\n string = cmd_socket.recv()\n\n # action\n print \"Message received: '%s'\" % (string,)\n\n # send reply to server\n print \"Sending reply to server\"\n reply = { 'unit' : computer_id, 'status' : 'configured'}\n reply_sender.send_json(reply)", "def clientHelloResp(n, e):\n status = \"105 Hello \"+ str(n) + \" \" + str(e)\n return status", "def clientHelloResp(n, e):\n status = \"105 Hello \"+ str(n) + \" \" + str(e)\n return status", "def process_response(self, id, result):\n raise NotImplementedError('process_response not implemented in BaseService')", "def _validate_GetEC_response(self, response, prompt):\n error = self._find_error(response)\n\n if error:\n log.error(\"GetEC command encountered error; type='%s' msg='%s'\", error[0], error[1])\n raise InstrumentProtocolException('GetEC command failure: type=\"%s\" msg=\"%s\"' % (error[0], error[1]))\n\n return response", "def _logic(self, algosec_hostname):\n logger.info(\"algosec_hostname: %s\", algosec_hostname)\n\n # PUT YOUR FUNCTION IMPLEMENTATION CODE HERE\n yield StatusMessage(\"starting...\")\n isolate_traffic_lines = [\n ChangeRequestTrafficLine(\n action=ChangeRequestAction.DROP,\n sources=[algosec_hostname],\n destinations=['*'],\n services=['*'],\n ),\n ChangeRequestTrafficLine(\n action=ChangeRequestAction.DROP,\n sources=['*'],\n destinations=[algosec_hostname],\n services=['*'],\n )\n ]\n try:\n yield StatusMessage(\"creating isolation change request...\")\n change_request_url = self.algosec.fire_flow().create_change_request(\n subject=self.options['isolation_request_subject'].format(algosec_hostname),\n requestor_name=self.options['isolation_request_requestor'],\n email=self.options['isolation_request_requestor_email'],\n traffic_lines=isolate_traffic_lines,\n description=self.options['isolation_request_description'],\n template=self.options.get('isolation_request_template') or None,\n )\n except Exception:\n raise Exception(\n \"Error occured while trying to create the isolation change request for {}\".format(algosec_hostname)\n )\n\n yield StatusMessage(\"done...\")\n\n change_request_id = int(change_request_url.split('=')[1])\n result = {\n 'id': change_request_id,\n 'hostname': algosec_hostname,\n 'url': '<a href=\"{}\">Change Request #{}</a>'.format(change_request_url, change_request_id),\n }\n\n # Produce a FunctionResult with the result\n yield FunctionResult(result)", "def _r_send_result(self, response, protocol):\n #print(\"Send result: %s\" % result)\n protocol.send_message(response)", "def recv_read_response(self, recv_payload): \n\t#Only unpack the headers because we want to store the file data as binary\n\tunpacked_payload = struct.unpack('!H3IQ', 
recv_payload[:22])\n\tstatus = unpacked_payload[0:1][0]\n\tepoch_no = unpacked_payload[1:2][0]\n\thandle_no = unpacked_payload[2:3][0]\t\n\t\n\t#Check that file handle is the same, to make sure it is the same file request.\n\tif (self.epoch_no == epoch_no and self.handle_no == handle_no):\n\t start_position = unpacked_payload[3:4][0]\n\t num_bytes_been_read = unpacked_payload[4:5][0] \n\t # If we receive less bytes than the number we requested to read, this means that\n\t # end of file has been reached\n\t if (num_bytes_been_read < self.NUM_BYTES_TO_READ):\n\t\tself.eof = True\n\t data_to_write = recv_payload[22:]\t \n\t #If status field says that response contains real data: Append to file. Otherwise react \n\t #depending on error code received.\n\t #Status 00 = OK\n\t #Status 01 = Epoch no. of file handle doesnt match epoch no. of current invocation\n\t #Status 10 = No context found for file-handle and no data has been read\n\t #Status 11 = Context could be found but start position out of range\n\t if (status == 0b00):\n\t\tself.file_append.seek(start_position)\n\t\tself.file_append.write(data_to_write)\n\t elif (status == 0b01):\n\t\tprint(\"Error: Epoch no. of file handle doesnt match epoch no. of current invocation\")\n\t\tsys.exit()\n\t elif (status == 0b10):\n\t\tprint(\"Error: No context found for file-handle and no data has been read\")\n\t\tsys.exit()\n\t elif(status == 0b11):\n\t\tprint(\"Error: Context could be found but start position out of range\")\n\t\tsys.exit()\n\telse:\n\t print(\"Error: File handle does not match file handle stored in client. Wrong file received.\")\n\t sys.exit() \t \n\t#Then return control to read_service_loop() method so that next iteration of send_read_request \n\t#from new start position is called.\n return", "def receive_primitive_execution_response(self):\n resp = self.read_response_buffer(ATI_RESPONSE_BUFFER_SIZE)\n return resp[1:]", "def test_rsp_invalid(self):\n\n def handle(event):\n return 0x0000, event.action_information\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(ProceduralEventLogging)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_ACTION, handle)]\n )\n\n ae.add_requested_context(ProceduralEventLogging)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n class DummyResponse:\n is_valid_response = False\n\n class DummyDIMSE:\n msg_queue = queue.Queue()\n gotten = False\n\n def send_msg(*args, **kwargs):\n return\n\n def get_msg(self, *args, **kwargs):\n if not self.gotten:\n self.gotten = True\n return None, DummyResponse()\n return None, None\n\n assoc._reactor_checkpoint.clear()\n while not assoc._is_paused:\n time.sleep(0.01)\n assoc.dimse = DummyDIMSE()\n ds = Dataset()\n ds.PatientName = \"Test^test\"\n status, ds = assoc.send_n_action(\n ds, 1, ProceduralEventLogging, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status == Dataset()\n assert ds is None\n assert assoc.is_aborted\n\n scp.shutdown()", "def comms_callback(received, prog):\n\n try:\n self.handle_incoming_bytes(received)\n return int(self.__comm_term) \n\n except Exception as e:\n self.__comm_term = True\n self.__comm_exc = e\n return 1", "def handle(self):\n try:\n # First, send a response to allow the server to continue.\n rsp = \"220 dictserver <xnooptions> <msgid@msgid>\\n\"\n self.request.sendall(rsp.encode(\"utf-8\"))\n\n # Receive the request.\n data = self.request.recv(1024).strip()\n log.debug(\"[DICT] Incoming data: %r\", 
data)\n\n if VERIFIED_REQ in data:\n log.debug(\"[DICT] Received verification request from test \"\n \"framework\")\n response_data = VERIFIED_RSP.format(pid=os.getpid())\n else:\n log.debug(\"[DICT] Received normal request\")\n response_data = \"No matches\"\n\n # Send back a failure to find.\n response = \"552 {0}\\n\".format(response_data)\n log.debug(\"[DICT] Responding with %r\", response)\n self.request.sendall(response.encode(\"utf-8\"))\n\n except IOError:\n log.exception(\"[DICT] IOError hit during request\")", "def OnESPacket(current_pid, packet, header_size):\n pass", "def handle_line(self, line):\n _LOGGER.debug(\"TCP: Handle Line: Income raw string: %s\", line)\n try:\n event = SIAEvent(line)\n _LOGGER.debug(\"TCP: Handle Line: event: %s\", str(event))\n if not event.valid_message:\n _LOGGER.error(\n \"TCP: Handle Line: CRC mismatch, received: %s, calculated: %s\",\n event.msg_crc,\n event.calc_crc,\n )\n raise Exception(\"CRC mismatch\")\n if event.account not in HASS_PLATFORM.data[DOMAIN]:\n _LOGGER.error(\n \"TCP: Handle Line: Not supported account %s\", event.account\n )\n raise Exception(\n \"TCP: Handle Line: Not supported account {}\".format(event.account)\n )\n response = HASS_PLATFORM.data[DOMAIN][event.account].process_event(event)\n except Exception as exc:\n _LOGGER.error(\"TCP: Handle Line: error: %s\", str(exc))\n timestamp = datetime.fromtimestamp(time.time()).strftime(\n \"_%H:%M:%S,%m-%d-%Y\"\n )\n response = '\"NAK\"0000L0R0A0[]' + timestamp\n\n header = (\"%04x\" % len(response)).upper()\n response = \"\\n{}{}{}\\r\".format(\n AlarmTCPHandler.crc_calc(response), header, response\n )\n byte_response = str.encode(response)\n self.request.sendall(byte_response)", "def _handle_call(self, call):\n try:\n result = self.execute_call(call)\n if not call.want_response:\n return\n if isiter(result):\n for to_yield in result:\n self.cxn.send_message((\"yield\", to_yield))\n self.cxn.send_message((\"stop\", ))\n else:\n self.cxn.send_message((\"return\", result))\n except ConnectionError:\n raise\n except Exception, e:\n if call.want_response:\n self.cxn.send_message((\"raise\", self._serialize_exception(e)))\n raise", "def handleCommand(self,message):\n command = message[0]\n pcaId = None\n if len(message) > 1:\n pcaId = message[1].decode()\n if command == codes.ping:\n self.commandSocket.send(codes.ok)\n elif command == codes.pcaAsksForDetectorStatus:\n pcaId = message[1].decode()\n if pcaId and pcaId in self.PCAs:\n if pcaId in self.pcaConfigTag:\n self.commandSocket.send_multipart([self.StateMachineForPca[pcaId].currentState.encode(),self.pcaConfigTag[pcaId].encode()])\n else:\n self.commandSocket.send_multipart([self.StateMachineForPca[pcaId].currentState.encode()])\n elif command == codes.addPartition:\n data = partitionDataObject(json.loads(message[1].decode()))\n self.addPartition(data)\n self.commandSocket.send(codes.ok)\n elif command == codes.deletePartition:\n pcaId = message[1].decode()\n self.deletePartition(pcaId)\n self.commandSocket.send(codes.ok)\n elif command == codes.remapDetector:\n detectorId = message[2].decode()\n if message[1] == codes.removed:\n self.abortFunction(self.detectorMapping[detectorId])\n del self.detectorMapping[detectorId]\n else:\n pcaId = message[1].decode()\n self.abortFunction(pcaId)\n if detectorId in self.detectorMapping:\n self.abortFunction(self.detectorMapping[detectorId])\n self.detectorMapping[detectorId] = pcaId\n self.commandSocket.send(codes.ok)\n #transitions\n elif command.decode() == 
GlobalSystemTransitions.configure:\n conf = None\n if len(message) > 2:\n conf = configObject(json.loads(message[2].decode()))\n if self.isPCAinTransition[pcaId]:\n self.commandSocket.send(codes.busy)\n elif not self.StateMachineForPca[pcaId].checkIfPossible(GlobalSystemTransitions.configure) or not conf:\n self.commandSocket.send(codes.error)\n print(\"error\")\n else:\n self.commandSocket.send(codes.ok)\n self.isPCAinTransition[pcaId] = True\n workThread = threading.Thread(name=\"worker\", target=self.configure, args=(pcaId,conf))\n workThread.start()\n elif command.decode() == GlobalSystemTransitions.abort:\n if pcaId and pcaId in self.PCAs:\n self.abortFunction(pcaId)\n self.commandSocket.send(codes.ok)\n else:\n self.commandSocket.send(codes.error)\n elif command.decode() == GlobalSystemTransitions.reset:\n self.reset(pcaId)\n self.commandSocket.send(codes.ok)\n else:\n #command unknown\n return False\n return True", "def handler(topic, message: json) -> None:\n\n resp = {}\n operation_timeout = TIMEOUT\n response_format = RESPONSE_FORMAT\n logger.info(f\"Processing message received on topic {topic}\")\n\n # validate message and attributes\n try:\n command = json.loads(message.decode(\"utf-8\"))\n # Verify required keys are provided\n if not all(k in command for k in (\"txid\", \"command\")):\n resp[\"response\"] = MSG_MISSING_ATTRIBUTE\n resp[\"return_code\"] = 255\n client = iotcore.Client()\n client.publish(\n args.response_topic,\n bytes(json.dumps(resp), encoding=\"utf-8\"),\n iotcore.QOS.AT_MOST_ONCE,\n )\n logger.error(f\"{MSG_MISSING_ATTRIBUTE} for message: {message}\")\n return\n # check for and update optional settings\n for k in command:\n if k.lower() == \"timeout\" and isinstance(command[k], (int, float)):\n operation_timeout = command[k]\n elif k.lower() == \"format\" and (\n any(format in command[k] for format in [\"json\", \"text\"])\n ):\n response_format = command[k].lower()\n except json.JSONDecodeError as e:\n resp[\"response\"] = MSG_INVALID_JSON\n resp[\"return_code\"] = 255\n client = iotcore.Client()\n client.publish(\n args.response_topic,\n bytes(json.dumps(resp), encoding=\"utf-8\"),\n iotcore.QOS.AT_MOST_ONCE,\n )\n logger.error(f\"{MSG_INVALID_JSON} for message: {message}\")\n return\n except Exception as e:\n raise e\n\n # Run command and process results\n client = iotcore.Client(timeout=operation_timeout)\n try:\n output = subprocess.run(\n command[\"command\"],\n timeout=operation_timeout,\n capture_output=True,\n shell=True,\n )\n if output.returncode == 0:\n resp[\"response\"] = output.stdout.decode(\"utf-8\")\n else:\n resp[\"response\"] = output.stderr.decode(\"utf-8\")\n resp[\"txid\"] = command[\"txid\"]\n resp[\"return_code\"] = output.returncode\n except subprocess.TimeoutExpired:\n resp[\"response\"] = MSG_TIMEOUT\n logger.error(\n f\"Comand took longer than {operation_timeout} seconds for message: {message}\"\n )\n\n # Publish response\n if response_format == \"json\":\n command_response = bytes(json.dumps(resp), encoding=\"utf-8\")\n elif response_format == \"text\":\n command_response = bytes(\n f\"TX_ID: {command['txid']}\\nRETURN_CODE: {resp['return_code']}\\nRESPONSE:\\n{resp['response']}\",\n encoding=\"utf-8\",\n )\n client.publish(\n args.response_topic,\n command_response,\n iotcore.QOS.AT_MOST_ONCE,\n )", "async def parse_handle_response(self, json_response):\n try:\n vasp = self.vasp\n other_key = vasp.info_context.get_peer_compliance_verification_key(\n self.other_address_str\n )\n message = await 
other_key.verify_message(json_response)\n response = json.loads(message)\n response = CommandResponseObject.from_json_data_dict(\n response, JSONFlag.NET\n )\n\n return self.handle_response(response)\n\n except OffChainInvalidSignature as e:\n logger.warning(\n f'(other:{self.other_address_str}) '\n f'Signature verification failed. OffChainInvalidSignature: {e}'\n )\n raise e\n except JSONParsingError as e:\n logger.warning(\n f'(other:{self.other_address_str}) JSONParsingError: {e}'\n )\n raise e\n except OffChainException or OffChainProtocolError as e:\n logger.warning(\n f'(other:{self.other_address_str}) '\n f'OffChainException/OffChainProtocolError: {e}',\n )\n raise e", "def handle_received(self) -> None:\n self.buffer: bytes\n while self.buffer:\n try:\n request, self.buffer = parse_request(self.buffer)\n if request is None:\n _LOGGER.debug(\"Not enough data to parse request on event channel\")\n break\n\n _LOGGER.debug(\"Got message on event channel: %s\", request)\n\n # Send a positive response to satisfy the other end of the channel\n # TODO: Add public method to pyatv.http to format a message\n headers = {\n \"Content-Length\": 0,\n \"Audio-Latency\": 0,\n \"Server\": request.headers.get(\"Server\"),\n \"CSeq\": request.headers.get(\"CSeq\"),\n }\n response = (\n f\"{request.protocol}/{request.version} 200 OK\\r\\n\"\n + \"\\r\\n\".join(f\"{key}: {value}\" for key, value in headers.items())\n + \"\\r\\n\\r\\n\"\n )\n self.send(response.encode(\"utf-8\"))\n except Exception:\n _LOGGER.exception(\"Failed to handle message on event channel\")", "def test_rsp_success(self):\n\n def handle(event):\n ds = Dataset()\n ds.PatientName = \"Test\"\n ds.SOPClassUID = DisplaySystem\n ds.SOPInstanceUID = \"1.2.3.4\"\n return 0x0000, ds\n\n self.ae = ae = AE()\n ae.acse_timeout = 5\n ae.dimse_timeout = 5\n ae.network_timeout = 5\n ae.add_supported_context(DisplaySystem)\n scp = ae.start_server(\n (\"localhost\", 11112), block=False, evt_handlers=[(evt.EVT_N_GET, handle)]\n )\n\n ae.add_requested_context(DisplaySystem)\n assoc = ae.associate(\"localhost\", 11112)\n assert assoc.is_established\n\n status, ds = assoc.send_n_get(\n [(0x7FE0, 0x0010)], DisplaySystem, \"1.2.840.10008.5.1.1.40.1\"\n )\n assert status.Status == 0x0000\n assert ds is not None\n assert isinstance(ds, Dataset)\n assert ds.PatientName == \"Test\"\n assert ds.SOPClassUID == DisplaySystem\n assert ds.SOPInstanceUID == \"1.2.3.4\"\n assoc.release()\n assert assoc.is_released\n\n scp.shutdown()", "async def interpret(self, response, id):\n if(response['id'] == id and response['event'] == \"data\"):\n return await self.on_data(response['data'])\n if(response['id'] == None and response['event'] == \"heartbeat\"):\n return await self.on_heartbeat(response['data'])\n if(response['id'] == id and response['event'] == \"subscribed\" and response['data']['success'] == True):\n return await self.on_subscribed(response['data'])\n if(response['event'] == 'error' and response['data']['success'] == False):\n return await self.on_error(response['data'])\n return None", "def do(self, command):\r\n command += xsct_line_end\r\n logger.info('Sending command: %s ...', repr(command))\r\n self.send(command)\r\n ans = self.recv()\r\n if ans.startswith('okay'):\r\n return ans[5:]\r\n if ans.startswith('error'):\r\n raise PyXilException(ans[6:])\r\n raise PyXilException('Illegal start-string in protocol. 
Answer is: ' + ans)", "def parse_message(message, sock):\n message_arr = str(message).split()\n if message_arr[1] == 'STATUS':\n first = message_arr[2]\n operator = message_arr[3]\n second = message_arr[4]\n\n result = calculate(first, second, operator)\n\n sock.send(\"cs3700spring2016 \" + str(result) + '\\n')\n if message_arr[1] == 'BYE':\n sock.close()\n global closed_flag\n closed_flag = True", "async def process(self, msg):\n logger.debug(\"msg:\", json.dumps(msg), caller=self)\n e = msg.get(\"e\")\n if e == \"executionReport\": # Order update.\n if msg[\"s\"] != self._raw_symbol:\n return\n order_no = \"{}_{}\".format(msg[\"i\"], msg[\"c\"])\n if msg[\"X\"] == \"NEW\":\n status = ORDER_STATUS_SUBMITTED\n elif msg[\"X\"] == \"PARTIALLY_FILLED\":\n status = ORDER_STATUS_PARTIAL_FILLED\n elif msg[\"X\"] == \"FILLED\":\n status = ORDER_STATUS_FILLED\n elif msg[\"X\"] == \"CANCELED\":\n status = ORDER_STATUS_CANCELED\n elif msg[\"X\"] == \"REJECTED\":\n status = ORDER_STATUS_FAILED\n elif msg[\"X\"] == \"EXPIRED\":\n status = ORDER_STATUS_FAILED\n else:\n logger.warn(\"unknown status:\", msg, caller=self)\n return\n order = self._orders.get(order_no)\n if not order:\n info = {\n \"platform\": self._platform,\n \"account\": self._account,\n \"strategy\": self._strategy,\n \"order_no\": order_no,\n \"action\": msg[\"S\"],\n \"order_type\": msg[\"o\"],\n \"symbol\": self._symbol,\n \"price\": msg[\"p\"],\n \"quantity\": msg[\"q\"],\n \"ctime\": msg[\"O\"]\n }\n order = Order(**info)\n self._orders[order_no] = order\n order.remain = float(msg[\"q\"]) - float(msg[\"z\"])\n order.status = status\n order.utime = msg[\"T\"]\n if self._order_update_callback:\n SingleTask.run(self._order_update_callback, copy.copy(order))" ]
[ "0.6189765", "0.59589136", "0.5955229", "0.5889146", "0.57184684", "0.5701011", "0.5673351", "0.5545665", "0.55449224", "0.554279", "0.55046767", "0.5504035", "0.55030364", "0.5494673", "0.54934955", "0.5493104", "0.54797333", "0.5475469", "0.5475273", "0.5459943", "0.54588914", "0.54475", "0.5445733", "0.5444043", "0.54380316", "0.5433786", "0.5428748", "0.540038", "0.5396524", "0.53923774", "0.53787404", "0.5376299", "0.5367065", "0.53604174", "0.53596854", "0.53531075", "0.53510433", "0.53487474", "0.5346315", "0.53456134", "0.53450245", "0.53398734", "0.53335154", "0.53333867", "0.53288615", "0.53288424", "0.5327532", "0.5325109", "0.53194237", "0.5318658", "0.53167176", "0.5305042", "0.5301843", "0.52962285", "0.52938026", "0.52921844", "0.5289573", "0.52878326", "0.52818286", "0.52595526", "0.5258777", "0.5255496", "0.5254233", "0.52518284", "0.524804", "0.52406335", "0.523895", "0.5236671", "0.52347356", "0.52256876", "0.52252686", "0.5225135", "0.5214606", "0.5206653", "0.5205355", "0.5205333", "0.52005273", "0.51914984", "0.5188487", "0.5188487", "0.5188106", "0.5184917", "0.5182768", "0.517578", "0.5173454", "0.51696396", "0.51688254", "0.51664346", "0.516385", "0.516", "0.515912", "0.51590264", "0.5157112", "0.5156421", "0.5153932", "0.5150471", "0.5145328", "0.5140185", "0.513976", "0.51387376", "0.51374215" ]
0.0
-1
Confirm that the key has been agreed
def receive_confirmation(self):
    #print("(%d) receive_confirmation:" % int(time.time()))
    #print(" **> state:", self.state)
    if self.state != KeyExchangeManager.STATE_CONFIRMING:
        return
    rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))
    self.set_invoke_timer(rand_time)
    self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)
    self.key_name = self.pending_key_name
    self._set_state(KeyExchangeManager.STATE_ESTABLISHED)
    #print("*STATE_ESTABLISHED")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "async def confirm(self, msg, *args):\n if Controller.prev_regex is None:\n await msg.channel.send(**{\n 'content': 'No key change in progress',\n 'reference': msg.to_reference(),\n 'mention_author': True,\n })\n return\n Controller.prev_regex = None\n Controller.prev_help = None\n await msg.channel.send(**{\n 'content': 'Key change confirmed',\n 'reference': msg.to_reference(),\n 'mention_author': True,\n })", "def RequestConfirmation(self, device, passkey):\n print(\"RequestConfirmation:\", device, passkey)\n # reject like this:\n # raise(BluezErrorRejected)", "def confirm_email(request, key):\n alt_email = cpm.Email.objects.filter(activation_key=key)\n if alt_email.exists():\n alt_email[0].confirm()\n return redirect('/')\n hero_title = 'We weren\\'t able to complete your request...'\n return render_err_msg(request, hero_title)", "async def enter_confirmation_сode(self):\n raise VkTwoFactorCodeNeeded()", "def confirmed(self):", "def action_confirm(self):\n self.check_txt_ids()\n self.write({'state': 'confirmed'})\n return True", "def test_approve_agreement(self):\n pass", "def confirm():\n\t\traise NotImplementedError", "def ask_keys(self, update, context):\r\n update.message.reply_text('Введите новый ключ')\r\n return self.LISTEN", "def _program_confirm(self, slot, base32_key, require_button):\n if int(slot) not in (1, 2):\n return tkMessageBox.showerror(\"Error\", \"Please Choose a slot\")\n try:\n _base32_to_hex(base32_key.replace(' ', ''))\n except ValueError:\n return tkMessageBox.showerror(\n \"Error\",\n \"{0} is not a valid base32 key\".format(base32_key)\n )\n try:\n serial = self.parent.yk.serial()\n except (AttributeError, yubico.yubikey_usb_hid.usb.USBError):\n return tkMessageBox.showerror(\"Error\", \"No YubiKey detected\")\n if tkMessageBox.askokcancel(\n \"Confirm\",\n \"Overwrite slot {0} of key with serial {1}?\\n\"\n \"This will purge the existing setup and cannot be undone\".format(\n slot,\n serial\n )\n ):\n self._program_key(slot, base32_key, require_button)\n else:\n self._program_cancel()", "def confirm(self, token):\n ser = Serializer(current_app.config['SECRET_KEY'])\n try:\n data = ser.loads(token.encode('utf-8'))\n except (BadSignature, SignatureExpired):\n return False\n if data.get('confirm') != self.id:\n return False\n self.confirmed = True\n db.session.add(self)\n return True", "def test_reject_agreement(self):\n pass", "def confirm(self, message):\n raise NotImplementedError", "def receive_exchange_response(self, pubkey, random_val, hint):\n #print(\"(%d) receive_exchange_response:\" % int(time.time()))\n #print(\" **> state:\", self.state)\n if self.state != KeyExchangeManager.STATE_REQUESTING:\n return\n rand_time = int(KeyExchangeManager.KEY_REFRESH_INTERVAL*random.uniform(0.9, 1.1))\n self.set_invoke_timer(rand_time)\n self.shared_key = message_key_types.derive_shared_key(self.secret_key, pubkey, random_val)\n self._set_delete_timer(self.key_name, KeyExchangeManager.KEY_OBSOLETE_TIMER)\n self.networking.send_key_exchange_message(self.domain_id, self.counter_node_id, \"confirm\", self.peer_public_key,\n self.nonce, self.random, self.pending_key_name)\n self.key_name = self.pending_key_name\n self.set_cipher(self.key_name, hint)\n self._set_state(KeyExchangeManager.STATE_ESTABLISHED)\n #print(\"*STATE_ESTABLISHED\")", "def waiting_confirmation(self):", "def confirm(text, window=None):\n return message(text, u'Confirma', M_QUESTION, B_YES_NO, window) == R_YES", "def test_successful_verification(self):\n for i in (-2, -1, 0, 1, 2):\n\n description = 
\"TOTP not verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertTrue(confirmed, description)\n\n self.relate.confirm = False", "def confirm(key):\n manager = EmailManager.find_key(key)\n if not manager:\n # If key is wrong, return False\n return False\n\n if manager.is_active:\n # Do not reactivate users\n return False\n\n if manager.other_email:\n # If other_email\n if EmailManager.email_used(manager.other_email):\n # Other_email already being used by someone\n return False\n # Other email is not being used by anybody else, make it the active one\n\n # if username == email, set it as new email\n if manager.user.email == manager.user.username:\n manager.user.username = manager.other_email\n manager.user.email = manager.other_email\n\n manager.user.is_active = True\n manager.user.save()\n\n # Activate email\n manager.active = True\n manager.save()\n\n # Returns the activated User's obj\n return manager.user", "def confirm(self):\n with self.handle_alert(confirm=True):\n self.q(css='button#confirm').first.click()", "def _confirm_action(self, action):\n\t\treturn True", "def attempt(chal, request):\n team = Teams.query.filter_by(id=session['id']).first()\n if locked(chal):\n return False, 'Challenge Locked. You need at least {} points.'.format(chal.unlock_at)\n \n provided_key = request.form['key'].strip()\n chal_keys = Keys.query.filter_by(chal=chal.id).all()\n for chal_key in chal_keys:\n if get_key_class(chal_key.type).compare(chal_key.flag, provided_key):\n return True, 'Correct'\n return False, 'Incorrect'", "def test_acknowledge(client):\n g.test_authorized_for = []\n res = client.get(\"/v0/acknowledge\" + get_request_args)\n assert \"Thanks for acknowledging!\" in res.data.decode(\"utf-8\")", "def Confirm(self):\n self.PrintMetadata()\n answer = input(\"Continue [Y/n]? 
\").lower()\n return not answer.startswith(\"n\")", "def action_confirm(self):\n # context = self._context or {}\n inv_obj = self.env['account.invoice']\n\n brw = self.browse(self.ids[0])\n line_ids = brw.line_ids\n if not line_ids:\n raise exceptions.except_orm(\n _('Invalid Procedure!'), _(\"No retention lines\"))\n\n res = [True]\n res += [False for i in line_ids\n if (i.wh_amount <= 0.0 or\n i.base_amount <= 0.0 or\n i.wh_src_rate <= 0.0)]\n if not all(res):\n raise exceptions.except_orm(\n _('Invalid Procedure!'),\n _(\"Verify retention lines do not have Null values(0.00)\"))\n\n res = 0.0\n for i in line_ids:\n res += i.wh_amount\n if abs(res - brw.wh_amount) > 0.0001:\n raise exceptions.except_orm(\n _('Invalid Procedure!'),\n _(\"Check the amount of withholdings\"))\n\n inv_ids = [i.invoice_id.id for i in brw.line_ids]\n if inv_ids:\n inv_obj.write({'wh_src_id': self.ids[0]})\n\n return self.write({'state': 'confirmed'})", "def Confirm(self):\r\n \r\n global references\r\n self.from_ed = self.ed_result.get(\"1.0\",'end-1c')\r\n references.append(self.from_ed)\r\n self.confirm_b.configure(state = 'disabled')\r\n self.discard_b.configure(state = 'disabled')\r\n self.finalresult.configure(state = 'normal')\r\n self.finalresult.delete('1.0', END)\r\n \r\n self.final()", "def confirmation_failed(self):", "async def complete_challenge(\n self,\n key: josepy.jwk.JWK,\n identifier: acme.messages.Identifier,\n challenge: acme.messages.ChallengeBody,\n ):\n pass", "def on_ConfirmWalletOP_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError", "def decision(question):\n return click.confirm(question, show_default=True)", "def post(self):\n data = request.get_json()\n user = actions.get_user_by_email(data['email'])\n html = '<p>Confirming your account will give you </p> <b>full access to Kwikker</b>'\n subject = 'Confirm your Kwikker account, ' + user['username']\n actions.send_email(data['email'], user['username'], user['password'], subject,\n '/confirm/', html, True)\n return \"\", 200\n pass", "def confirm_wellbeing(self, update, context):\n chat_id = update.effective_chat.id\n response_code = int(update.callback_query[\"data\"].split(\"_\")[-1]) # state_{0..4}\n request_id = context.user_data[\"current_request\"]\n log.info(\"Wellbeing req:%s %s\", request_id, response_code)\n\n # Write this amount to the persistent state, so we can rely on it later\n context.bot_data[request_id][\"wellbeing\"] = response_code\n\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=c.MSG_SYMPTOMS % context.bot_data[request_id][\"beneficiary\"],\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=InlineKeyboardMarkup(k.symptom_choices, one_time_keyboard=True),\n )", "def test_terminate_agreement(self):\n pass", "def confirm_dispatch(self, update, context):\n chat_id = update.effective_chat.id\n response_code = update.callback_query[\"data\"] # caution_ok or caution_cancel\n request_id = context.user_data[\"reviewed_request\"]\n log.info(\"Confirm req:%s %s\", request_id, response_code)\n\n request_details = context.bot_data[request_id]\n\n if response_code == \"caution_ok\":\n # They're in good health, let's go\n\n # send a location message, if this info is available in the request\n if \"latitude\" in request_details:\n self.updater.bot.send_location(\n chat_id, request_details[\"latitude\"], request_details[\"longitude\"]\n )\n\n # then send the rest of the details as text\n message = c.MSG_FULL_DETAILS % request_details\n\n if \"remarks\" in request_details:\n message += \"\\n\" + 
c.MSG_OTHER_REMARKS\n for remark in request_details[\"remarks\"]:\n message += \"- %s\\n\" % remark\n\n if \"hasDisabilities\" in request_details:\n message += \"\\n%s\\n\" % (c.MSG_DISABILITY % request_details)\n\n message += \"\\n\" + c.MSG_LET_ME_KNOW\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=message,\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=InlineKeyboardMarkup(k.handling_choices),\n )\n\n else: # caution_cancel\n # eventually they chose not to handle this request\n # TODO ask them why, maybe they're sick and they need help? Discuss whether this is relevant\n self.send_message(chat_id, c.MSG_NO_WORRIES_LATER)\n context.user_data[\"reviewed_request\"] = None\n context.user_data[\"state\"] = c.State.AVAILABLE\n self.backend.update_request_status(request_id, \"CANCELLED\")", "def m_credential_confirm(node_name, key, participantDID, account_name):\n # Confirm the credential\n acc_address, acc_key = wallet.account_from_name(\n account_name, \"ThePassword\")\n if acc_address is None:\n print(f\"Account {account_name} does not exist\")\n return\n\n print(f\"\\n==> Confirming the credential by {account_name}\")\n\n try:\n resolver.confirmCredential(\n node_name=node_name,\n key=key,\n participantDID=participantDID,\n caller_key=acc_key\n )\n except ValueError as err:\n print(f\"Error: {err}\")\n else:\n print(\"Confirmed!\")", "def _program_key(self, slot, base32_key, require_button):\n try:\n config = self.parent.yk.init_config()\n except (AttributeError, yubico.yubikey_usb_hid.usb.USBError):\n return tkMessageBox.showerror(\n \"Error\",\n \"No YubiKey detected\"\n )\n config.extended_flag('SERIAL_API_VISIBLE', True)\n config.mode_challenge_response(\n 'h:' + _rzfill(_base32_to_hex(base32_key), 40),\n type='HMAC',\n variable=True,\n require_button=bool(require_button),\n )\n try:\n self.parent.yk.write_config(config, slot=slot)\n tkMessageBox.showinfo(\n \"Success\",\n \"Successfully programmed YubiKey in slot %s.\" % slot\n )\n except (\n yubico.yubico_exception.YubicoError,\n yubico.yubico_exception.InputError,\n yubico.yubikey_usb_hid.usb.USBError\n ) as e:\n tkMessageBox.showerror(\"Error\", e)\n self._program_cancel()", "def get_confirmation():\n inp = PInput(\"#> \")\n\n inp.add_keyword(\"yes\")\n inp.add_keyword(\"no\")\n\n inp.ask()\n ans = inp.get_input()\n\n if ans == \"yes\":\n return True\n else:\n return False", "def proceed():\n c_print(\"********** PROCEED? 
**********\")\n # capture user input\n confirm = input(\" \" * 36 + \"(y/n) \")\n # quit script if not confirmed\n if confirm.lower() != \"y\":\n c_print(\"******* EXITING SCRIPT *******\")\n print(\"~\" * 80)\n exit()\n else:\n c_print(\"********* PROCEEDING *********\")", "def confirmEmail(self, secret_hash):\n\n\t\tthe_data = {};\n\t\tthe_data[\"field.utility\"] = secret_hash;\n\n\t\tresult = self.db.request(\"getOne\", the_data);\n\n\t\tif result:\n\t\t\tdata_prime = self.db.getData(result[\"_id\"], {\"status\": \"Ok\"});\n\n\t\t\tresult2 = self.db.request(\"update\", data_prime);\n\n\t\t\tif result2:\n\t\t\t\treturn \"Ok\";\n\t\t\telse:\n\t\t\t\treturn False;\n\n\t\telse:\n\t\t\treturn False;", "def kitchenConfirm(self, id, eta=0):\n if(eta == 0):\n json = None\n else: \n json = {\"eta\": eta}\n return self.__insertOrderHistory(id, \"kitchenConfirmed\", json)", "def test_reset_confirmation(self):\n self._create_program_and_course_enrollment(self.program_uuid, self.user)\n\n with self._replace_stdin('confirm'):\n call_command(self.command, self.program_uuid)\n\n self._validate_enrollments_count(0)", "def awaiting_payment(self):", "def test_yes_option_enabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, self.PASSWORD),\n \"100000\",\n \"100\",\n \"-y\",\n *password_option,\n )\n confirm_mock.assert_not_called()", "def you_should_be_able_to_confirm_and_close(driver):\n wait_on_element(driver, 0.5, 30, '//h1[contains(.,\"Test Changes\")]')\n driver.find_element_by_xpath('//mat-checkbox[@ix-auto=\"checkbox__CONFIRM\"]').click()\n driver.find_element_by_xpath('//button[@ix-auto=\"button__TEST CHANGES\"]').click()\n wait_on_element_disappear(driver, 1, 30, '//h6[contains(.,\"Please wait\")]')", "def pending_confirm_parameter(request, action, rating_decision_pk):\n\n hash = None\n\n if action:\n\n rating_decision_obj = RatingDecision.objects.get(pk=rating_decision_pk)\n\n rating_process_obj, created = Process.objects.get_or_create(\n rating_decision=rating_decision_obj)\n\n rating_job_id = format_reference_number(number=rating_decision_obj.id,\n object_type='rating_decision',)\n\n if action == 'setup_done':\n hash = 'setup_step_2'\n\n rating_decision_obj.process_step = 2\n rating_process_obj.setup_done = timezone.now()\n\n to_list = [\n rating_decision_obj.chair.email\n ]\n\n # Notify analysts and relationship manager\n cc_list = [\n rating_decision_obj.issuer.analyst.\n primary_analyst.email,\n rating_decision_obj.issuer.analyst.\n secondary_analyst.email,\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n cc_list.append('[email protected]')\n\n header = SETUP_HEADER.format(\n rating_decision_obj.issuer,\n rating_job_id,\n )\n body = SETUP_BODY % rating_decision_obj.chair.first_name\n\n # Send notification to chair\n send_email.delay(\n header=header,\n body=body,\n to=to_list,\n from_sender=None,\n cc=cc_list)\n\n elif action == 'pre_committee_done':\n hash = 'setup_step_3'\n\n rating_decision_obj.process_step = 3\n rating_process_obj.pre_committee_done = timezone.now()\n rating_decision_obj.chair_confirmed = True\n rating_decision_obj.date_time_committee_confirmed = True\n\n to_list = [\n rating_decision_obj.issuer.analyst.\n primary_analyst.email,\n rating_decision_obj.issuer.analyst.\n secondary_analyst.email,\n ]\n 
cc_list = [\n rating_decision_obj.issuer.relationship_manager.email,\n rating_decision_obj.chair.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n cc_list.append('[email protected]')\n\n header = PRE_COMMITTEE_HEADER.format(\n rating_decision_obj.issuer,\n rating_decision_obj.chair.get_full_name(),\n rating_job_id,\n )\n\n # Send notification\n send_email.delay(\n header=header,\n body=PRE_COMMITTEE_BODY % rating_decision_obj.issuer.analyst.\n primary_analyst.first_name,\n to=to_list,\n from_sender=None,\n cc=cc_list,)\n\n elif action == 'analytical_phase_done':\n hash = 'setup_step_4'\n\n rating_decision_obj.process_step = 4\n rating_process_obj.analytical_phase_done = timezone.now()\n\n # Send notification to chair and members of committee\n to_list = [\n rating_decision_obj.issuer.analyst.primary_analyst.email,\n rating_decision_obj.issuer.analyst.secondary_analyst.email,\n rating_decision_obj.chair.email,\n ]\n cc_list = [\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n cc_list.append('[email protected]')\n\n # Get all members\n committee_members = list(\n JobMember.objects.confirmed_members().filter(\n rating_decision=rating_decision_obj,\n group_id=1))\n for item in committee_members:\n to_list.append(item.member.email)\n\n local_dt = timezone.localtime(\n rating_decision_obj.date_time_committee,\n timezone.get_fixed_timezone(60))\n\n header = ANALYTICAL_PHASE_HEADER.format(\n rating_decision_obj.issuer,\n local_dt.strftime('%Y-%m-%d %H:%M'),\n rating_job_id,\n )\n\n send_email.delay(\n header=header,\n body=ANALYTICAL_PHASE_BODY,\n to=to_list,\n cc=cc_list,\n from_sender=None)\n\n elif action == 'post_committee_done':\n hash = 'setup_step_5'\n\n rating_decision_obj.process_step = 5\n rating_process_obj.post_committee_done = timezone.now()\n\n # Contains the name and email of the editor\n editor_obj = JobMember.objects.get(\n rating_decision=rating_decision_obj,\n group=Group.objects.get(pk=2))\n\n to_list = [\n editor_obj.member.email,\n ]\n cc_list = [\n rating_decision_obj.issuer.analyst.primary_analyst.email,\n rating_decision_obj.issuer.analyst.secondary_analyst.email,\n rating_decision_obj.chair.email,\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n cc_list.append('[email protected]')\n\n header = EDITOR_HEADER.format(\n rating_decision_obj.issuer,\n rating_job_id,\n )\n\n # Send email with link to admin control to editor\n send_email.delay(header=header,\n body=EDITOR_EMAIL % (\n editor_obj.member.first_name,\n rating_decision_obj.issuer.analyst.\n primary_analyst.first_name),\n to=to_list,\n cc=cc_list,\n from_sender=None,)\n\n elif action == 'editor_phase_done':\n \"\"\"Here, we're sending the draft report to the issuer.\"\"\"\n\n hash = 'setup_step_6'\n\n rating_decision_obj.process_step = 6\n\n # External analysis\n send_public_report(rating_decision_obj)\n\n # Send notification to chair\n to_list = [\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n header = \"{} | the draft report has been sent to the \" \\\n \"issuer\".format(rating_decision_obj.issuer,)\n\n send_email.delay(\n header=header,\n body=' ',\n to=to_list,\n from_sender=None,)\n\n # Set a timestamp when se sent the report to the issuer\n rating_decision_obj.date_time_communicated_issuer = timezone.now()\n 
rating_process_obj.editor_review_done = timezone.now()\n\n elif action == 'issuer_confirmation_phase_done':\n hash = 'setup_step_7'\n\n rating_decision_obj.process_step = 7\n\n to_list = [\n rating_decision_obj.chair.email,\n rating_decision_obj.issuer.analyst.primary_analyst.email,\n rating_decision_obj.issuer.analyst.secondary_analyst.email,\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n to_list.append('[email protected]')\n\n header = \"{} | The issuer has confirmed the accuracy of the \" \\\n \"draft report for rating job {}\".format(\n rating_decision_obj.issuer,\n rating_job_id, )\n\n # Send notification\n send_email.delay(\n header=header,\n body='',\n to=to_list,\n from_sender=None,)\n\n rating_process_obj.issuer_confirmation_done = timezone.now()\n\n elif action == 'analyst_final_approval_phase_done':\n hash = 'setup_step_8'\n\n rating_process_obj.final_sign_off_analyst_done = timezone.now()\n rating_decision_obj.process_step = 8\n\n to_list = [\n rating_decision_obj.issuer.analyst.primary_analyst.email,\n ]\n cc_list = [\n rating_decision_obj.chair.email,\n rating_decision_obj.issuer.analyst.secondary_analyst.email,\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n cc_list.append('[email protected]')\n\n header = ANALYST_FINAL_APPROVAL_HEADER.format(\n rating_decision_obj.issuer\n )\n\n # Send notification to chair\n send_email.delay(\n header=header,\n body=ANALYST_FINAL_APPROVAL_BODY %\n rating_decision_obj.issuer.analyst.\n primary_analyst.first_name,\n to=to_list,\n from_sender=None,\n cc=cc_list)\n\n elif action == 'chair_final_approval_phase_done':\n hash = 'setup_step_9'\n\n rating_decision_obj.process_step = 9\n rating_process_obj.final_sign_off_chair_done = timezone.now()\n\n to_list = [\n rating_decision_obj.issuer.analyst.primary_analyst.email,\n rating_decision_obj.issuer.analyst.secondary_analyst.email,\n ]\n cc_list = [\n rating_decision_obj.chair.email,\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n cc_list.append('[email protected]')\n\n header = CHAIR_FINAL_APPROVAL_HEADER.format(\n rating_decision_obj.issuer\n )\n\n # Send notification to analyst\n send_email.delay(header=header,\n body=CHAIR_FINAL_APPROVAL_BODY %\n rating_decision_obj.issuer.analyst.\n primary_analyst.first_name,\n to=to_list,\n from_sender=None,\n cc=cc_list,)\n\n elif action == 'publishing_phase_done':\n hash = \"\"\n\n rating_decision_obj.date_time_published = timezone.now()\n\n to_list = [\n rating_decision_obj.issuer.analyst.primary_analyst.email,\n rating_decision_obj.issuer.analyst.secondary_analyst.email,\n rating_decision_obj.chair.email,\n rating_decision_obj.issuer.relationship_manager.email,\n ]\n\n # We want to notify Compliance in production\n if os.environ['ENVIRONMENT_MODE'] == 'PROD':\n to_list.append('[email protected]')\n\n header = \"{} | the rating job has been finalized\".format(\n rating_decision_obj.issuer\n )\n\n # Send notification\n send_email.delay(header=header,\n body='If this a public rating, the publishing '\n 'process will now commence.',\n to=to_list,\n from_sender=None,)\n\n \"\"\"If there is a previous decision, flag it as non-current.\"\"\"\n try:\n RatingDecision.objects.filter(\n 
pk=rating_decision_obj.previous_rating.pk).update(\n is_current=False,)\n except AttributeError:\n pass\n\n rating_decision_obj.is_current = True\n rating_process_obj.process_ended = timezone.now()\n\n # Create a DecisionAttribute object that\n # summarizes the rating decision\n refresh_decision_attributes(rating_decision_obj)\n\n # Create decisions on the issue level\n update_issue_rating(rating_decision_obj)\n\n # Create a draft campaign with MailChimp\n if rating_decision_obj.rating_type.id == 1:\n run_create_campaign.delay(rating_decision_obj.id)\n\n rating_decision_obj.process_step = 10\n\n # Save changes\n rating_process_obj.save()\n rating_decision_obj.save()\n\n redirect_url = request.META.get('HTTP_REFERER', '/') + '#' + hash\n\n return http.HttpResponseRedirect(redirect_url)", "def officer_confirm_view(request, pk):\n challenge = OffChallenge.objects.get(id=pk)\n if request.user.id != challenge.officer.id:\n raise PermissionDenied # not the officer that gave the challenge\n\n requester_name = challenge.requester.get_full_name()\n form = ChallengeConfirmationForm(request.POST or None, instance=challenge)\n context = {\n 'challenge': challenge,\n 'requester_name': requester_name,\n 'form': form,\n }\n\n if form.is_valid():\n form.instance.reviewed = True\n form.save()\n # csec has already confirmed, and now officer confirms\n if challenge.officer_confirmed is True and challenge.csec_confirmed is True:\n send_challenge_confirm_email(request, form.instance, True)\n # csec has not already rejected, and now officer rejects\n elif challenge.officer_confirmed is False and challenge.csec_confirmed is not False:\n send_challenge_confirm_email(request, form.instance, False)\n # if neither is true, either need to wait for csec to review,\n # or csec has already rejected\n return redirect('/cand/reviewconfirm/{}'.format(pk))\n return render(request, \"candidate/challenge_confirm.html\", context=context)", "async def handle_tr_agree(self, msg, recv):\n\n assert msg is not None", "def confirm(self, prompt, default):\n raise NotImplementedError(NotImplementedMessage)", "def asking(self):\n return 'Sure.'", "def test_decision_maker_hand_tx_ready_for_signing(self):\n tx_message = TransactionMessage(\n performative=TransactionMessage.Performative.PROPOSE_FOR_SIGNING,\n skill_callback_ids=[PublicId(\"author\", \"a_skill\", \"0.1.0\")],\n tx_id=self.tx_id,\n tx_sender_addr=self.tx_sender_addr,\n tx_counterparty_addr=self.tx_counterparty_addr,\n tx_amount_by_currency_id={\"FET\": -20},\n tx_sender_fee=0,\n tx_counterparty_fee=0,\n tx_quantities_by_good_id={\"good_id\": 0},\n ledger_id=self.ledger_id,\n info=self.info,\n signing_payload={\"key\": b\"some_bytes\"},\n )\n self.decision_maker.handle(tx_message)\n assert not self.decision_maker.message_out_queue.empty()", "def confirm_conf(conf):\n print()\n print('Your configuration:')\n for key, value in conf.items():\n print(f'{key.title()}: {value}')\n if not yes_no('Is this correct?'):\n key = ask_options('Which would you like to change?', list(conf.keys()),\n hints=list(conf.values()))\n conf[key] = None\n return conf, False\n return conf, True", "def confirm_wouldyou(self, update, context):\n chat_id = update.effective_chat.id\n response_code = update.callback_query[\"data\"] # wouldyou_{yes|no}\n request_id = context.user_data[\"current_request\"]\n log.info(\"Wouldyou req:%s %s\", request_id, response_code)\n\n if response_code == \"wouldyou_yes\":\n # they want to keep returning to this beneficiary\n context.bot_data[request_id][\"would_return\"] 
= True\n else:\n context.bot_data[request_id][\"would_return\"] = False\n\n # Send the next question, asking if they have any special comments for future volunteers\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=c.MSG_FEEDBACK_FURTHER_COMMENTS % context.bot_data[request_id][\"beneficiary\"],\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=InlineKeyboardMarkup(k.further_comments_choices),\n )\n context.user_data[\"state\"] = c.State.EXPECTING_FURTHER_COMMENTS", "def post_key(self):\n # print(self.key)\n #Sending the key to the attacker.\n s.send(bytes(\"K\\n{}\".format(str(self.key,'utf-8')),'utf-8'))", "def confirm_yes():\r\n confirm = raw_input(\"Enter 'yes' to confirm: \")\r\n if confirm == 'yes':\r\n return True\r\n return False", "def req_CHECKPRESENT(self, key):\n # TODO: so we need to maintain mapping from urls to keys. Then\n # we could even store the filename within archive\n # Otherwise it is unrealistic to even require to recompute key if we\n # knew the backend etc\n lgr.debug(\"VERIFYING key %s\" % key)\n akey, afile = self._get_akey_afile(key)\n if self.get_contentlocation(akey):\n self.send(\"CHECKPRESENT-SUCCESS\", key)\n else:\n # TODO: proxy the same to annex itself to verify check for archive.\n # If archive is no longer available -- then CHECKPRESENT-FAILURE\n self.send(\"CHECKPRESENT-UNKNOWN\", key)", "def verify_decrypt_key(self):\r\n\t\tpercent_english = Dict_Control(self.my_code).check_key()\r\n\t\t#If more than half the words are english, the key will pass. \r\n\t\tif percent_english > 50:\r\n\t\t\tself.right_key = False\r\n\t\t#If the key does not pass, the program will give you a warning and prompt you for another key. \r\n\t\telse: \r\n\t\t\tprint(f\"After decryption, it looks like only {percent_english}% of your words are english, you may have entered the wrong key?\")", "def confirm_email_change(request, key):\r\n try:\r\n try:\r\n pec = PendingEmailChange.objects.get(activation_key=key)\r\n except PendingEmailChange.DoesNotExist:\r\n response = render_to_response(\"invalid_email_key.html\", {})\r\n transaction.rollback()\r\n return response\r\n\r\n user = pec.user\r\n address_context = {\r\n 'old_email': user.email,\r\n 'new_email': pec.new_email\r\n }\r\n\r\n if len(User.objects.filter(email=pec.new_email)) != 0:\r\n response = render_to_response(\"email_exists.html\", {})\r\n transaction.rollback()\r\n return response\r\n\r\n subject = render_to_string('emails/email_change_subject.txt', address_context)\r\n subject = ''.join(subject.splitlines())\r\n message = render_to_string('emails/confirm_email_change.txt', address_context)\r\n up = UserProfile.objects.get(user=user)\r\n meta = up.get_meta()\r\n if 'old_emails' not in meta:\r\n meta['old_emails'] = []\r\n meta['old_emails'].append([user.email, datetime.datetime.now(UTC).isoformat()])\r\n up.set_meta(meta)\r\n up.save()\r\n # Send it to the old email...\r\n try:\r\n user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)\r\n except Exception:\r\n log.warning('Unable to send confirmation email to old address', exc_info=True)\r\n response = render_to_response(\"email_change_failed.html\", {'email': user.email})\r\n transaction.rollback()\r\n return response\r\n\r\n user.email = pec.new_email\r\n user.save()\r\n pec.delete()\r\n # And send it to the new email...\r\n try:\r\n user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)\r\n except Exception:\r\n log.warning('Unable to send confirmation email to new address', exc_info=True)\r\n response = 
render_to_response(\"email_change_failed.html\", {'email': pec.new_email})\r\n transaction.rollback()\r\n return response\r\n\r\n response = render_to_response(\"email_change_successful.html\", address_context)\r\n transaction.commit()\r\n return response\r\n except Exception:\r\n # If we get an unexpected exception, be sure to rollback the transaction\r\n transaction.rollback()\r\n raise", "def confirm(self):\n self.automatically_detected=False\n self.save()", "async def steamkey(self, ctx, key):\n\n set_steam_key(key)\n await self.bot.say(\"The Steam API key has been successfully added! Delete the previous message for your own safety!\")", "def test_create_key():\n\n assert symmetric.create_key() != \"\"", "def payment_approval(self, house_cost: (int, float)):\n if self.money_available >= house_cost: # Person has enough available money to make a deal with Realtor\n self.money_available -= house_cost\n print(f'Payment from {self.name} was approved')\n return True\n print(f'{self.name} doesn\\'t have enough money to buy this house')\n return False", "def send_confirmation(self):\r\n c.user.email_validated = False\r\n c.user.confirmation_code = random_key(6)\r\n c.user._commit()\r\n emailer.confirmation_email(c.user)", "def confirm_further(self, update, context):\n response_code = update.callback_query[\"data\"] # wouldyou_{yes|no}\n request_id = context.user_data[\"current_request\"]\n log.info(\"No further comments req:%s %s\", request_id, response_code)\n self.finalize_request(update, context, request_id)", "def warehouse_officer_confirm_qty(self):\n if (\n self.approve_request_ids is None\n or self.approve_request_ids is False\n ):\n raise UserError(\"No line(s) defined!\")\n self._compute_confirm()\n for line in self.approve_request_ids:\n line._compute_state()\n if any(line.state != \"available\" for line in self.approve_request_ids):\n raise Warning(\n \"Please procure the items that are short in stock or process pending purchase agreements and try again!\"\n )\n else:\n self.state = 'transfer'", "def test_acknowledge_hmac_validation_failed(client):\n res = client.get(\n \"/v0/acknowledge?fp=splunk_82998ef6bb3db9dff3dsfdsfsdc\" \"&t=97244b15a21f45e002b2e913866ff7545510f9b08dea5241f\"\n )\n assert res.status == \"500 INTERNAL SERVER ERROR\"", "def test_unsuccessful_verification(self):\n for i in (-4, -3, 3, 4):\n description = \"TOTP verified for `i={0}`\".format(i)\n calculated = self.algorithm.calculate(self.device.secret, drift=i)\n confirmed = self.relate.verify(calculated, save=False)\n\n self.assertFalse(confirmed, description)\n\n self.relate.confirm = False", "def on_confirmation(self, ch, method, header, body):\n print \" [x] Received confirmation %r\" % (body,)\n self.now_playing(body)\n ch.basic_ack(delivery_tag=method.delivery_tag)", "def confirm_email(self):\n # The base class' implementation does nothing\n pass", "def approve(self, approver: str, to: str, amount, key: bytes):\n raw_tx = self.approve_build_transaction(approver, to, amount)\n signed_tx = self._sign(raw_tx, key)\n self.send_and_wait(signed_tx)", "def confirm_exit(self):\n return True", "def test_delay_by_proof(self):\n node, other = self.create_nodes(2)\n node.send_identity(other)\n\n # permit NODE\n proof_msg = self._mm.create_authorize([(node.my_member, self._community.get_meta_message(u\"protected-full-sync-text\"), u\"permit\"),\n (node.my_member, self._community.get_meta_message(u\"protected-full-sync-text\"), u\"authorize\")])\n\n # NODE creates message\n tmessage = 
node.create_protected_full_sync_text(\"Protected message\", 42)\n other.give_message(tmessage, node)\n\n # must NOT have been stored in the database\n other.assert_not_stored(tmessage)\n\n # OTHER sends dispersy-missing-proof to NODE\n _, message = node.receive_message(names=[u\"dispersy-missing-proof\"]).next()\n self.assertEqual(message.payload.member.public_key, node.my_member.public_key)\n self.assertEqual(message.payload.global_time, 42)\n\n # NODE provides proof\n other.give_message(proof_msg, node)\n\n # must have been stored in the database\n other.assert_is_stored(tmessage)", "def consent(s, eType, eVal):\n try:\n import maya.cmds as cmds # Is Maya active? Ask using their GUI\n answer = cmds.confirmDialog(t=eType.__name__, m=CONFIRM_MSG, b=(\"Yes\",\"No\"), db=\"Yes\", cb=\"No\", ds=\"No\")\n return \"Yes\" == answer\n except ImportError:\n return True # No means to ask? Ah well ...", "def remote_verifyKey(self, key, protocol):\r\n if self._authenticated.called:\r\n return Failure(InvalidKey('Only one guess is possible.'))\r\n\r\n if isinstance(protocol, Failure):\r\n self._authenticated.errback(protocol)\r\n else:\r\n if self._key != key:\r\n e = Failure(InvalidKey('Wrong key supplied.'))\r\n self._authenticated.errback(e)\r\n return e\r\n\r\n self._authenticated.callback(protocol)", "def test_confirm_fail_consent_oauth_token(self):\n # First perform an add request that creates the flow request with status 'PENDING'\n res = self._add_flow_request()\n confirm_id = res.json()['confirm_id']\n process_id = res.json()['process_id']\n callback_url = 'http://127.0.0.1/'\n\n self.client.login(username='duck', password='duck')\n res = self.client.get('/v1/flow_requests/confirm/?confirm_id={}&callback_url={}&action=add'.format(\n confirm_id, callback_url))\n self.assertRedirects(res, \"{}?process_id={}&success=false&error={}\".format(callback_url, process_id, ERRORS_MESSAGE['INTERNAL_GATEWAY_ERROR']),\n fetch_redirect_response=False)", "def confirmation(self, question, answer):\n confirm_flag = False\n while confirm_flag not in ['y', 'n']:\n confirm_flag = raw_input(question + ' [y/n]: ')\n if confirm_flag == 'y':\n print answer\n elif confirm_flag == 'n':\n print 'The user cancel the operation'\n exit()\n else:\n print 'The entry is not valid, please enter y or n.'\n return True", "def confirm_tend(self, before, after, tx):\n assert True", "def confirm(self, action):\n title = \"%s : P L E A S E C O N F I R M\" % action\n question_text = \"<html><b>%s - PLEASE CONFIRM.</b><br/>\"\\\n \"<br/>Do you want to %s %s recordings for the following project?\"\\\n \"<br/><br/>PROJECT : %s\"\\\n \"<br/>CLIENT : %s\"\\\n \"<br/>DATE : %s<br/></html>\" % (\n action.upper(),\n action,\n \" & \".join(self.selected_formats),\n self.recordings_table.project_details()[2],\n self.recordings_table.project_details()[3],\n self.recordings_table.project_details()[0]\n )\n\n self.hide()\n if action == 'upload':\n self.confirmation_dialog.setText(title, question_text)\n self.confirmation_dialog.exec_()\n self.show()\n\n if self.confirmation_dialog.cancelled:\n return (False, False)\n\n return (True, self.confirmation_dialog.immediate_upload)\n else:\n self.confirmation_dialog.showQuestion(title, question_text)\n self.show()\n return self.confirmation_dialog.copy_confirmed", "def test_yes_option_disabled(\n self, wait_tx_settled_mock, confirm_mock, do_transfer_mock\n ):\n password_option = self.get_password_args(self.PASSWORD)\n self.invoke(\n \"transfer\",\n self.LEDGER_ID,\n self.get_address(self.LEDGER_ID, 
self.PASSWORD),\n \"100000\",\n \"100\",\n *password_option,\n )\n confirm_mock.assert_called_once()", "def confirm(msg: str) -> bool:\n res = input(msg + \" (Y/n) > \")\n if res == 'Y' or res == 'y' or res == 'yes' or res == 'Yes' or res == \"\":\n return True\n return False", "def test_api_user_resend_confirmation_post(self):\n pass", "def send_verification(self):\n pass", "def confirmCall(self, activePlayer, action):\n # todo: raise notImplemented. should be overriden\n return False", "async def chat_completechallenge(self, event):\n await self.send_json(\n return_value(\n ACTION_APPROVE,\n event['label'],\n event['username'],\n MSG_ALERT,\n NO_MESSAGE\n )\n )", "def on_key_status(self, key):\n self.core.log.info(\"Key Manager key update\")\n client_proto = self.core.get_client_protocol(key.machine_id)\n if client_proto is not None:\n reply = ServerMsgFactory().create(kind=ServerMsgFactory.KIND_HANDSHAKE_PKEY_STATUS_RESP)\n reply.internal[\"payload\"] = key.status\n client_proto.sendMessage(ObjectGate(reply).pack(True), True)\n if key.status != KeyStore.STATUS_ACCEPTED:\n client_proto.dropConnection()", "def get_keys(self, update, context):\r\n self.SECRET_KEY = update.message.text\r\n update.message.reply_text(text=f'Новый ключ: {self.SECRET_KEY}')\r\n return ConversationHandler.END", "def private_warn(*arg):\n private = private_var.get()\n\n if private:\n response = tkmb.askokcancel(\n \"Are you sure?\",\n \"Do you really want to encrypt this message?\"\n )\n if not response:\n private_var.set(False)", "def test_approve(self):\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.approve(TOOLNAME,TOOLLICENSEDATA)", "def confirm():\n if not session.has_key('phone') or not session.has_key('confirmation_code'):\n flash(\"Please log in.\")\n return redirect(my_url('index'))\n if request.method == 'POST':\n if (request.form.has_key('code')\n and session['confirmation_code'] == request.form['code']):\n session['authed'] = True\n del session['confirmation_code']\n return redirect(my_url('accounts'))\n else:\n flash(\"Your confirmation code was invalid\")\n return redirect(my_url('index'))\n else:\n return render_template('confirm.html')", "def request_cb(self, target_user, request, ctx):\n\n if request == None:\n return\n\n if not validate({'r': str,\n 'uid': lambda s: type(s) == str and valid_uid(s) and s != self.my_uid,\n 'param': str}, request):\n debug('Key management: Broken payload: %s\\n' %(' '.join(payload)))\n return\n\n cmd = request['r']\n uid = request['uid']\n param = request['param']\n\n if uid == self.my_uid:\n return\n\n debug('Key management: got answer %s from %s\\n' %(cmd, uid))\n user = self.community.get_user(uid)\n\n if self.current['user'] and user != self.current['user']:\n warning('keymanagement: Protocol violation from %s: Current uid is %s\\n' %(nick, self.current['uid'].get('uid')))\n return {'r': self.KM_PROTOCOL_VIOLATION, 'uid': self.my_uid}\n\n if not self.check_answer(cmd):\n warning('keymanagement: Protocol violation from %s: request was %s but answer was %s' %(uid, self.current['state'], cmd))\n self.send_request(user, self.KM_PROTOCOL_VIOLATION, '')\n return\n\n self.key_exchange_gui.plugin_to_gui(user, cmd, False)\n\n payload = ''\n if cmd == self.KM_REQUEST_ACK:\n self.temp_key_watcher = self.gen_temp_key()\n self.temp_passphrase = self.gen_passphrase()\n debug('Key management: passphrase is %s\\n' %(self.temp_passphrase))\n return\n if cmd == 
self.KM_REQUEST_ANSWER_ACK:\n self.gen_temp_key()\n return\n elif cmd == self.KM_TEMP_KEY1:\n # Received temporery key: save it and send our temporary key\n # encrypted with the symmetric cipher\n temp_key = self.sym_dec(param, self.temp_passphrase)\n if temp_key and self.save_key(user, pub=temp_key, temp=True):\n send_cmd = self.KM_TEMP_KEY2\n payload = self.sym_enc(self.load_pub_key(self.myself, temp=True),\n self.temp_passphrase)\n if not payload:\n send_cmd = self.KM_ERROR\n payload = ''\n else:\n send_cmd = self.KM_ERROR\n payload = ''\n elif cmd == self.KM_PERM_KEY1:\n # Received counterpartys permanent key, so let's save it and send ours\n perm_key = self.asym_dec(param, self.key_path(self.myself, temp=True))\n if perm_key and self.save_key(user, pub=perm_key):\n send_cmd = self.KM_PERM_KEY2\n payload = self.asym_enc(self.load_pub_key(self.myself),\n self.key_path(user, temp=True))\n if not payload:\n send_cmd = KM.ERROR\n payload = ''\n else:\n send_cmd = KM_ERROR\n payload = ''\n elif cmd == self.KM_PERM_KEY_ACK:\n send_cmd = self.KM_FINISHED\n elif cmd == self.KM_FINISHED:\n # Successful key exchange\n self.current = {'user': None, 'state': None}\n self.community.announce_user_change(user) # update user state\n return\n elif cmd == self.KM_CANCEL:\n self.current = {'user': None, 'state': None}\n return\n elif cmd == self.KM_ERROR:\n self.current = {'user': None, 'state': None}\n return\n elif cmd == self.KM_PROTOCOL_VIOLATION:\n self.current = {'user': None, 'state': None}\n return\n elif cmd == self.KM_REQUEST_NACK:\n self.current = {'user': None, 'state': None}\n return\n\n self.current['state'] = send_cmd\n self.send_request(user, send_cmd, payload)", "def test_acknowledge_post(client):\n g.test_authorized_for = []\n res = client.post(\"/v0/acknowledge\", json=post_json_data)\n assert '{\"msg\":\"Thanks for acknowledging!\",\"status\":\"ok\"}' in res.data.decode(\"utf-8\")", "def confirm_email(self):\n self.active = True\n self.save()", "def confirm_email(request, confirmation_key):\n confirmation_key = confirmation_key.lower()\n profile = EmailConfirmation.objects.confirm_email(confirmation_key)\n \n return render_to_response('small/email_confirmed.html', {'profile': profile, 'prev':request.META.get('HTTP_REFERER','/')}, context_instance=RequestContext(request))", "def submit(request):\n if not request.user.is_authenticated():\n return proceed(request)\n # If dev has already agreed, continue to next step.\n user = UserProfile.objects.get(pk=request.user.id)\n if not user.read_dev_agreement:\n return redirect('submit.app.terms')\n return manifest(request)", "def check_api(submitted_key, users_key):\r\n if users_key != submitted_key:\r\n return False\r\n else:\r\n return True", "def test_private_key_set():\n\n loop = asyncio.get_event_loop()\n with aioresponses() as m:\n m.post('https://api.idex.market/returnNextNonce', payload=nonce_res, status=200)\n m.post('https://api.idex.market/cancel', payload=json_res, status=200)\n\n async def _run_test():\n client = await AsyncClient.create(api_key, address, private_key)\n await client.cancel_order('0xcfe4018c59e50e0e1964c979e6213ce5eb8c751cbc98a44251eb48a0985adc52')\n\n loop.run_until_complete(_run_test())", "def interactive_confirm_and_post(self, key, old_commit, new_prepared):\n written = False\n if old_commit.meta[\"key\"] != key:\n raise ValueError(\"Expected key '%s', got '%s'\" % (key, old_commit.meta[\"key\"]))\n\n # extract value for comparison and diffing\n new_prepared_split = self.__splitprepared(new_prepared)\n\n if 
old_commit.value != new_prepared_split[1]:\n # show diff\n self.__printdiff(\n old_commit.value if old_commit.value is not None else \"\",\n new_prepared_split[1],\n \"{:s} (current)\".format(key),\n \"{:s} (new)\".format(key)\n )\n\n # ask for confirmation\n usersaid = raw_input(\"Commit to %s (yes/no)? \" % self.client.commit_uri(key))\n\n while usersaid != \"yes\" and usersaid != \"no\":\n usersaid = raw_input(\"Please type 'yes' or 'no': \")\n\n if usersaid == \"yes\":\n print self.post_prepared_commit(key, new_prepared)\n written = True\n else:\n print \"OK, not commiting.\"\n\n else:\n print \"No change, not commiting.\"\n\n return written", "def get_key_input():\n return get_input(message='Please enter your master key:',\n secure=True, check_timer=False)", "def test_confirm_add_flow_request_confirmed_consent(self):\n self.client.login(username='duck', password='duck')\n # Gets the confirmation code installed with the test data\n c = ConsentConfirmation.objects.get(confirmation_id=CORRECT_CONFIRM_ID)\n res = self.client.get(\n '/v1/flow_requests/consents_confirmed/?success=true&consent_confirm_id={}'.format(CORRECT_CONFIRM_ID))\n\n redirect_url = '{}?process_id={}&success=true'.format(c.destination_endpoint_callback_url,\n c.flow_request.process_id)\n self.assertRedirects(res, redirect_url, fetch_redirect_response=False)\n flow_request = c.flow_request\n self.assertEqual(flow_request.status, FlowRequest.ACTIVE)\n channel = ConsentConfirmation.objects.get(confirmation_id=CORRECT_CONFIRM_ID).channel\n # It remain CR until the consent notification consumer gets the change\n self.assertEqual(channel.status, Channel.CONSENT_REQUESTED)", "def test_validate_yubikey(self):\n from_key = self.yk_rnd.from_key(self.yk_public_id, self.yk_key)\n self.assertTrue(pyhsm.yubikey.validate_yubikey_with_aead( \\\n self.hsm, from_key, self.aead.data, self.kh_validate))", "def test_confirm_booking(client):\n response = client.post(\n BOOKING_API_URL + '/confirm',\n data=dict(\n pickup_datetime=PICKUP_DATE,\n return_datetime=RETURN_DATE,\n car_id=1,\n ),\n content_type='multipart/form-data'\n )\n\n assert response.status_code == 200\n assert b'Your booking has been confirmed, thank you!' in response.data", "async def confirm(ctx, *args: discord.Member):\n await _confirm(args)" ]
[ "0.70200527", "0.6937028", "0.6796732", "0.65929675", "0.64999527", "0.64217055", "0.6304895", "0.626149", "0.6239152", "0.6135", "0.61015856", "0.6096368", "0.59683174", "0.5962593", "0.5954259", "0.59480214", "0.59259063", "0.58750486", "0.5863333", "0.58017457", "0.5787794", "0.57716835", "0.57701564", "0.57549465", "0.57465106", "0.5736814", "0.57339287", "0.572345", "0.5704009", "0.56680834", "0.56557673", "0.56459296", "0.5636649", "0.5625348", "0.562292", "0.56195414", "0.5609597", "0.5602104", "0.5601616", "0.55500597", "0.5549973", "0.55498564", "0.55464286", "0.5539456", "0.55344945", "0.5529853", "0.55271846", "0.5521784", "0.5512847", "0.55079335", "0.5499565", "0.549304", "0.54809964", "0.5471892", "0.5471355", "0.5468878", "0.5463189", "0.5458426", "0.54540193", "0.5453149", "0.5452547", "0.54385054", "0.5434279", "0.54230815", "0.54189855", "0.5410945", "0.5407468", "0.5395231", "0.5392268", "0.5383167", "0.5374625", "0.5366527", "0.5357379", "0.5356993", "0.5354864", "0.53439593", "0.53360206", "0.5335718", "0.53273535", "0.5326665", "0.5325145", "0.53229475", "0.53165346", "0.5309582", "0.53083044", "0.5301938", "0.52967983", "0.5290339", "0.5289858", "0.52869296", "0.5284076", "0.52753645", "0.5270015", "0.52686715", "0.5264299", "0.52640486", "0.5261487", "0.52551657", "0.5253544", "0.52515274" ]
0.63993216
6
Euclidean distance between two graph poses
def distance(pose1, pose2):
    return (
        (pose1["pose"][3] - pose2["pose"][3]) ** 2
        + (pose1["pose"][7] - pose2["pose"][7]) ** 2
        + (pose1["pose"][11] - pose2["pose"][11]) ** 2
    ) ** 0.5
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def graph_dist(i1: int, g1: nx.Graph, i2: int, g2: nx.Graph) -> t.Tuple[int, int, float]:\n space1, space2 = map(dict, map(mut_space, [g1, g2]))\n d = 0\n for k in set(list(space1) + list(space2)):\n if k in space1 and k in space2:\n d += len(set(space1[k]).symmetric_difference(set(space2[k])))\n continue\n if k in space1:\n d += len(set(space1[k]))\n if k in space2:\n d += len(set(space2[k]))\n return i1, i2, d", "def dist(gene1, gene2):\n return abs(len(gene1.goal) - len(gene2.goal))", "def compute_distance(node1, node2):\n return np.linalg.norm(node1 - node2)", "def euclidian_distance(stroke1, stroke2):\n\n x1 = np.array(stroke1.x)\n x2 = np.array(stroke2.x)\n y1 = np.array(stroke1.y)\n y2 = np.array(stroke2.y)\n\n d = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2)\n m = d - np.min(d)\n if np.mean(m) < 0:\n return 0, 0\n else:\n return np.mean(d), np.mean(m)", "def dist(a, b):\n return np.sum((a-b)**2.0)**.5", "def euclid_dist(p1, p2):\n \n return float(np.linalg.norm(np.array(p1)-np.array(p2)))", "def _dist(a, b):\n return torch.pow(a - b, 2).sum(-1)", "def dist(p0, p1):\n return math.sqrt((p0[0] - p1[0])**2 + (p0[1] - p1[1])**2)", "def distanceTwoPoints(self,A,B):\n #productive\n # used by addNeedleToScene\n profprint()\n length = ( (A[0]-B[0])**2 + (A[1]-B[1])**2 + (A[2]-B[2])**2 ) ** 0.5\n return length", "def euclidean(p1, p2):\n return p1.distance(p2)", "def euclidean(x,y):\n\tassert (isinstance(x, BayesNet) and isinstance(y, BayesNet)), 'Must pass in BayesNet objects.'\n\tassert (x==y), 'Passed-in BayesNet objects are not structurally equal.'\n\n\tdistance = np.sum( np.sqrt( ( x.flat_cpt() - y.flat_cpt() )**2 ) )\n\treturn distance", "def CompareGraphsSpectrum(graph1, graph2):\n laplacian1 = nx.spectrum.laplacian_spectrum(graph1)\n laplacian2 = nx.spectrum.laplacian_spectrum(graph2)\n k1 = select_k(laplacian1)\n k2 = select_k(laplacian2)\n # take the fewer dimensions to describe the result\n k = min(k1, k2)\n # the similarity is the sum of the eukleidian distance of the most\n # important nodes\n similarity = sum((laplacian1[:k] - laplacian2[:k])**2)\n return similarity", "def dist(self, node_0, node_1):\n coord_0, coord_1 = self.coords[node_0], self.coords[node_1]\n return math.sqrt((coord_0[0] - coord_1[0]) ** 2 + (coord_0[1] - coord_1[1]) ** 2)", "def get_distance_over_path(G, path):\r\n node1 = str(path[0])\r\n node2 = str(path[-1])\r\n\r\n pos1 = G.nodes[node1]['pos']\r\n pos2 = G.nodes[node2]['pos']\r\n\r\n return np.sqrt((pos1[0] - pos2[0])**2 + (pos1[1] - pos2[1])**2)", "def dist(self,ipa_seg1,ipa_seg2):\n v1 = self.embed(ipa_seg1)\n v2 = self.embed(ipa_seg2)\n return np.sqrt(((v1-v2)**2).sum())", "def dist(v1, v2):\n return ( (v1[0] - v2[0])**2 + (v1[1] - v2[1])**2 )**0.5", "def dist(p1,p2):\n\n return sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)", "def dist(v1: vect2d, v2: vect2d) -> float:\n d = ((v2.x - v1.x)**2 + (v2.y - v1.y)**2) ** 0.5\n return d", "def eucl_dist(a, b):\n return np.sqrt( (a[0]-b[0])** 2 + (a[1]-b[1])** 2)", "def distance(p1,p2):\n return ((p1.x - p2.x)**2 + (p1.y - p2.y)**2)**0.5", "def distance(p0, p1):\n return( numpy.sqrt( (p0[0]-p1[0])**2 + \n (p0[1]-p1[1])**2 + \n (p0[2]-p1[2])**2 ) )", "def _get_distance(a, b):\n return np.sqrt(np.sum((a - b) ** 2))", "def dist(a,b): # compute distance between two points a & b\n return mag(sub(a,b))", "def dist(self, one, two):\n return np.sqrt((one[0] - two[0]) ** 2 + (one[1] - two[1]) ** 2)", "def distance(a, b):\n return math.sqrt((a.x - b.x) ** 2 + (a.y - b.y) ** 2)", "def distanceTwoPoints(self, A, B):\r\n 
# productive\r\n # used by addNeedleToScene\r\n if frequent: profprint()\r\n length = ((A[0] - B[0]) ** 2 + (A[1] - B[1]) ** 2 + (A[2] - B[2]) ** 2) ** 0.5\r\n return length", "def euclidDist(pair1,pair2):\n return ((pair1[0]-pair2[0])**2+(pair1[1]-pair2[1])**2)**0.5", "def Dist(p1,p2):\n x1, y1 = p1\n x2, y2 = p2\n return (((x1-x2)*(x1-x2)) + ((y1-y2)*(y1-y2)))**0.5", "def distance (p1,p2):\n return np.sqrt(np.sum(np.power(p2-p1,2)))", "def distance(P1, P2):\n return ((P1[0] - P2[0])**2 + (P1[1] - P2[1])**2) ** 0.5", "def distance(p1,p2):\n return ((p2.x - p1.x)*2 + (p2.y - p1.y))**0.5", "def get_euclidean_distance(p1, p2):\n return np.sqrt(np.power((p2[0] - p1[0]), 2) + np.power((p2[1] - p1[1]), 2))", "def euclidean_distance(x1, x2):\n return (x2[0] - x1[0])**2 + (x2[1] - x1[1])**2", "def dist(a, b):\n return math.sqrt(pow(a[0] - b[0], 2) + pow(a[1] - b[1], 2))", "def getDistance(self,p1,p2):\n return sum([(p1[i]-p2[i])**2 for i in range(2)])", "def calculate_distance(asteroid_1: Asteroid, asteroid_2: Asteroid) -> float:\n dy = asteroid_2.y - asteroid_1.y\n dx = asteroid_2.x - asteroid_1.x\n return math.sqrt(dy * dy + dx * dx)", "def distance(p1, p2):\n\n \"\"\"\n (p1[0] - p2[0]) ** 2 + \n (p1[1] - p2[1]) ** 2 + \n \"\"\"\n sum_all = 0\n for i, v in enumerate(p1):\n diff_squared = (v - p2[i]) ** 2\n sum_all += diff_squared\n return(math.sqrt(sum_all))", "def get_distance(p1, p2):\n return ((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2) ** 0.5", "def euclideanDistance(a, b):\n vec = [pow(a[i] - b[i], 2) for i in range(len(a)) if None not in [a[i],b[i]]]\n return (sum(vec) / len(vec)) if len(vec) > 0 else NaN", "def get_distance(self, resp1, resp2):\n feed_dict = {self.anchor: resp1}\n embed1 = self.sess.run(self.embed_anchor, feed_dict=feed_dict)\n\n feed_dict = {self.anchor: resp2}\n embed2 = self.sess.run(self.embed_anchor, feed_dict=feed_dict)\n\n return np.sqrt(np.sum((embed1-embed2)**2, 1))", "def distance(a, b):\n return (np.sum((a - b)**2))**0.5", "def distance(self, c1, c2):\r\n x = (c2.x - c1.x) ** 2\r\n y = (c2.y - c1.y) ** 2\r\n d = int(round(math.sqrt(x + y)))\r\n return d", "def euclidean_distance(x1: np.ndarray, x2: np.ndarray) -> float:\n return np.sqrt(np.square(x1 - x2).sum())", "def _compute_dist_cartesian(graph):\r\n for edge in graph.edges():\r\n node1, node2 = edge\r\n dx = np.abs(graph.nodes[node1]['xcoord'] - graph.nodes[node2]['xcoord'])\r\n dy = np.abs(graph.nodes[node1]['ycoord'] - graph.nodes[node2]['ycoord'])\r\n dist = np.round(np.sqrt(np.square(dx) + np.square(dy)), 5)\r\n graph.edges[node1, node2]['length'] = dist", "def getDistance(pos1, pos2):\r\n return ((pos1[0] - pos2[0]) ** 2 + (pos1[1] - pos2[1]) ** 2) ** 0.5", "def euclidean_distance_2(P1, P2):\r\n return (P1[0]-P2[0])**2+(P1[1]-P2[1])**2", "def dist(pt1, pt2):\n return np.sqrt((pt2[0]-pt1[0])**2 + (pt2[1]-pt1[1])**2)", "def dist(a: Point, b: Point):\n return (a.x - b.x) ** 2 + (a.y - b.y) ** 2", "def eucl_dist(x_0, y_0, x_1, y_1):\n return sqrt((x_1 - x_0)**2 + (y_1 - y_0)**2)", "def euclidean_distance(x1, x2):\n return np.sqrt(np.sum(np.square(np.subtract(x1, x2))))", "def euclidean_distance(a, b):\n return np.linalg.norm(a - b)", "def euclidean_distance(s1,s2): \n tmpsum = 0\n \n for index,value in enumerate(s1):\n tmpsum += (s1[index]-s2[index])**2\n \n return math.sqrt(tmpsum)", "def distance(p1, p2):\n return math.sqrt((math.pow((p2[0] - p1[0]), 2) + math.pow((p2[1] - p1[1]), 2)))", "def dist(pnt1, pnt2):\n return ((pnt2[0] - pnt1[0])**2 + (pnt2[1] - pnt1[1])**2 + (pnt2[2] - pnt1[2])**2)**0.5", 
"def distance(p1, p2):\n\treturn sqrt((p1[1]-p2[1])**2 + (p1[0]-p2[0])**2)", "def hellinger(x,y):\n\tassert (isinstance(x, BayesNet) and isinstance(y, BayesNet)), 'Must pass in BayesNet objects.'\n\tassert (x==y), 'Passed-in BayesNet objects are not structurally equal.'\n\n\tdistance = ( 1 / np.sqrt( 2 ) ) * np.sqrt( np.sum( ( np.sqrt( x.flat_cpt() ) - np.sqrt( y.flat_cpt() ) )**2) )\n\treturn distance", "def euclidean_dist(ss1, ss2):\n lat1, lon1 = ss1.centroid\n lat2, lon2 = ss2.centroid\n\n return sqrt((lat1 - lat2)**2 + (lon1 - lon2)**2)", "def _calc_distance(hmm1, hmm2, seqs2):\n p12 = hmm1.calc_loglikelihood(seqs2)\n p22 = hmm2.calc_loglikelihood(seqs2)\n # calc total number of elements in all sequences\n # TODO: consider the case when number of elements vary from seq to seq\n n_elements = len(seqs2) * len(seqs2[0])\n return (p22 - p12) / n_elements", "def dist_squared (a, b):\n return sum(map(lambda (x,y): (x-y)**2, zip(a, b)))", "def dist(first, other):\n if isinstance(first,FreeCAD.Vector) and isinstance(other,FreeCAD.Vector):\n return length(sub(first,other))", "def dist_2D(v1, v2):\n return ((v1[0]-v2[0])**2 + (v1[1]-v2[1])**2 )**(0.5)", "def dist2D(a, b):\n return ((a[0]-b[0])**2 + (a[1]-b[1])**2)**0.5", "def dist(pos1, pos2):\n a, b = pos1\n c, d = pos2\n \n return sqrt((a-c)**2 + (b-d)**2)", "def distance(p1, p2):\n return np.linalg.norm(p2-p1)", "def distance(a: Point, b: Point) -> float:\n return math.sqrt(math.pow(b.x - a.x, 2) + math.pow(b.y - a.y, 2))", "def compute_dist(p_1, p_2):\n return sqrt((p_2[0] - p_1[0])**2 + (p_2[1] - p_1[1])**2 +\n (p_2[2] - p_1[2])**2)", "def node_distance(self, node1, node2):\n if node1 == node2:\n return 0.0\n for i, (n1, n2) in enumerate(zip(self.paths[node1], self.paths[node2])):\n if n1 != n2:\n break\n else:\n i = min(len(self.paths[node1]), len(self.paths[node2]))\n return sum(self.path_dists[node1][i:]) + sum(self.path_dists[node2][i:])", "def euclidean_distance(x1, x2):\n return np.sqrt(np.sum(np.power(x1 - x2, 2)))", "def distance(self, other):\n xd, yd = self.x-other.x, self.y-other.y\n return math.sqrt(xd**2 + yd**2)", "def dist(a, b):\n x0, y0 = a # Destructuring assignment\n x1, y1 = b\n\n return math.sqrt((x1 - x0)**2 + (y1 - y0)**2)", "def euclidian_distance(x: np.arrays, y: np.arrays):\r\n diff = x - np.mean(y, axis=0)\r\n return np.sqrt(np.dot(diff.T, diff))", "def distance(p1, p2):\n return np.linalg.norm(np.array(p1) - np.array(p2))", "def distance(self, p1, p2):\n return math.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)", "def distance(p_1, p_2):\n return ((p_2[0] - p_1[0]) ** 2 + (p_2[1] - p_1[1]) ** 2 \\\n + (p_2[2] - p_1[2]) ** 2) ** 0.5", "def _get_dist(self, p1, p2): \r\n\r\n distance = np.sqrt(\r\n (p1[0] - p2[0]) ** 2 +\r\n (p1[1] - p2[1]) ** 2 +\r\n (p1[2] - p2[2]) ** 2)\r\n\r\n return distance", "def distance(p1, p2):\n return math.sqrt((p1[0]-p2[0])**2 + (p1[1]-p2[1])**2 + (p1[2]-p2[2])**2)", "def get_adjacency_distance(self, other):\n if self.size != other.size:\n raise ValueError(\"The permutations must be of the same size.\")\n self_adj_mat = self.get_adjacency_matrix()\n other_adj_mat = other.get_adjacency_matrix()\n n_adj = 0\n for i in xrange(self.size):\n for j in xrange(self.size):\n if i == j:\n continue\n if self_adj_mat[i, j] * other_adj_mat[i, j] == 1:\n n_adj += 1\n d = self.size - n_adj - 1\n return d", "def distance(self,other):\n return math.sqrt((self.x - other.x)**2 +(self.y - other.y)**2)", "def distance(a,b):\n return np.sqrt( (x(a)-x(b))**2 + (y(a)-y(b))**2 )", "def _pdist(a, b):\n a, b = 
np.asarray(a), np.asarray(b)\n if len(a) == 0 or len(b) == 0:\n return np.zeros((len(a), len(b)))\n a2, b2 = np.square(a).sum(axis=1), np.square(b).sum(axis=1)\n r2 = -2. * np.dot(a, b.T) + a2[:, None] + b2[None, :]\n r2 = np.clip(r2, 0., float(np.inf))\n return r2", "def _pdist(a, b):\n a, b = np.asarray(a), np.asarray(b)\n if len(a) == 0 or len(b) == 0:\n return np.zeros((len(a), len(b)))\n a2, b2 = np.square(a).sum(axis=1), np.square(b).sum(axis=1)\n r2 = -2. * np.dot(a, b.T) + a2[:, None] + b2[None, :]\n r2 = np.clip(r2, 0., float(np.inf))\n return r2", "def dist(self, n1, n2):\n if n2.identifier in self.edges[n1.identifier]:\n return self.edges[n1.identifier][n2.identifier]\n else:\n return float(\"inf\")", "def distance(pt1, pt2):\n return (pt1[0] - pt2[0]) ** 2 + (pt1[1] - pt2[1]) ** 2", "def euclidean_distance(x, y):\n return sqrt(sum(pow(a - b, 2) for a, b in zip(x, y)))", "def euclidean_distance(p1, p2):\n distance = 0\n for i in range(len(p1)-1):\n distance += (p1[i]-p2[i])**(2)\n return sqrt(distance)", "def distance(p1, p2):\n\n return sqrt(((p2[0] - p1[0])**2) + ((p2[1] - p1[1])**2))", "def calculate_distance(self, other):\n return math.sqrt((self.center[0] - other.center[0]) ** 2 + (self.center[1] - other.center[1]) ** 2)", "def euclidean_distance(p1, p2):\n dist = np.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2)\n return dist", "def dist(self, a, b, l):\n # works for non-arrays\n return sum( ((i-j)/k)**2 for i,j,k in zip(a, b, l) )", "def distance(p1, p2):\n return sqrt((p1[0]-p2[0])**2 + (p1[1]-p2[1])**2)", "def distance(a,b): \r\n return math.sqrt((a[0] - b[0])**2 + (a[1] - b[1])**2)", "def distance(brd1,brd2):\n\n step=brd1[1,0]-brd1[0,0]\n return np.sum(np.abs(brd1[:,1]-brd2[:,1]))*step", "def distance(self, pt1, pt2):\r\n # productive #frequent\r\n if frequent: profprint()\r\n d = ((float(pt1[0]) - float(pt2[0])) ** 2 + (float(pt1[1]) - float(pt2[1])) ** 2 + (float(pt1[2]) - float(pt2[2])) ** 2) ** 0.5\r\n return d", "def _dist(A, B):\n return np.sqrt(np.einsum(\"ijk->ij\", (A[:, None, :] - B) ** 2))", "def distance_between(self, first_node_object, second_node_object):\n\n (first_column, first_row) = first_node_object\n (second_column, second_row) = second_node_object\n\n return numpy.sqrt((first_row - second_row) ** 2 +\n (first_column - second_column) ** 2)", "def distance(self, other):\n x_diff_sq = (self.x-other.x)**2\n y_diff_sq = (self.y-other.y)**2\n return (x_diff_sq + y_diff_sq)**0.5", "def get_distance(pt1,pt2):\r\n x1 = pt1[1]\r\n y1 = pt1[0]\r\n x2 = pt2[1]\r\n y2 = pt2[0]\r\n d = np.sqrt((x2-x1)**2 + (y2-y1)**2)\r\n return d", "def e_score(self, other):\n sum_distances = 0.0\n num_dists = 0.0\n for c1 in other.l_child:\n for c2 in other.r_child:\n sum_distances += _fast_norm_diff(c1, c2)\n num_dists += 1.0\n\n return -sum_distances / num_dists", "def find_difference(seg1, seg2):\n letter_score = []\n for c1,c2 in zip(seg1, seg2):\n letter_score.append(float(len(breadth_first(key_graph, c1, c2)) - 1))\n return sum(letter_score)/len(letter_score)", "def distance(self, other):\n ...", "def distance(self, first_tape, second_tape):\n pairs = zip(first_tape, second_tape)\n return math.sqrt(abs(sum(map((lambda n: self.subsq(*n)), pairs))))" ]
[ "0.7292896", "0.7110288", "0.6867631", "0.68615735", "0.6761212", "0.6698261", "0.66788447", "0.6668662", "0.6659703", "0.66497093", "0.6615427", "0.66116714", "0.66099334", "0.6598748", "0.65919584", "0.65918255", "0.65908754", "0.6572982", "0.6572364", "0.6497199", "0.6496156", "0.6495265", "0.6494313", "0.64905334", "0.6485524", "0.6484251", "0.6482878", "0.647817", "0.64742154", "0.64686596", "0.6467548", "0.6463098", "0.64593184", "0.6456715", "0.6452807", "0.64497846", "0.64475554", "0.6446301", "0.6439913", "0.6438248", "0.6430404", "0.6424478", "0.64206845", "0.64185655", "0.6413095", "0.64089525", "0.64077127", "0.6400137", "0.6397607", "0.63968587", "0.637047", "0.63673496", "0.6364599", "0.6361232", "0.6355051", "0.63520277", "0.6349689", "0.6348868", "0.6348643", "0.63482076", "0.63480914", "0.6345093", "0.63429856", "0.6340095", "0.6339812", "0.6339043", "0.63348866", "0.6333138", "0.633048", "0.63258046", "0.6321539", "0.63165087", "0.6305989", "0.62877655", "0.6286062", "0.6284658", "0.62712234", "0.62699616", "0.6263701", "0.62622195", "0.62622195", "0.62595034", "0.625551", "0.6253768", "0.62531084", "0.62529874", "0.6250255", "0.624048", "0.6240249", "0.62393904", "0.62353164", "0.62350726", "0.62341505", "0.62037206", "0.62030834", "0.6194426", "0.6190303", "0.61902183", "0.61892176", "0.61877465", "0.61848354" ]
0.0
-1
Build a graph from a connectivity json file
def open_graph(scan_id): infile = "%s%s_connectivity.json" % (connectivity_dir, scan_id) G = nx.Graph() with open(infile) as f: data = json.load(f) for i, item in enumerate(data): if item["included"]: for j, conn in enumerate(item["unobstructed"]): if conn and data[j]["included"]: assert data[j]["unobstructed"][i], "Graph should be undirected" G.add_edge( item["image_id"], data[j]["image_id"], weight=distance(item, data[j]), ) return G
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def open_graph(connectDir, scan_id):\n infile = \"%s%s_connectivity.json\" % (connectDir, scan_id)\n G = nx.Graph()\n with open(infile) as f:\n data = json.load(f)\n for i, item in enumerate(data):\n if item[\"included\"]:\n for j, conn in enumerate(item[\"unobstructed\"]):\n if conn and data[j][\"included\"]:\n assert data[j][\"unobstructed\"][i], \"Graph should be undirected\"\n G.add_edge(\n item[\"image_id\"],\n data[j][\"image_id\"],\n weight=distance(item, data[j]),\n )\n return G", "def json_to_nx(filename):\n\n with open(filename, \"r\") as file_name:\n imported_file = json.load(file_name)\n\n nx_graph = json_graph.node_link_graph(imported_file)\n\n return(nx_graph)", "def load_graph(json_file, directed=False):\n\n data = ''\n\n with open(json_file, 'r') as json_file:\n data = json.loads(json_file.read())\n\n return DiGraph(data) if directed else Graph(data)", "def build_graph():\n file = open(\"../data/data.json\", \"r\")\n data = json.load(file)\n node_dict = {}\n for id in data:\n node_dict[id] = Node(data[id][\"name\"], data[id][\"product\"], data[id][\"production_volume\"])\n for id in data:\n current_node = node_dict[id]\n for costumer_id in data[id][\"costumers\"]:\n current_node.costumers.append(node_dict[str(costumer_id)])\n current_node.out_edge_capacity_drop[node_dict[str(costumer_id)].name] = 0\n for supplier_id in data[id][\"suppliers\"]:\n current_node.suppliers.append(node_dict[str(supplier_id)])\n current_node.in_edge_capacity_drop[node_dict[str(supplier_id)].name] = 0\n return node_dict", "def reconstruct_network_from_dgraph_json(data):\n G = nx.MultiDiGraph(crs=ox.settings.default_crs)\n for node in data:\n if \"location\" in node:\n attributes = node.copy()\n attributes[\"x\"] = attributes[\"location\"][\"coordinates\"][0]\n attributes[\"y\"] = attributes[\"location\"][\"coordinates\"][1]\n attributes.pop(\"location\", 0)\n attributes.pop(\"connects_to\", 0)\n G.add_node(node[\"uid\"], **attributes)\n for node in data:\n if \"connects_to\" in node:\n node_uid = node[\"uid\"]\n if isinstance(node[\"connects_to\"], list):\n for neighbor in node[\"connects_to\"]:\n neighbor_uid = neighbor[\"uid\"]\n if neighbor_uid in G.nodes:\n G.add_edge(node_uid, neighbor_uid)\n else:\n neighbor_uid = node[\"connects_to\"][\"uid\"]\n if neighbor_uid in G.nodes:\n G.add_edge(node_uid, neighbor_uid)\n return G", "def read(cls, inputfilename):\n\n # import json\n # with open(inputfilename, 'w') as infile:\n # data = json.load(infile)\n # g = nx.readwrite.json_graph.node_link_graph(data)\n # return cls(network = g)\n return cls(network=nx.read_gpickle(inputfilename))", "def json_to_neo4j(filename):\n authenticate(ENV[\"DB_URL\"], ENV[\"DB_USERNAME\"],ENV[\"DB_PASSWORD\"]) # Accessing the NEO4J server\n neo4j_graph = Graph()\n string_to_instance_mapping = dict()\n\n with open(filename, \"r\") as f:\n json_data = json.load(f)\n for node in json_data[\"nodes\"]:\n node_instance = Node(node[\"type\"], id=node[\"id\"])\n string_to_instance_mapping[node[\"id\"]] = node_instance\n for link in json_data[\"links\"]:\n source_node_instance = string_to_instance_mapping[link[\"source\"]]\n target_node_instance = string_to_instance_mapping[link[\"target\"]]\n edge = Relationship(source_node_instance, \"MAPS TO\", target_node_instance)\n neo4j_graph.create(edge)", "def read_graph(filename):\n\n print(\"\\n\\n========== Loading graph: \" + filename + '==================')\n edges = {}\n\n inFile = open(filename)\n for line in inFile:\n roadInfo = line.split()\n\n # Skip blank lines, read in 
contents from non-empty lines.\n if (len(roadInfo) > 0):\n srcCity = roadInfo[0]\n destCity = roadInfo[1]\n\n if srcCity in edges:\n edges[srcCity] = edges[srcCity] + [destCity]\n else:\n edges[srcCity] = [destCity]\n\n if destCity in edges:\n edges[destCity] = edges[destCity] + [srcCity]\n else:\n edges[destCity] = [srcCity]\n\n print(\" done.\\n\")\n return edges", "def build_graph(file_name):\n graph = MyGraph()\n with open(file_name, 'r') as fin:\n line = fin.readline().replace('\\n', '')\n while line != \"\":\n vals = line.split(':')\n graph.add_node(vals[0], pos=(int(vals[1]),int(vals[2])))\n line = fin.readline().replace('\\n', '')\n dest = fin.readline().replace('\\n','').split('\\t')\n line = fin.readline().replace('\\n', '')\n edges = []\n while line != '':\n node_info = line.split('\\t')\n src = node_info[0]\n for node in range(1,len(node_info)):\n if node_info[node] != '':\n if (dest[node],src) not in edges:\n edges.append((src,dest[node], node_info[node]))\n line = fin.readline().replace('\\n','')\n for edge in edges:\n graph.add_edge(edge[0], edge[1], weight=int(edge[2]))\n\n return graph", "def ReadGraph(inputFileName):\n inputFile = open(inputFileName)\n jsonGraphArray = json.load(inputFile)\n graph = Graph.Graph()\n graph.load_from_json(jsonGraphArray)\n inputFile.close()\n return graph", "def read_graph(path):\n edge_list = pd.read_csv(path).values.tolist()\n graph = nx.from_edgelist(edge_list)\n return graph", "def load_from_json(self, file_name: str) -> bool:\n\n try:\n with open(file_name, \"r\") as f:\n dict_graph = json.load(f)\n dw_graph1 = DiGraph()\n for nodes in dict_graph[\"Nodes\"]:\n try:\n position = (nodes[\"pos\"]).split(\",\") # Give a string of the position\n pos_tuple = tuple(float(i) for i in position)\n id = nodes[\"id\"] # Give a the node id\n dw_graph1.add_node(id, pos_tuple)\n except KeyError:\n id = nodes[\"id\"] # Give a the node id\n dw_graph1.add_node(id, None)\n except AttributeError:\n id = nodes[\"id\"] # Give a the node id\n dw_graph1.add_node(id, None)\n\n for edges in dict_graph[\"Edges\"]:\n src = edges[\"src\"]\n weight = edges[\"w\"]\n dest = edges[\"dest\"]\n dw_graph1.add_edge(src, dest, weight)\n\n self.dw_graph = dw_graph1\n return True\n\n except IOError as e:\n print(e)\n return False", "def read_toml(filename: str) -> Graph:\n\n file_load = toml.load(filename)\n contents = file_load[\"routers\"]\n address = dict()\n for item in contents:\n address[item['address']] = item['name']\n neighbors = contents[0]['neighbors']\n address_copy = address.copy()\n\n for item in neighbors:\n new_dict = item['address']\n name = address_copy[new_dict]\n item['name'] = name\n for item in contents:\n n = item['neighbors']\n \n\n data = Graph() #Trying to add dictionary values to data so it can be called in find path function.\n data.set_vertex(item['name'])\n for i in n:\n i['name'] = address[i['address']]\n data.add_edge(item['name'], i['name'], i['cost'])\n return data", "def load_from_json(self, file_name: str) -> bool:\n flag = True\n try:\n with open(file_name, 'r') as jsonFile:\n load = json.load(jsonFile)\n graphJson = DiGraph()\n for node in load[\"Nodes\"]:\n if \"pos\" in node:\n posJ = tuple(map(float, str(node[\"pos\"]).split(\",\")))\n graphJson.add_node(node_id=node[\"id\"], pos=posJ)\n else:\n graphJson.add_node(node_id=node[\"id\"])\n for edge in load[\"Edges\"]:\n graphJson.add_edge(id1=edge[\"src\"], id2=edge[\"dest\"], weight=edge[\"w\"])\n self._graph = graphJson\n # print(\"load successes\")\n except Exception as e:\n 
print(e)\n print(\"load failed\")\n flag = False\n finally:\n return flag", "def load_digraph_json(f: Union[IO, os.PathLike, str]) -> nx.DiGraph:\n # Load the object\n if isinstance(f, (os.PathLike, str)):\n with open(f, 'r', encoding='utf-8') as file:\n json_obj = json.load(file)\n else:\n json_obj = json.load(f)\n\n # Obtain the NetworkX graph\n graph = node_link_graph(json_obj, directed=True, multigraph=False)\n return graph", "def build_graph(self):\n self.import_tree(ZOO_PATH, self.import_zoo, self.verify_zoos)\n self.import_tree(WILD_PATH, self.import_wild, self.verify_wilds)\n self.import_tree(PANDA_PATH, self.import_redpanda, self.verify_pandas)\n self.import_tree(MEDIA_PATH, self.import_media, self.verify_media)", "def load_network(adj_network):\n fin = open(adj_network, \"r\")\n graph = collections.defaultdict(list)\n for line in tqdm(fin):\n line = line.replace(\"\\n\", \"\")\n args = line.split(\"\\t\")\n node = args[0].replace(\"(\", \"\")\n node = node.replace(\")\", \"\")\n node = int(node)\n assert node not in graph\n graph[node] = list(map(float, args[1:]))\n return graph", "def read_graph(filename, directed=True):\n if not directed:\n G = nx.Graph()\n else:\n G = nx.DiGraph()\n with open(filename) as f:\n for line in f:\n d = line.split()\n G.add_edge(int(d[0]), int(d[1]))\n print('Read Graph')\n return G", "def from_config(dictionary):\n nodes = {}\n\n for node_name in dictionary:\n nodes[node_name] = Node(node_name)\n for node_name in dictionary:\n for second_node_data in dictionary[node_name]:\n connect_one_way(nodes[node_name], nodes[second_node_data[0]], second_node_data[1])\n return Graph(nodes.values())", "def create_social_graph(file):\n social_graph = NonDirectionalGraph(\"SocialGraph\")\n with open(file, \"rt\") as f:\n data = f.readlines()\n n_friendship = 0 # Represents the number of friendships in the graph in each iteration\n highest_n_friendship = 0 # Captures the highest record of n_friendship in the graph\n highest_n_neighbors_per_node_dict = {} # Captures the highest record of friendship per node\n for line in data:\n split_line = line.split()\n if \"became\" in split_line: # \"became\" is in lines where persons become connected\n for name in [split_line[0], split_line[2]]:\n # The following if statement makes sure to instantiate the node and adds it to the graph\n if name not in social_graph:\n node = Node(name)\n social_graph.add_node(node)\n highest_n_neighbors_per_node_dict[name] = 0 ##\n social_graph.add_edge(split_line[0],split_line[2]) # Adds a connection between the nodes\n n_friendship += 1 # Updates the number of friendships\n # The following for loop updates the highest number of friends (neighbors) if it changes\n for name in [split_line[0], split_line[2]]:\n if len(social_graph.nodes[name].neighbors) > highest_n_neighbors_per_node_dict[name]:\n highest_n_neighbors_per_node_dict[name] = len(social_graph.nodes[name].neighbors)\n elif \"cancelled\" in split_line: # \"became\" is in lines where persons become disconnected\n social_graph.remove_edge(split_line[0], split_line[2])\n n_friendship -= 1 # Updates the number of friendships\n # In case any of the words \"cancelled\" or \"became\" is in the line\n else:\n print(\"Unrecognized line\")\n # The following for loop updates the highest number of friendship if it changes\n if n_friendship > highest_n_friendship:\n highest_n_friendship = n_friendship\n return social_graph, highest_n_friendship, highest_n_neighbors_per_node_dict", "def build_graph(filepath):\n graph = defaultdict(list)\n with 
open(filepath, 'r') as file:\n for edge in file:\n head, tail = edge.split()\n graph[head].append(tail)\n return graph", "def make_graph_from_file(filename):\n file = open(filename, \"r\")\n lines = file.readlines()\n file.close()\n\n # Check if it is a graph or digraph\n graph_or_digraph_str = lines[0].strip() if len(lines) > 0 else None\n if graph_or_digraph_str != \"G\" and graph_or_digraph_str != \"D\":\n raise Exception(\"File must start with G or D.\")\n is_bidirectional = graph_or_digraph_str == \"G\"\n\n g = Graph()\n\n # Add all vertices\n for vertex_key in lines[1].strip(\"() \\n\").split(\",\"):\n g.add_vertex(vertex_key)\n\n # Add all edges\n for line in lines[2:]:\n # Split components of edge\n new_edge = line.strip(\"() \\n\").split(\",\")\n if len(new_edge) < 2 or len(new_edge) > 3:\n raise Exception(\"Lines adding edges must include 2 or 3 values\")\n\n # Get vertices\n vertex1, vertex2 = new_edge[:2]\n\n # Get weight if it exists\n weight = int(new_edge[2]) if len(new_edge) == 3 else None\n\n # Add edge(s)\n g.add_edge(vertex1, vertex2, weight)\n if is_bidirectional:\n g.add_edge(vertex2, vertex1, weight)\n\n return g\n # Check if first line is 'G' or 'D' and store the value. If neither, raise an exception\n # For each vertex id in first line, add a vertex to the graph\n # For each of the following lines:\n # Extract the vertex ids and the (optional) weight, and add an edge to the graph\n # If it is a Graph and not a Digraph, add another edge in the opposite direction\n # Raise an exception if line contains too many (or too few) item\n raise Exception(f\"File must begin with G or D, found {firstline}\")", "def test_read_json1():\n s = JsonSource()\n g = s.parse(os.path.join(RESOURCE_DIR, 'valid.json'))\n nodes = {}\n edges = {}\n for rec in g:\n if rec:\n if len(rec) == 4:\n edges[(rec[0], rec[1])] = rec[3]\n else:\n nodes[rec[0]] = rec[1]\n\n assert len(nodes.keys()) == 6\n assert len(edges.keys()) == 5\n\n n = nodes['MONDO:0017148']\n assert 'id' in n and n['id'] == 'MONDO:0017148'\n assert n['name'] == 'heritable pulmonary arterial hypertension'\n assert n['category'][0] == 'biolink:Disease'\n\n e = edges[('HGNC:11603', 'MONDO:0017148')]\n assert e['subject'] == 'HGNC:11603'\n assert e['object'] == 'MONDO:0017148'\n assert e['predicate'] == 'biolink:related_to'\n assert e['relation'] == 'RO:0004013'", "def build_from_file(self, topology_file, topology_format):\n with open(topology_file) as infile:\n for line in infile:\n if line.startswith(\"#\"):\n continue\n else:\n if topology_format == 0:\n x = line.split(\"\\n\")[0].split(\"|\")\n as1 = int(x[0])\n as2 = int(x[1])\n relationship = int(x[2])\n else:\n x = line.split(\"\\n\")[0].split(\"\\t\")\n if x[2] == \"p2c\":\n as1 = int(x[0])\n as2 = int(x[1])\n relationship = -1\n elif x[2] == \"c2p\":\n as1 = int(x[1])\n as2 = int(x[0])\n relationship = -1\n elif x[2] == \"p2p\":\n as1 = int(x[1])\n as2 = int(x[0])\n relationship = 0\n else:\n continue\n\n if not self.has_edge(as1, as2):\n self.add_edge(as1, as2, relationship=relationship, as1=as1, as2=as2)", "def load_nav_graphs(scans):\n\n def distance(pose1, pose2):\n \"\"\" Euclidean distance between two graph poses \"\"\"\n return (\n (pose1[\"pose\"][3] - pose2[\"pose\"][3]) ** 2\n + (pose1[\"pose\"][7] - pose2[\"pose\"][7]) ** 2\n + (pose1[\"pose\"][11] - pose2[\"pose\"][11]) ** 2\n ) ** 0.5\n\n graphs = {}\n for scan in scans:\n with open(\"data/connectivity/%s_connectivity.json\" % scan) as f:\n G = nx.Graph()\n positions = {}\n data = json.load(f)\n for i, item in 
enumerate(data):\n if item[\"included\"]:\n for j, conn in enumerate(item[\"unobstructed\"]):\n if conn and data[j][\"included\"]:\n positions[item[\"image_id\"]] = np.array(\n [item[\"pose\"][3], item[\"pose\"][7], item[\"pose\"][11]]\n )\n assert data[j][\"unobstructed\"][\n i\n ], \"Graph should be undirected\"\n G.add_edge(\n item[\"image_id\"],\n data[j][\"image_id\"],\n weight=distance(item, data[j]),\n )\n nx.set_node_attributes(G, values=positions, name=\"position\")\n graphs[scan] = G\n return graphs", "def from_dict_of_lists(graph,directed=False):", "def load_nav_graphs(scans):\n\n def distance(pose1, pose2):\n \"\"\" Euclidean distance between two graph poses \"\"\"\n return (\n (pose1[\"pose\"][3] - pose2[\"pose\"][3]) ** 2\n + (pose1[\"pose\"][7] - pose2[\"pose\"][7]) ** 2\n + (pose1[\"pose\"][11] - pose2[\"pose\"][11]) ** 2\n ) ** 0.5\n\n graphs = {}\n for scan in scans:\n with open(\"connectivity/%s_connectivity.json\" % scan) as f:\n G = nx.Graph()\n positions = {}\n data = json.load(f)\n for i, item in enumerate(data):\n if item[\"included\"]:\n for j, conn in enumerate(item[\"unobstructed\"]):\n if conn and data[j][\"included\"]:\n positions[item[\"image_id\"]] = np.array(\n [item[\"pose\"][3], item[\"pose\"][7], item[\"pose\"][11]]\n )\n assert data[j][\"unobstructed\"][\n i\n ], \"Graph should be undirected\"\n G.add_edge(\n item[\"image_id\"],\n data[j][\"image_id\"],\n weight=distance(item, data[j]),\n )\n nx.set_node_attributes(G, values=positions, name=\"position\")\n graphs[scan] = G\n return graphs", "def load_graph(self, filename):\n try:\n file_extention = list(filename.split(\".\"))[-1]\n if file_extention == \"gml\":\n self.graph = nx.read_gml(filename)\n if file_extention == \"adjlist\":\n self.graph = nx.read_adjlist(filename)\n if file_extention == \"yaml\":\n self.graph = nx.read_yaml(filename)\n except Exception as e:\n print(\"Error in loading Graph file: The error is\", e)", "def read_file():\n\tgraph = {}\n\twith open('data/SCC.txt', 'r') as f:\n\t\told_index = '1'\n\t\tadjacency_list = []\n\t\tfor line in f:\n\t\t\tdata = line.split()\n\t\t\tnew_index = data[0]\n\t\t\tif old_index != new_index:\n\t\t\t\tgraph[old_index] = {'adj_nodes': adjacency_list, 'is_explored': False}\n\t\t\t\told_index = new_index\n\t\t\t\tadjacency_list = []\n\t\t\tadjacency_list.append(data[1])\n\t\tgraph[old_index] = {'adj_nodes': adjacency_list, 'is_explored': False}\n\n\tfor i in range(1, NUM_VERT + 1):\n\t\tif graph.get(str(i), False) is False:\n\t\t\tgraph[str(i)] = {'adj_nodes': [], 'is_explored': False}\n\treturn graph", "def parse_dependency_graph(fn):\n nodes = {}\n edges = []\n\n with open(fn) as f:\n for line in f:\n if line.startswith(\"digraph {\") or line.startswith(\"}\"):\n continue\n if line.startswith(\"\\t\\t\"):\n # edge\n n1, n2 = map(int, line.split(\"->\"))\n edges.append((n1,n2))\n if n1 not in nodes:\n nodes[n1] = None\n if n2 not in nodes:\n nodes[n2] = None\n elif line.startswith(\"\\t\"):\n # node\n node, rest = line.split(None, 1)\n node = int(node)\n _, jobname = rest.split(\"\\\"\", 1)\n jobname = jobname.strip()\n nodes[node] = jobname\n\n return nodes, edges", "def load_graph(file_name, directed=True):\n G = nx.DiGraph() if directed else nx.Graph()\n with open(file_name, \"r\") as f:\n for line in f:\n tokens = line.split()\n u = int(tokens[0])\n v = int(tokens[1])\n if len(tokens) > 2:\n w = float(tokens[2])\n G.add_edge(u, v, weight=w)\n else:\n G.add_edge(u,v)\n return G", "def multi_edge():\n from networkx.readwrite import json_graph\n 
import networkx as nx\n import autonetkit\n # returns a house graph\n data = {'directed': False,\n 'graph': [],\n 'links': [{'_ports': {'r4': 2, 'r5': 1},\n 'raw_interfaces': {},\n 'source': 0,\n 'target': 1},\n {'_ports': {'r2': 3, 'r4': 1},\n 'raw_interfaces': {},\n 'source': 0,\n 'target': 3},\n {'_ports': {'r2': 4, 'r4': 3},\n 'raw_interfaces': {},\n 'source': 0,\n 'target': 3},\n {'_ports': {'r3': 3, 'r5': 2},\n 'raw_interfaces': {},\n 'source': 1,\n 'target': 4},\n {'_ports': {'r1': 1, 'r2': 1},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 3},\n {'_ports': {'r1': 3, 'r2': 5},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 3},\n {'_ports': {'r1': 2, 'r3': 1},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 4},\n {'_ports': {'r1': 4, 'r3': 4},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 4},\n {'_ports': {'r1': 5, 'r3': 5},\n 'raw_interfaces': {},\n 'source': 2,\n 'target': 4},\n {'_ports': {'r2': 2, 'r3': 2},\n 'raw_interfaces': {},\n 'source': 3,\n 'target': 4}],\n 'multigraph': True,\n 'nodes': [{'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r4 to r2', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r4 to r5', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r4 to r2', 'id': 'eth2'}},\n 'asn': 2,\n 'device_type': 'router',\n 'id': 'r4',\n 'label': 'r4',\n 'x': 675,\n 'y': 300},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r5 to r4', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r5 to r3', 'id': 'eth1'}},\n 'asn': 2,\n 'device_type': 'router',\n 'id': 'r5',\n 'label': 'r5',\n 'x': 675,\n 'y': 500},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r1 to r2', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r1 to r3', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r1 to r2', 'id': 'eth2'},\n 4: {'category': 'physical', 'description': 'r1 to r3', 'id': 'eth3'},\n 5: {'category': 'physical', 'description': 'r1 to r3', 'id': 'eth4'}},\n 'asn': 1,\n 'device_type': 'router',\n 'id': 'r1',\n 'label': 'r1',\n 'x': 350,\n 'y': 400},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r2 to r1', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r2 to r3', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r2 to r4', 'id': 'eth2'},\n 4: {'category': 'physical', 'description': 'r2 to r4', 'id': 'eth3'},\n 5: {'category': 'physical', 'description': 'r2 to r1', 'id': 'eth4'}},\n 'asn': 1,\n 'device_type': 'router',\n 'id': 'r2',\n 'label': 'r2',\n 'x': 500,\n 'y': 300},\n {'_ports': {0: {'category': 'physical', 'description': None},\n 1: {'category': 'physical', 'description': 'r3 to r1', 'id': 'eth0'},\n 2: {'category': 'physical', 'description': 'r3 to r2', 'id': 'eth1'},\n 3: {'category': 'physical', 'description': 'r3 to r5', 'id': 'eth2'},\n 4: {'category': 'physical', 'description': 'r3 to r1', 'id': 'eth3'},\n 5: {'category': 'physical', 'description': 'r3 to r1', 'id': 'eth4'}},\n 'asn': 1,\n 'device_type': 'router',\n 'id': 'r3',\n 'label': 'r3',\n 'x': 500,\n 'y': 500}]}\n graph = json_graph.node_link_graph(data)\n anm = autonetkit.anm.NetworkModel()\n g_in = anm.add_overlay(\"input\")\n g_in._replace_graph(nx.MultiGraph(graph))\n # TODO: check if should build overlays here rather than clone in?\n g_phy = anm[\"phy\"]\n 
g_phy._replace_graph(graph)\n return anm", "def read_file(network_filename, user_by_city_filename=None):\n graph = read_dictlist_from_file(network_filename)\n\n gg = Graph(directed=False) # new Graph object\n\n user_id_map = {} # storing new id info\n new_id = 0\n for user_id in graph:\n temp_users = []\n temp_users.append(user_id)\n for friend in graph[user_id]:\n temp_users.append(friend)\n for id1 in temp_users:\n if id1 not in user_id_map:\n user_id_map[id1] = new_id\n gg.add_vertex() # index for this vertex will be new_id\n new_id += 1\n if id1 > user_id:\n gg.add_edge(gg.vertex(user_id_map[user_id]),\n gg.vertex(user_id_map[id1]))\n print \"Done reading the graph.\"\n if user_by_city_filename is None:\n return (gg, None)\n if user_by_city_filename is not None:\n cities = read_dict_from_file(user_by_city_filename)\n # Adding vertex property as city\n city_prop = gg.new_vertex_property(\"int\")\n for user_id in cities:\n city_prop[gg.vertex(user_id_map[user_id])] = cities[user_id]\n print \"Done reading the city.\"\n return (gg, city_prop)", "def read_graph(filename):\n return nx.read_edgelist(filename, create_using=nx.DiGraph(), nodetype=str)", "def _build_graph(self):\n pass", "def build_graph(self):\n pass", "def read_graph(file_name):\r\n with open(file_name, 'r') as f:\r\n lines = f.readlines()\r\n first_line = lines[0].strip().split()\r\n no_vertices = int(first_line[0])\r\n new_graph = UndirectedGraph(no_vertices)\r\n for line in lines[1:]:\r\n if line == \"\":\r\n continue\r\n line = line.strip().split()\r\n _from, _to, _cost = int(line[0]), int(line[1]), int(line[2])\r\n new_graph.add_edge(_from, _to, _cost)\r\n return new_graph", "def nx_graph_from_dot_file(dot_file_path):\n # this does not understand dot statements like X->Y,Z;\n # nx_graph = nx.nx_pydot.read_dot(dot_file_path)\n\n nodes, edges = DotTool.read_dot_file(dot_file_path)\n g = nx.DiGraph()\n g.add_edges_from(edges)\n\n return g", "def dependency_parse_to_graph(filename):\n data = ''\n dtree = []\n with open(filename, 'r') as f:\n for line in f:\n if line[0] != '#':\n if 'root' in line:\n elements = line.split('\\t')\n if elements[7] == 'root':\n elements[7] = 'ROOT'\n line = '\\t'.join(elements)\n data += line\n if line == '\\n':\n dg = DependencyGraph(data.decode('utf8'))\n dtree.append(dg)\n data = ''\n return dtree", "def loadDataZachary(fileName):\n\n \"Initialize a graph\"\n G = nx.Graph()\n\n \"Open file\"\n f = open(fileName)\n\n line = f.readline().rstrip(\"\\n\").rstrip(\"\\r\")\n while line:\n if(line[0]!=\"%\"):\n ls =line.split(' ')\n num,nums=int(ls[0]),int(ls[1])\n G.add_edge(num,nums)\n line = f.readline().rstrip(\"\\n\").rstrip(\"\\r\")\n\n \"Closing the file\"\n f.close()\n\n return G, 'Zachary'", "def graph_from_dot(dot_file):\n (graph, ) = pydot.graph_from_dot_file(str(dot_file))\n return graph", "def get_graph(path: str) -> nx.Graph:\n with open(path, 'r') as f:\n list_of_edges = [line.strip().split() for line in f.readlines()]\n g = nx.Graph()\n g.add_edges_from(list_of_edges)\n return g", "def parse_graphml_file(filename: str, digraph=True):\n graphml_graph = nx.read_graphml(filename)\n if digraph:\n graphml_graph = graphml_graph.to_directed()\n\n return graphml_graph", "def load_graph(file_name):\r\n citizens = []\r\n f = open(file_name, 'r')\r\n number_citizens = int(f.readline())\r\n \r\n # creates the citizen's list.\r\n for i in range(number_citizens):\r\n # creates citizen object\r\n citizen = Citizen(i)\r\n citizens.append(citizen)\r\n\r\n # we need this second loop because we cannot 
create the list of friends \r\n # if we don't have the whole list of citizens in memory.\r\n for citizen in citizens:\r\n # loads basic infor\r\n inf_list = f.readline().split(';')\r\n citizen.location = int(inf_list[1])\r\n citizen.influence_level = int(inf_list[2])\r\n citizen.proactivity_level = inf_list[3]\r\n \r\n # loads opinions\r\n opinions_list = f.readline().split(';')\r\n opinions = {}\r\n \r\n for op in opinions_list[:-1]:\r\n cat_weight = op.split(':')\r\n cat = int(cat_weight[0])\r\n weight = float(cat_weight[1])\r\n idea = Idea(1,'',cat, weight)\r\n opinions[cat] = idea\r\n\r\n citizen.opinions = opinions\r\n \r\n # loads friends \r\n friends_ids_list = f.readline().split(';')\r\n friends = []\r\n for friend_id in friends_ids_list[:-1]:\r\n # note that we match the position of the citizen in the citizens list with its id.\r\n friends.append(citizens[int(friend_id)])\r\n \r\n citizen.friends = friends\r\n \r\n f.close()\r\n \r\n return citizens", "def add_from_json(self, location):\r\n with open(location) as file:\r\n data = json.load(file)\r\n for metros in data[\"metros\"]:\r\n self.vertices[metros[\"code\"]] = Vertex(metros)\r\n for routes in data[\"routes\"]:\r\n start = routes[\"ports\"][0]\r\n destination = routes[\"ports\"][1]\r\n distance = routes[\"distance\"]\r\n self.edges[start].append(Edge(distance, start, destination))\r\n self.edges[destination].append(Edge(distance, destination, start))", "def read_graph(args):\n dataset = pd.read_csv(args.features_path).values.tolist()\n edges = {}\n edges[\"positive_edges\"] = [edge[0:2] for edge in dataset if edge[2] == 1]\n edges[\"negative_edges\"] = [edge[0:2] for edge in dataset if edge[2] == -1]\n edges[\"ecount\"] = len(dataset)\n edges[\"ncount\"] = len(set([edge[0] for edge in dataset]+[edge[1] for edge in dataset]))\n return edges", "def from_dot(path):\n return nx.Graph(nx.drawing.nx_agraph.read_dot(path))", "def read_graph(filename, node_index_one=0, node_index_two=1):\n tsv = csv.reader(open(filename), delimiter='\\t')\n return make_graph(tsv, node_index_one, node_index_two)", "def load_network(file_name):\n with open(file_name) as file:\n data = json.load(file)\n\n cost_fn = getattr(sys.modules[__name__], data[\"cost_func\"])\n act_fn = getattr(sys.modules[__name__], data[\"act_func\"])\n metric = getattr(sys.modules[__name__], data[\"metric\"])\n\n network = Network([1, 1], act_func=act_fn, cost_func=cost_fn, metric=metric)\n network.layers_num = data[\"layers_num\"]\n network.weights = [np.array(w) for w in data[\"weights\"]]\n network.biases = [np.array(b) for b in data[\"biases\"]]\n\n return network", "def read_graph(filename):\n with open(filename) as f:\n g = eval(f.read())\n return g", "def process_input(input_path):\n\n # Parse lines from input file into list\n with open(input_path, 'r') as input_file:\n lines = input_file.readlines()\n\n # Declare component lists and helper variables\n vertex_map = {} # Mapping of named vertices to indices, handles duplicate connections\n idx = 0\n edges = [] # List of (src, dst) tuples\n weights = [] # Weight of each edge\n\n for line in lines:\n # Parse each line of csv or text file\n if input_path.endswith('.csv'):\n parts = line.split(',')\n else:\n parts = line.split()\n\n # Add source vertex to list of vertices\n src = parts[0]\n if src not in vertex_map:\n vertex_map[src] = idx\n idx += 1\n\n # Add destination vertex to list of vertices\n dst = parts[1]\n if dst not in vertex_map:\n vertex_map[dst] = idx\n idx += 1\n\n # Add integer representation of edges to 
list of connections\n edges.append((vertex_map[src], vertex_map[dst]))\n weights.append(parts[2])\n\n # Get definite list of vertices\n vertices = vertex_map.keys()\n\n # Print graph information\n vprint(str(len(vertices)) + ' vertices')\n vprint(str(len(edges)) + ' edges')\n\n # Build IGraph representation of network\n graph = ig.Graph(edges, directed=False)\n graph.es['weight'] = [weights[e] for e in range(len(graph.es))]\n\n return graph, vertices", "def test_read_json2():\n s = JsonSource()\n g = s.parse(os.path.join(RESOURCE_DIR, 'valid.json'), provided_by='Test JSON')\n nodes = {}\n edges = {}\n for rec in g:\n if rec:\n if len(rec) == 4:\n edges[(rec[0], rec[1])] = rec[3]\n else:\n nodes[rec[0]] = rec[1]\n\n assert len(nodes.keys()) == 6\n assert len(edges.keys()) == 5\n\n n = nodes['MONDO:0017148']\n assert 'id' in n and n['id'] == 'MONDO:0017148'\n assert n['name'] == 'heritable pulmonary arterial hypertension'\n assert n['category'][0] == 'biolink:Disease'\n assert 'Test JSON' in n['provided_by']\n\n e = edges[('HGNC:11603', 'MONDO:0017148')]\n assert e['subject'] == 'HGNC:11603'\n assert e['object'] == 'MONDO:0017148'\n assert e['predicate'] == 'biolink:related_to'\n assert e['relation'] == 'RO:0004013'\n assert 'Test JSON' in e['provided_by']", "def _parse_ai2d_rst_json(data):\n # Separate dictionaries for each layer from the JSON dictionary\n grouping_dict_from_json = data['grouping']\n conn_dict_from_json = data['connectivity']\n rst_dict_from_json = data['rst']\n\n # Create the grouping graph using the nx.jit_graph function\n grouping_graph = nx.json_graph.jit_graph(grouping_dict_from_json,\n create_using=nx.DiGraph())\n\n # Check if connectivity annotation exists\n if conn_dict_from_json is not None:\n\n # Create connectivity graph manually\n connectivity_graph = nx.DiGraph()\n\n # Load nodes and edges\n nodes = conn_dict_from_json['nodes']\n edges = conn_dict_from_json['edges']\n\n # Add nodes manually to the connectivity graph\n for node in nodes:\n\n connectivity_graph.add_node(node[0], kind=node[1]['kind'])\n\n # Add edges manually to the connectivity graph\n for e in edges:\n\n connectivity_graph.add_edge(e[0], e[1], kind=e[2]['kind'])\n\n else:\n\n connectivity_graph = None\n\n # Create the RST graph using the nx.jit_graph function\n rst_graph = nx.jit_graph(rst_dict_from_json,\n create_using=nx.DiGraph())\n\n # Return all three graphs\n return grouping_graph, connectivity_graph, rst_graph", "def loadNetworkFromFile(self, file):\r\n for line in open(file, 'r'):\r\n fromVertex, toVertex, capacity = map(int, line.split())\r\n self.addEdge(fromVertex, toVertex, capacity)", "def readGraphFromYAMLFile(self, filename):\n self.G = nx.read_yaml(filename)\n # TODO: buiild up the indexes !!!", "def read_graph(graph_path):\n print(\"\\nTarget matrix creation started.\\n\")\n graph = nx.from_edgelist(pd.read_csv(graph_path).values.tolist())\n graph.remove_edges_from(graph.selfloop_edges())\n return graph", "def graph_from_file(self,\n filename,\n delimiter,\n source_label,\n target_label,\n data_source=None,\n source_attributes=[],\n target_attributes=[]):\n with open(filename) as f:\n reader = csv.DictReader(f, delimiter=delimiter)\n data = list(reader)\n return graph_from_dict(data, source_label, target_label, data_source,\n source_attributes, target_attributes)", "def file_parse():\n\n\tfilename = input(\"Enter the file path for your graph: \")\n\ttarget = open(filename, 'r')\n\n\ttarget_lines = [] \t# List of lines from target file\n\t\n\t# Grab the graph count and node/edge 
count for the first graph\n\ti = 0\n\tfor line in target:\n\t\tif i == 0:\n\t\t\tgraph_count = int(line)\n\t\telif i == 1:\n\t\t\tnode_count = int(line)\n\t\telif i == 2:\n\t\t\tedge_count = int(line)\n\t\telse:\t\n\t\t\ttarget_lines.append(line.strip('\\n'))\n\t\ti += 1\n\n\treturn graph_create(target_lines, graph_count, node_count, edge_count)", "def read_file(path):\n\tG = nx.Graph()\n\n\twith open(path, 'r') as in_file:\n\t\tfor line in in_file:\n\t\t\tcontents = line.split(\" \")\n\t\t\tu = int(contents[0])\n\t\t\tv = int(contents[1])\n\t\t\tstreet_type = int(contents[2])\n\t\t\ttime = int(contents[3])\n\t\t\tlength = int(contents[4])\n\t\t\tcost = 1/float(length)\n\t\t\t\n\t\t\tG.add_node(u)\n\t\t\tG.add_node(v)\n\t\t\tif street_type is 1:\n\t\t\t\tG.add_edge(u, v, street_type=street_type, time=time, length=length, cost=cost)\n\t\t\telse:\n\t\t\t\tG.add_edge(u, v, street_type=street_type, time=time, length=length, cost=cost)\n\t\t\t\tG.add_edge(v, u, street_type=street_type, time=time, length=length, cost=cost)\n\n\treturn G", "def _construct_graph(self):\n raise NotImplementedError", "def populate_graph(self):", "def read_graph_g2o(filename):\n Edge = namedtuple(\n 'Edge', ['Type', 'fromNode', 'toNode', 'measurement', 'information'])\n edges = []\n nodes = {}\n with open(filename, 'r') as file:\n for line in file:\n data = line.split()\n\n if data[0] == 'VERTEX_SE2':\n nodeId = int(data[1])\n pose = np.array(data[2:5], dtype=np.float32)\n nodes[nodeId] = pose\n\n elif data[0] == 'VERTEX_XY':\n nodeId = int(data[1])\n loc = np.array(data[2:4], dtype=np.float32)\n nodes[nodeId] = loc\n\n elif data[0] == 'EDGE_SE2':\n Type = 'P'\n fromNode = int(data[1])\n toNode = int(data[2])\n measurement = np.array(data[3:6], dtype=np.float32)\n uppertri = np.array(data[6:12], dtype=np.float32)\n information = np.array(\n [[uppertri[0], uppertri[1], uppertri[2]],\n [uppertri[1], uppertri[3], uppertri[4]],\n [uppertri[2], uppertri[4], uppertri[5]]])\n edge = Edge(Type, fromNode, toNode, measurement, information)\n edges.append(edge)\n\n elif data[0] == 'EDGE_SE2_XY':\n Type = 'L'\n fromNode = int(data[1])\n toNode = int(data[2])\n measurement = np.array(data[3:5], dtype=np.float32)\n uppertri = np.array(data[5:8], dtype=np.float32)\n information = np.array([[uppertri[0], uppertri[1]],\n [uppertri[1], uppertri[2]]])\n edge = Edge(Type, fromNode, toNode, measurement, information)\n edges.append(edge)\n\n else:\n print('VERTEX/EDGE type not defined')\n\n # compute state vector and lookup table\n lut = {}\n x = []\n offset = 0\n for nodeId in nodes:\n lut.update({nodeId: offset})\n offset = offset + len(nodes[nodeId])\n x.append(nodes[nodeId])\n x = np.concatenate(x, axis=0)\n\n # collect nodes, edges and lookup in graph structure\n graph = Graph(x, nodes, edges, lut)\n print('Loaded graph with {} nodes and {} edges'.format(\n len(graph.nodes), len(graph.edges)))\n\n return graph", "def create_d3json(jsonfile=\"static/data/graph.json\"):\n gateway = None\n devices = defaultdict(list)\n colors = {}\n\n jsdata = HomeNetwork.read_sqlite3_current(jsondata=True)\n\n devices[\"nodes\"] = json.loads(jsdata)\n\n for anode in devices[\"nodes\"]:\n\n if \"osfamily\" in anode and anode[\"osfamily\"] not in colors:\n red = 0\n green = random.randint(-100, 255)\n blue = random.randint(-25, 236)\n colors[str(anode[\"osfamily\"])] = \"rgb({}, {}, {})\".format(red, green, blue)\n\n elif \"osfamily\" not in anode:\n anode[\"osvendor\"] = \"None\"\n red = 0\n green = random.randint(0, 2200)\n blue = random.randint(0, 
9600)%255\n colors[anode[\"osfamily\"]] = \"rgb({}, {}, {})\".format(red, green, blue)\n\n anode[\"color\"] = colors[str(anode[\"osfamily\"])]\n \n if anode[\"gateway\"] == \"Y\":\n gateway = anode[\"ip\"]\n continue\n\n devices[\"links\"].append({\"source\": \"\", \"target\": anode[\"ip\"]})\n\n for link in devices[\"links\"]:\n link[\"source\"] = gateway\n\n if jsonfile.startswith(\"/\"):\n jsonfile = jsonfile[1:]\n\n with open(jsonfile, 'w') as jswrt:\n jswrt.write(json.dumps(devices, indent=4))", "def create_graph(path):\n f = open(path, 'r')\n g = nx.DiGraph()\n\n # Velikost mnozice\n n = int(f.readline().split(\" \")[0])\n\n # Dodamo vsa vozlisca v graf\n for i in range(n):\n g.add_node(i+1)\n\n # Gremo cez vse primerjave in dodamo povezave v graf\n for line in f:\n u, v = line.strip().split(\" \")\n u, v = int(u), int(v)\n g.add_edge(u, v)\n\n return g", "def nx_to_json(graph, filename):\n graph_data = json_graph.node_link_data(graph)\n\n with open(filename, \"w\") as f:\n json.dump(graph_data, f, indent=4)", "def deserialize(self, filename):\n f = open(filename)\n edges_tails = []\n nodes = []\n # first pass adds incoming edges to nodes\n for line in f:\n if '->' in line: # edge\n edge = self.edge_class()\n tail_ids, head_id = edge.deserialize(line)\n nodes[head_id].add_incoming(edge)\n edges_tails.append((edge, tail_ids))\n else: # node\n node = self.node_class()\n node.deserialize(line)\n assert node.id == len(nodes), 'nodes shall appear in order'\n nodes.append(node)\n # second pass adds tail nodes to edges\n for edge, tail_ids in edges_tails:\n for nid in tail_ids:\n edge.add_tail(nodes[nid])\n f.close()\n # make a toposorted hypergraph\n hg = Hypergraph(nodes[0])\n hg.nodes = nodes\n for node in hg:\n node.hg = hg\n for edge in hg.edges():\n edge.hg = hg\n hg.tasks_done.add('topo_sort')\n return hg", "def load_graph(input_file=None, input_list=None):\n G = nx.Graph()\n if input_file:\n with open(input_file, 'r') as file:\n for line in file.readlines():\n line = line.strip().split(\" \")\n G.add_edge(line[0], line[1])\n elif input_list:\n G.add_edges_from(input_list)\n return G", "def readLinkoJson(file):\n with open(file, 'r') as jsonFile:\n preLinko = json.load(jsonFile)\n\n linko = Linkograph([], preLinko[0])\n\n for entry in preLinko[1:]:\n linko.append((set(entry[0]), set(entry[1]), set(entry[2])))\n linko.uuids.append(entry[3])\n\n return linko", "def init_map(json_map):\n source = json_map['connections']['source']\n target = json_map['connections']['target']\n price = json_map['connections']['price']\n critical = json_map['locations']['critical']\n n = len(source)\n\n peking_map = Graph()\n\n # Populate graph.\n for i in range(n):\n peking_map.add_edge(source[i], target[i], price[i])\n\n for i in critical:\n peking_map.update_critical(i)\n\n return peking_map", "def read_graph():\n return nx.read_edgelist('edges.txt.gz', delimiter='\\t')", "def makeGraph(self):\n self.floorGraph = graph.Graph()\n file = open(\"edges.csv\")\n edges = file.readlines()\n for edge in edges:\n params = edge.split(\",\")\n self.floorGraph.addEdge(params[0],params[1],float(params[2]))\n self.floorGraph.addEdge(params[1],params[0],float(params[2]))", "def load_graph(self, path):\n if path.split('.')[-1]=='gexf':\n self.graph = nx.read_gexf(path)\n else:\n self.graph = nx.read_gpickle(path)", "def __buildByFile( self, file ):\r\n file = open( file, 'r' ) # abre arquivo em modo leitura\r\n # retorna uma lista com strings separadas para cada linha lida do arquivo:\r\n lines = [ line.rstrip( '\\n' 
).split( ' ' ) for line in file.readlines() ]\r\n # apenas converte todos os numeros de coordenadas da lista para inteiros:\r\n lines = [ [ int(line[0]), int(line[1]), int(line[2]) ] for line in lines ]\r\n # cria e adiciona todos os vertices a lista:\r\n for line in lines:\r\n self.vertexes.append( Vertex ( line[0] ) )\r\n self.matrix[ line[0] ] = {}\r\n \r\n # calculando os custos e criando todas as arestas do grafo:\r\n v = [] # criando copia da lista de vertices:\r\n for vertex in self.vertexes: v.append( vertex )\r\n \r\n for vertexIni in self.vertexes:\r\n if vertexIni in v:\r\n v.remove( vertexIni )\r\n coordinates = ( lines[ vertexIni.label-1 ][1], lines[ vertexIni.label-1 ][2] )\r\n for vertexEnd in v:\r\n distance = math.sqrt( ( lines[ vertexIni.label-1 ][1] - lines[ vertexEnd.label-1 ][1] )**2 + ( lines[ vertexIni.label-1 ][2] - lines[ vertexEnd.label-1 ][2] )**2 )\r\n edge = Edge( vertexIni, vertexEnd, distance )\r\n self.edges.append( edge )\r\n vertexIni.adjacenses.append( edge )\r\n edge = Edge( vertexEnd, vertexIni, distance ) #add esse\r\n vertexEnd.adjacenses.append( edge )\r\n self.matrix[ vertexIni.label ][ vertexEnd.label ] = distance\r\n self.matrix[ vertexEnd.label ][ vertexIni.label ] = distance\r\n self.sortEdges( vertexIni.adjacenses )\r\n \r\n self.sortEdges( self.edges ) # ordenando todas as arestas do grafo\r\n print 'grafo clique com', len( self.vertexes ), 'cidades criado.'", "def assemble_graph():\n response = request.body.read().decode('utf-8')\n body = json.loads(response)\n stmts_json = body.get('statements')\n stmts = stmts_from_json(stmts_json)\n ga = GraphAssembler(stmts)\n model_str = ga.make_model()\n res = {'model': model_str}\n return res", "def read_graph():\n return nx.read_edgelist('edges_new.txt', delimiter='\\t')", "def load_graph(graph_url): # Function Provided By instructor - Grabs a specific graph from the internet and converts it to a form we can use\n graph_file = urllib2.urlopen(graph_url) # sets graph_file var to the file downloaded by urlopen\n graph_text = graph_file.read() # invokes read on the file downloaded\n graph_lines = graph_text.split('\\n')\n graph_lines = graph_lines[ : -1]\n\n print \"Loaded graph with\", len(graph_lines), \"nodes\"\n\n answer_graph = {}\n for line in graph_lines:\n neighbors = line.split(' ')\n node = int(neighbors[0])\n answer_graph[node] = set([])\n for neighbor in neighbors[1 : -1]:\n answer_graph[node].add(int(neighbor))\n\n print \"Finished processing Out-Degrees\"\n\n return answer_graph", "def graph():\n return jsonify(app.config[\"jsonified\"])", "def readAdjacencyGraph(self,filename):\n try:\n for line in open(filename,'r'):\n incoming,outgoing=line.strip().split(\":\")\n no_outgoing=outgoing.split(\",\")\n self.adjacencyMetadata[incoming]=dict(zip(no_outgoing,range(len(no_outgoing))))\n if incoming not in self.adjacency.keys():\n self.adjacency[incoming]=None\n for item in no_outgoing:\n if item not in self.adjacency.keys():\n self.adjacency[item]=None\n except Exception as e:\n raise", "def read_graph(filename):\n with open(filename, 'r') as file: # open the file\n # read the number of nodes and number of edges\n num_nodes, num_edges = DataIO.__preprocess_line(file.readline())\n graph = GraphProcessing.construct_null_graph(num_nodes) # construct a null graph\n for line in file.readlines(): # for every line in the file\n preprocessed_line = DataIO.__preprocess_line(line) # preprocess the line\n if preprocessed_line: # if the preprocessed line is not a null string\n # read the first and second 
node and the edge weight\n source_node, terminal_node, weight = preprocessed_line\n graph[source_node][terminal_node] = weight\n graph[terminal_node][source_node] = weight\n return graph # return the final graph", "def graph_reader(path):\n graph = nx.from_edgelist(pd.read_csv(path).values.tolist())\n graph.remove_edges_from(graph.selfloop_edges())\n return graph", "def import_graph(cls, filename, node_cls=GraphNode):\n with open(filename, 'r') as file:\n num_nodes = None\n graph = {}\n for line in file:\n if num_nodes is None:\n num_nodes = int(line)\n graph = {id_: node_cls(id_) for id_ in range(1, num_nodes + 1)}\n else:\n m, n, dist = line.split(' ')\n m = int(m)\n n = int(n)\n dist = float(dist)\n graph[m].neighbours[n] = graph[n]\n graph[n].neighbours[m] = graph[m]\n graph[m].distances[n] = dist\n graph[n].distances[m] = dist\n return graph", "def gen_graph(self):", "def build_graph(graph_dict): \n #make networkX graph\n G = nx.Graph()\n G.add_nodes_from(graph_dict.keys())\n for key in graph_dict:\n for i in range(len(graph_dict[key])):\n G.add_edge(key,graph_dict[key][i])\n return G", "def build_graph(self):\n raise NotImplementedError", "def ConstrDict(raw_data):\n if (path.exists(\"processed_out.txt\") and\n path.exists(\"processed_in.txt\")):\n with open(\"processed_out.txt\") as out:\n global out_edges\n out_edges = pickle.load(out)\n with open(\"processed_in.txt\") as fin:\n global in_edges\n in_edges = pickle.load(fin)\n print len(in_edges.keys())\n with open(\"nodes.txt\") as n:\n global nodes\n nodes = pickle.load(n)\n print \"nodes: \", len(nodes)\n else:\n # read each line and construct a dictionary to store\n # sources and destinations\n for line in raw_data: \n splitted_line = line.split()\n # source is the first element in a line, the rest of elements\n # are destinations\n threshold = 10000\n src, dests = splitted_line[0], splitted_line[1:threshold]\n # if src is not in the dictionary, create a key-value pair for\n # this src\n out_edges.setdefault(src, set())\n\n # put all destinations into the list of the corresponding src\n out_edges[src].update(set(dests))\n\n # construct a set to store all nodes appearing\n nodes.add(src)\n nodes.update(set(dests))\n\n # create the list of inedges for each node\n for i in out_edges[src]:\n in_edges.setdefault(i, set())\n in_edges[i].add(src)\n\n nodes = list(nodes)\n # shuffle the order of nodes\n shuffle(nodes)\n\n with open(\"processed_out.txt\", \"wb\") as out:\n pickle.dump(out_edges, out)\n with open(\"processed_in.txt\", \"wb\") as fin:\n pickle.dump(in_edges, fin)\n with open(\"nodes.txt\", \"wb\") as n:\n pickle.dump(nodes, n)\n\n\n # construct edge list\n for src, dests in out_edges.iteritems():\n pairs = [(src, dest) for dest in dests if (src, dest) not in\n exists]\n edges.extend(pairs)", "def load_graph(filename):\n with tf.gfile.FastGFile(filename, 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n tf.import_graph_def(graph_def, name='')", "def load_graph(filename):\n with tf.gfile.FastGFile(filename, 'rb') as f:\n graph_def = tf.GraphDef()\n graph_def.ParseFromString(f.read())\n tf.import_graph_def(graph_def, name='')", "def load_graph(path):\r\n \r\n # initialise dictionary\r\n graph_dict = {}\r\n \r\n # open File for reading\r\n graph_file = open(path,'r')\r\n \r\n # initialise list of all the lines\r\n graph_lines = graph_file.readlines() \r\n print (\"Loaded graph has\", len(graph_lines), \"nodes\")\r\n # For all lines get node and edges and put them to SET\r\n for line in 
graph_lines:\r\n line = line.rstrip('\\n')\r\n neighbors = line.split(' ')\r\n #node = int(neighbors[0])\r\n node = neighbors[0]\r\n graph_dict[node] = set([])\r\n for neighbor in neighbors[1:]:\r\n #graph_dict[node].add(int(neighbor))\r\n graph_dict[node].add(neighbor)\r\n return graph_dict", "def load_graph(graph_path):\n graph = nx.from_edgelist(pd.read_csv(graph_path).values.tolist())\n graph.remove_edges_from(graph.selfloop_edges())\n return graph", "def test_read_json_compressed():\n s = JsonSource()\n g = s.parse(os.path.join(RESOURCE_DIR, 'valid.json.gz'), compression='gz')\n nodes = {}\n edges = {}\n for rec in g:\n if rec:\n if len(rec) == 4:\n edges[(rec[0], rec[1])] = rec[3]\n else:\n nodes[rec[0]] = rec[1]\n\n assert len(nodes.keys()) == 6\n assert len(edges.keys()) == 5\n\n n = nodes['MONDO:0017148']\n assert 'id' in n and n['id'] == 'MONDO:0017148'\n assert n['name'] == 'heritable pulmonary arterial hypertension'\n assert n['category'][0] == 'biolink:Disease'\n\n e = edges[('HGNC:11603', 'MONDO:0017148')]\n assert e['subject'] == 'HGNC:11603'\n assert e['object'] == 'MONDO:0017148'\n assert e['predicate'] == 'biolink:related_to'\n assert e['relation'] == 'RO:0004013'", "def get_graph(filename, data_folder):\n g = nx.MultiGraph()\n with open(data_folder + \"/\" + filename) as fp:\n line = fp.readline()\n while line:\n (o, d, t, e) = line.split()\n g.add_edge(int(o), int(d), start=int(t), duration=int(e))\n line = fp.readline()\n return g", "def __init__(self, graph_json, merges=None):\n self.initialize_layers(graph_json[\"years\"])\n self.create_nodes(graph_json[\"articles\"], merges)\n self.create_edges(graph_json[\"edges\"])", "def test_graph_creation(self):\n with open(os.path.join(data_dir, 'sample_graph.pkl'), 'rb') as f:\n truth_graph = pickle.load(f)\n f.close()\n output_graph = geojson_to_graph(os.path.join(data_dir,\n 'sample_roads.geojson'))\n\n assert nx.is_isomorphic(truth_graph, output_graph)", "def build_graph(self):\n for node in self.graph.nodes():\n self.c2py[node] = PyNode(node)\n for _input in node.inputs():\n if _input not in self.c2py:\n self.c2py[_input] = PyNode(_input, True)\n if _input in self.forward_edge:\n self.forward_edge[_input].append(node)\n else:\n self.forward_edge[_input] = [node]\n for output in node.outputs():\n if output not in self.c2py:\n self.c2py[output] = PyNode(output, True)\n if node in self.forward_edge:\n self.forward_edge[node].append(output)\n else:\n self.forward_edge[node] = [output]", "def load_from_json(self, file_name: str) -> bool:\n try:\n with open(file_name, 'r') as f:\n data = json.loads(f.read())\n self.__g = DiGraph.from_dict(data)\n return True\n except:\n traceback.print_exc()\n return False", "def graph(self):\n\n def start_graph():\n return \"digraph services {\\n\\n\"\n\n def end_graph(graph_string):\n graph_string += \"\\n}\\n\"\n return graph_string\n\n def start_cluster(graph_string, cluster_id, cluster_name):\n graph_string += \"subgraph cluster_%s {\\n\" % cluster_id\n graph_string += \" label = \\\"%s\\\";\\n\" % cluster_name\n return graph_string\n\n def end_cluster(graph_string):\n graph_string += \"\\n}\\n\"\n return graph_string\n\n def add_path(graph_string, from_node, to_node, protocol, port):\n if not from_node.name:\n cidr_blocks = [subnetwork.cidr_block for subnetwork in from_node.subnetworks]\n from_name = \",\".join(cidr_blocks)\n from_network_name = \"external\"\n else:\n from_name = from_node.name\n from_network_name = from_node.network.name\n path_template = \"\\\"%s (%s)\\\" -> \\\"%s 
(%s)\\\" [ label=\\\"(%s:%s)\\\" ];\\n\"\n graph_string += path_template % (from_name, from_network_name, to_node.name,\n to_node.network.name, protocol, port)\n return graph_string\n\n def add_node(graph_string, node_name, network_name):\n graph_string += \" \\\"%s (%s)\\\";\\n\" % (node_name, network_name)\n return graph_string\n\n def group_paths_by_network(paths_info):\n net_to_path = {}\n for path in paths_info:\n if path.network.name not in net_to_path:\n net_to_path[path.network.name] = []\n net_to_path[path.network.name].append(path)\n return net_to_path\n\n def group_services_by_network(services_info):\n net_to_service = {}\n for service_info in services_info:\n if service_info.network.name not in net_to_service:\n net_to_service[service_info.network.name] = []\n net_to_service[service_info.network.name].append(service_info)\n return net_to_service\n\n # First group paths and services by network\n paths_info = self.paths.list()\n net_to_path = group_paths_by_network(paths_info)\n services_info = self.service.list()\n net_to_service = group_services_by_network(services_info)\n networks_info = self.network.list()\n\n graph_string = start_graph()\n cluster_id = 0\n for network_info in networks_info:\n\n # Skip networks with no name for now\n if not network_info.name:\n continue\n\n # Each network is a \"cluster\" in graphviz terms\n graph_string = start_cluster(graph_string, cluster_id, network_info.name)\n cluster_id += 1\n\n # If the network is empty just make a placeholder node\n if network_info.name not in net_to_service and network_info.name not in net_to_path:\n graph_string = add_node(graph_string, \"Empty Network\", network_info.name)\n graph_string = end_cluster(graph_string)\n continue\n\n # Otherwise, add all the services and path in this network\n if network_info.name in net_to_service:\n for service_info in net_to_service[network_info.name]:\n graph_string = add_node(graph_string, service_info.name,\n service_info.network.name)\n graph_string = end_cluster(graph_string)\n\n # We do all paths outside the cluster so that public CIDRs will show up outside the\n # networks.\n if network_info.name in net_to_path:\n for path_info in net_to_path[network_info.name]:\n graph_string = add_path(graph_string, path_info.source, path_info.destination,\n path_info.protocol, path_info.port)\n\n graph_string = end_graph(graph_string)\n return graph_string", "def main(infilename):\n graph = pydot.graph_from_dot_file(infilename)\n nodes = get_nodes_info(graph)\n load_links(graph, nodes)\n propagate_lineage(nodes)\n dump_status(nodes)", "def loadgraph(self, path):\n\n raise NotImplementedError", "def load_graph(graph_url):\n graph_file = urllib2.urlopen(graph_url)\n graph_text = graph_file.read()\n graph_lines = graph_text.split('\\n')\n graph_lines = graph_lines[ : -1]\n \n print \"Loaded graph with\", len(graph_lines), \"nodes\"\n \n answer_graph = {}\n for line in graph_lines:\n neighbors = line.split(' ')\n node = int(neighbors[0])\n answer_graph[node] = set([])\n for neighbor in neighbors[1 : -1]:\n answer_graph[node].add(int(neighbor))\n\n return answer_graph", "def load_graph(graph_url):\n graph_file = urllib2.urlopen(graph_url)\n graph_text = graph_file.read()\n graph_lines = graph_text.split('\\n')\n graph_lines = graph_lines[ : -1]\n \n print \"Loaded graph with\", len(graph_lines), \"nodes\"\n \n answer_graph = {}\n for line in graph_lines:\n neighbors = line.split(' ')\n node = int(neighbors[0])\n answer_graph[node] = set([])\n for neighbor in neighbors[1 : -1]:\n 
answer_graph[node].add(int(neighbor))\n\n return answer_graph" ]
[ "0.7250749", "0.7151497", "0.69580156", "0.6932821", "0.6580459", "0.6554932", "0.6549572", "0.6462024", "0.6431498", "0.6411141", "0.6403866", "0.6394087", "0.63446313", "0.63325745", "0.63108885", "0.6290227", "0.6284171", "0.62591064", "0.6194136", "0.6182282", "0.6135531", "0.61315125", "0.6120831", "0.60799265", "0.6069941", "0.60648096", "0.6061506", "0.60591054", "0.605206", "0.6038242", "0.6005129", "0.59979236", "0.5991929", "0.59883344", "0.59836566", "0.5969323", "0.59656906", "0.59617895", "0.595877", "0.5943298", "0.59394", "0.5938847", "0.59278154", "0.59260106", "0.592196", "0.5913896", "0.59126985", "0.591043", "0.5892564", "0.5885405", "0.58810604", "0.5865318", "0.5862877", "0.5857356", "0.5856645", "0.5849202", "0.5841117", "0.58262444", "0.5806976", "0.5804126", "0.5803747", "0.57942575", "0.5786851", "0.57800245", "0.5777593", "0.57703286", "0.57619756", "0.57617205", "0.5758807", "0.5753572", "0.5752655", "0.5748736", "0.57436883", "0.57367694", "0.5735533", "0.5730873", "0.57299674", "0.57232624", "0.57200754", "0.5719434", "0.5714079", "0.5713206", "0.57128376", "0.57050884", "0.5700774", "0.5683603", "0.5683603", "0.5674822", "0.5672025", "0.5664446", "0.5663413", "0.5660134", "0.56456316", "0.5642113", "0.564122", "0.56410366", "0.5636699", "0.5634991", "0.5629453", "0.5629453" ]
0.7102555
2
.mfk play Begin the game ||| [+/] Edit the mfk user list ||| marry fuck kill Give the verdict ||| score Retrieves the score of a user
def mfk(inp, db=None): db_init(db) inp = inp.lower() m_regex = re.compile(r"marry\s([a-zA-Z0-9_]+\s|[a-zA-Z0-9_]+)").finditer(inp) f_regex = re.compile(r"fuck\s([a-zA-Z0-9_]+\s|[a-zA-Z0-9_]+)").finditer(inp) k_regex = re.compile(r"kill\s([a-zA-Z0-9_]+|[a-zA-Z0-9_]+\s)").finditer(inp) if inp.startswith('play'): #play game return play(db) if inp.startswith('+'): #add member nick = inp[2:] exists = db.execute("select user from mfk where user=?" ,(nick,)).fetchall() if exists: return "User has already been added." db.execute("insert into mfk values(?, ?, ?, ?)",(nick, 0, 0, 0)) db.commit() return "Added." if inp.startswith('-'): #remove member nick = inp[2:] exists = db.execute("select user from mfk where user=?" ,(nick,)).fetchall() if not exists: return "User doesn't exist." db.execute("delete from mfk where user=?",(nick,)) db.commit() return "Removed." updated = False for nick in m_regex: nick = nick.group() nick = nick[6:].strip(' ') update('marry',nick,db) updated = True for nick in f_regex: nick = nick.group() nick = nick[5:].strip(' ') update('fuck',nick,db) updated = True for nick in k_regex: nick = nick.group() nick = nick[5:].strip(' ') update('kill',nick,db) updated = True if inp.startswith('score'): #get score nick = inp[6:] return score(nick,db) if updated: return "That makes sense." return "Unexpected input."
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enter_game_scores():\n pass", "def update_score():\n pass", "async def leaderboard(self, ctx):\r\n rank = 1\r\n strlist = []\r\n for k, v in sorted(player.items(), key=lambda x: expose(x[1]), reverse=True): # operator.itemgetter(1)\r\n position = str(rank) + '. ' + k\r\n while len(position) < 25:\r\n position += '\\u00A0'\r\n position += ' | ' + str(round(expose(v), 2)) + u\"\\u000A\"\r\n strlist.append(position)\r\n rank += 1\r\n indx = 10\r\n table = '\\u200b'.join(strlist[indx-10:indx])\r\n header = ('\\u00A0' * 3) + 'User' + ('\\u00A0' * 20) + 'Rating' + u\"\\u000A\"\r\n divider = '_' * 33 + u\"\\u000A\"\r\n\r\n msg = await ctx.send('```' + u\"\\u000A\" + header + divider + table + divider + u\"\\u000A\" '```')\r\n await msg.add_reaction('⬅')\r\n await msg.add_reaction('➡')\r\n\r\n def check(reaction, user):\r\n return user != msg.author and str(reaction.emoji == '➡' or '⬅')\r\n while True:\r\n try:\r\n reaction, user = await bot.wait_for('reaction_add', timeout=60.0, check=check)\r\n if reaction.emoji == '➡':\r\n await msg.remove_reaction(reaction.emoji, user)\r\n if indx < rank-1:\r\n indx += 10\r\n table = '\\u200b'.join(strlist[indx - 10:indx])\r\n await msg.edit(content='```' + u\"\\u000A\" + header + divider + table + divider + u\"\\u000A\" '```')\r\n if reaction.emoji == '⬅':\r\n await msg.remove_reaction(reaction.emoji, user)\r\n if indx > 10:\r\n indx -= 10\r\n table = '\\u200b'.join(strlist[indx - 10:indx])\r\n await msg.edit(content='```' + u\"\\u000A\" + header + divider + table + divider + u\"\\u000A\" '```')\r\n except:\r\n await msg.remove_reaction('⬅', msg.author)\r\n await msg.remove_reaction('➡', msg.author)\r\n break", "def update_activity_points(self, user_id,score):\n \n print(\"score :\"+str(score))\n\n if score<0:\n self.execute(TABELLE['activity_points']['update']['loose'],(score,user_id,))\n else:\n self.execute(TABELLE['activity_points']['update']['win'],(score, user_id,))", "def increase_score(self):\n\n old_score = self.get_score()\n new_score = old_score + 1\n sql = \"UPDATE Users SET score = ? 
WHERE username = ?\"\n self.conn.execute(sql, (new_score, self.username))\n self.conn.commit()", "def set_user_module_score(self, user, score, max_score, comment=u\"\"):\r\n if score is not None and max_score is not None:\r\n scaled_score = score * max_score\r\n else:\r\n scaled_score = None\r\n\r\n self.system.rebind_noauth_module_to_user(self, user)\r\n\r\n # have to publish for the progress page...\r\n self.system.publish(\r\n self,\r\n 'grade',\r\n {\r\n 'value': scaled_score,\r\n 'max_value': max_score,\r\n 'user_id': user.id,\r\n },\r\n )\r\n self.module_score = scaled_score\r\n self.score_comment = comment", "def say_scores(score0, score1):\n print(\"Player 0 now has\", score0, \"and Player 1 now has\", score1)\n return say_scores", "def update_score(score, role):\n if role == 'winner':\n score = score + 1\n if role == 'loser':\n score = score - 1\n return score", "def updateScore(score):\n return score + 1", "def show_score(self):\n self._pause = True # pause the game when you check the score\n score_list = self.get_high_score(self._filename) # get the record\n top = tk.Toplevel() # create a Toplevel\n top.title('Score Board')\n # create a text label for notification\n title = tk.Label(top, text='High Scored Player in This Level', width=70)\n title.pack(side=tk.TOP, ipady=1)\n if score_list is None: # check whether the record is empty\n tk.Label(top, text='No record in this level yet!', width=70).pack(side=tk.TOP, ipady=1)\n else: # if not empty\n for record in score_list: # shows up all the detail\n tk.Label(top, text=record[0] + ' : ' + record[1]).pack(side=tk.TOP, ipady=1)", "def update_score(self, match, i):\n self.match_views.update_score(match)\n score_p1 = input(f\"Veuillez rentrer le score de \"\n f\"{match[0][0]['first_name']} \"\n f\"{match[0][0]['last_name']} (1/0.5/0) \")\n score_p2 = input(f\"Veuillez rentrer le score de \"\n f\"{match[1][0]['first_name']} \"\n f\"{match[1][0]['last_name']} (1/0.5/0) \")\n self.validate_score(score_p1, score_p2, match, i)\n new_score = float(score_p1)\n new_score2 = float(score_p2)\n return new_score, new_score2", "def make_move(self,request):\n game = get_by_urlsafe(request.urlsafe_game_key, Game)\n user = game.user.get()\n if game.game_over:\n raise endpoints.ForbiddenException('Illegal action: Game is already over.')\n guess_Word = list(game.target)\n score = []\n [score.append('*') for i in range(len(guess_Word))]\n guess = request.guess.upper()\n # Validation of user entries\n if guess.isalpha() == False:\n msg = 'Please dont enter a number.'\n game.add_game_history(msg,guess)\n elif len(guess) != 1:\n msg = 'Please enter only one character.'\n game.add_game_history(msg,guess)\n # If user didn't get the correct answer. Substract 1.\n else:\n if guess not in guess_Word:\n game.attempts_remaining -=1\n if game.attempts_remaining > 0:\n [set_score_at(score,guess_Word,i) for i in game.correct]\n msg = \"Incorrect, you have %i attempts remaining. %s \" % (game.attempts_remaining, ''.join(score))\n game.add_game_history(msg,guess)\n else:\n msg = \"Game Over!. The answer was %s. 
Game Over \" % ''.join(guess_Word)\n user.loss +=1\n user.win_ratio = self.analyze_guess(user.win, user.loss)\n user.put()\n game.add_game_history(msg,guess)\n game.end_game()\n elif guess in guess_Word:\n [game.correct.append(i) for i in range(len(guess_Word)) if guess_Word[i] == guess and i not in game.correct]\n game.put()\n [set_score_at(score,guess_Word,i) for i in game.correct]\n msg = ''.join(score)\n game.add_game_history(msg,guess)\n if len(game.correct) == len(guess_Word):\n user.win +=1\n user.win_ratio = self.analyze_guess(user.win, user.loss)\n user.put()\n game.end_game(True)\n msg = \"You've won. The word was %s \" % ''.join(game.target)\n game.add_game_history(msg,guess)\n return game.to_form(msg)", "def leaderboard():\n \n global score_dictinary\n data = []\n fields = []\n scores = []\n names = []\n users = []\n i=0\n \n #Reads the winners from a mongo database \n read_mongo(scores, names)\n \n #Sorts the list in descending order\n quicksort(scores, names, 0, len(scores) - 1)\n \n #Joins the names and scores arrays together\n while i < len(scores):\n users.append(names[i] + \" \" + scores[i])\n i += 1\n \n users = (reversed(users))\n \n return render_template(\"leaderboard.html\", users=users)", "def mc_update_scores(scores, board, player):\n status = board.check_win()\n if status == provided.DRAW:\n pass\n if status == player:\n for row in range(board.get_dim()):\n for col in range(board.get_dim()):\n znak = board.square(row, col)\n helper(True, znak, player, scores, row, col)\n \n \n if status == provided.switch_player(player):\n for row in range(board.get_dim()):\n for col in range(board.get_dim()):\n znak = board.square(row, col)\n helper(False, znak, player, scores, row, col)\n \n \n return None", "def _update_score(self) -> None:\n\n # setting new score by iterating over players\n self.score_play[self.n_play_turns, ] = [\n self._score_table[(\n self.contract.level,\n self.contract.suit,\n self.tricks[i],\n self.contract.player_vulnerability[i],\n int(self.contract.double + self.contract.redouble)\n )]\n for i in range(NUM_PLAYERS)\n ]", "def scores(phenny, input):\n\tglobal scores_file\n\tglobal scores_dict\n\tfor nick in scores_dict:\n\t\tstrscore = str(scores_dict[nick])\n\t\tstr_say = nick + \": \" + strscore\n\t\tphenny.say(str_say)", "def score(self):", "def add_score(score):\n global SCORE\n SCORE = SCORE + score\n # update the display\n mvaddstr(1, 2, \"Score:\", color_pair(HEADING_COLOUR) | A_BOLD)\n mvaddstr(1, 9, \"%d\" % SCORE, color_pair(TEXT_COLOUR) | A_BOLD)", "def setScore(self, score=None):\r\n self._score = score\r\n self.ids[\"_scoreDisplayer\"].displayScore(score) \r\n self.ids[\"_emailSender\"]._score = self._score\r\n self.ids[\"_scoreSaver\"]._score = self._score\r\n self.ids[\"_MidiPlayer\"]._score = self._score", "async def set_game_score(self, user_id: base.Integer, score: base.Integer,\n force: typing.Union[base.Boolean, None] = None,\n disable_edit_message: typing.Union[base.Boolean, None] = None,\n chat_id: typing.Union[base.Integer, None] = None,\n message_id: typing.Union[base.Integer, None] = None,\n inline_message_id: typing.Union[base.String,\n None] = None) -> types.Message or base.Boolean:\n payload = generate_payload(**locals())\n result = await self.request(api.Methods.SET_GAME_SCORE, payload)\n\n if isinstance(result, bool):\n return result\n\n return types.Message(**result)", "def create_score(self):\n try:\n with open(\"data/save.txt\", \"r\") as file:\n\n dictionary = json.load(file)\n if not \"Actual Username\" in 
dictionary:\n raise KeyError(\"The Actual Username key is missing !\")\n else:\n # Get the name of the actual user\n user = dictionary[\"Actual Username\"]\n # Get the higher score of the user\n\n max_number = 0\n for elem in dictionary[user]:\n if elem > max_number:\n max_number = elem\n else:\n continue\n except FileNotFoundError:\n print('File not found !')\n except IOError:\n print('Error IO.')\n\n # Create the score in game\n self.score_label = self.canevas.create_text(0, 0, text=\"Score : \" + str(self.score), font=(\"Arial\", 15),\n anchor=NW)\n\n # Create the actual level in game\n self.actual_level = self.canevas.create_text(400, 0, text=\"Level \" + str(self.brick.counter),\n font=(\"Arial\", 15), anchor=N)\n\n # Create label best score\n self.best_score_label = self.canevas.create_text(800, 0, text=\"Best Score : \" + str(max_number),\n font=(\"Arial\", 15), anchor=NE)\n\n # Create label user\n self.user_name = self.canevas.create_text(0, 600, text=\"Username : \" + user, font=(\"Arial\", 15), anchor=SW)\n\n # Create label lives\n self.lives_label = self.canevas.create_text(800, 600, text=\"Lives : \" + str(self.life), font=(\"Arial\", 15),\n anchor=SE)", "def exercise5():\n player1 = easygui.enterbox('Enter first player name:', ' ', '')\n player2 = easygui.enterbox('Enter second player name:', ' ', '')\n score = easygui.integerbox('Enter required winning score:', '', '', 1, 2 ** 31)\n winner = scorekeeper( player1, player2, score)\n easygui.msgbox('{} wins!'.format(winner), '', 'OK')", "def score_game(self):\r\n players = self.player_control.get_players()\r\n ###game_control = self.game_control\r\n ###if game_control is not None:\r\n ### game_control.set_vals() # Update any changed game control settings\r\n if len(players) == 0:\r\n return # No players\r\n n_top_score = 0\r\n top_score = players[0].get_score()\r\n for player in players:\r\n if player.get_score() > top_score:\r\n top_score = player.get_score()\r\n for player in players:\r\n player_score = player.get_score()\r\n if player_score == top_score:\r\n n_top_score += 1\r\n \r\n for player in players:\r\n player_score = player.get_score()\r\n player_played = player.get_played()\r\n player_ties = player.get_ties()\r\n player_wins = player.get_wins()\r\n new_played = player_played+1\r\n player.set_played(new_played)\r\n player.set_prop(\"played\")\r\n if player_score == top_score:\r\n if n_top_score > 1:\r\n new_ties = player_ties + 1\r\n player.set_ties(new_ties)\r\n player.set_prop(\"ties\")\r\n else:\r\n new_wins = player_wins + 1\r\n player.set_wins(new_wins)\r\n player.set_prop(\"wins\")\r\n self.update_score_window()", "async def changescore(self, ctx, num: int, *, user: discord.Member):\n self.data[ctx.guild.id]['score'][user.id] += num\n return await ctx.send(f\"{user}'s score has been changed to {self.data[ctx.guild.id]['score'][user.id]}.\")", "def disp_score():", "def to_score(self):\n self._bottom_tab(2)\n self._goto(\"score\")", "def enter_player_score(player):\n score = 2\n while score > 1 or score < 0:\n score = view.enter_player_view(player.player_first_name)\n try:\n score = float(score)\n except ValueError:\n score = 2\n view.message('erreur')\n continue\n else:\n if score < 0:\n view.message('negatif')\n continue\n if score > 1:\n view.message('superieur')\n continue\n player.total_score += score\n return score", "def main():\n name = input('Enter your Name: ')\n playagain = \"yes\"\n if playagain == \"yes\":\n intro()\n intro_end()\n choice1_end()\n part_1()\n choice2 = attack_or_run()\n 
part_1_1(choice2)\n scorex = encounter_1(choice2)\n part_1_2(choice2)\n part_1_3()\n print('Do you want to view the Leader Board??: ')\n lb_input = input().lower()\n if lb_input == 'yes' or 'y':\n if str(scorex)[0] == '-':\n leaderboard(score=0, username=name)\n else:\n leaderboard(score=scorex, username=name)\n play()\n else:\n play()", "def scammerScore(userID):\n\tres = getMoreStuff(userID)\n\treturn res", "async def on_message(message):\n\n # we do not want the bot to reply to itself\n if message.author == client.user:\n return\n\n # intializes a scores object\n hiscores = Scores(message)\n\n if message.content.startswith('!hello'):\n msg = 'Hello {0.author.mention}'.format(message)\n await message.channel.send(msg)\n\n # get the command without !\n command = message.content.split()[0][1:]\n\n # retrieve the score of a player\n if message.content.startswith('!') and command in SKILLS:\n\n # retrieve the username that comes after the !level command and set underscores\n username = message.content.split()[1:]\n username = '_'.join(username)\n\n # get scores\n await hiscores.show_score(username, command)\n\n if message.content.startswith('!compare'):\n\n # get skill\n skill = message.content.split()[1]\n\n # check if the skill is valid, if not we compare based on total level and experience\n if not skill in SKILLS:\n\n # get the players\n players = ' '.join(message.content.split()[1:])\n players = players.split(' - ')\n\n for i, player in enumerate(players):\n players[i] = player.replace(' ', '_')\n\n # compare the players on total level if nothing is given\n await hiscores.compare(players, 'total')\n\n else:\n\n # get the players after the skill\n players = ' '.join(message.content.split()[2:])\n players = players.split(' - ')\n\n for i, player in enumerate(players):\n players[i] = player.replace(' ', '_')\n\n print(players)\n print(skill)\n # compare the players on total level if nothing is given\n await hiscores.compare(players, skill)\n\n\n if message.content.startswith('!pok'):\n msg = 'Heb je m al Marc?'.format(message)\n await message.channel.send(msg)", "def test_new_user_returns_score(self):\r\n observed = highscore.highscore(self.fixture_player, self.fixture_score_high)\r\n self.assertEquals(observed, self.fixture_score_high)", "def score(self):\n score_message = {\n 'Onewins': \"\\nThe Winner is Player 1!\",\n 'Twowins': \"\\nThe Winner is Player 2!\",\n 'Tie': \"\\nTie! 
Looks like everyone's a winner!\",\n 'Nowinner': \"\\nYikes, neither of you win!\"\n }\n if self.pone_score > self.ptwo_score:\n print(score_message['Onewins'])\n elif self.pone_score < self.ptwo_score:\n print(score_message['Twowins'])\n elif self.pone_score == 0 and self.ptwo_score == 0:\n print(score_message['Nowinner'])\n else:\n print(score_message['Tie'])", "def start_the_game(self):\n print(f'{self.user_name.get_value()}, Do the job here!')\n return ('jeu')", "def end_game(self, user):\r\n self.game_over = True\r\n self.put()\r\n # Add the game to the score 'board'\r\n score = Score(user=user, date=date.today(), won=True,\r\n guesses=self.turn)\r\n score.put()\r\n winner = user.get()\r\n print \"winner: \", winner.name\r\n winner.wins += 1\r\n winner.winloss_ratio = float(\r\n winner.wins / (winner.wins + winner.losses))\r\n winner.total_guesses += self.turn\r\n winner.put()\r\n if self.player1 == user:\r\n loser_key = self.player2\r\n else:\r\n loser_key = self.player1\r\n score = Score(user=loser_key, date=date.today(), won=False,\r\n guesses=self.turn)\r\n score.put()\r\n loser = loser_key.get()\r\n loser.losses += 1\r\n loser.winloss_ratio = float(loser.wins / (loser.wins + loser.losses))\r\n loser.total_guesses += self.turn\r\n loser.put()", "def run_quiz(quiz_list):\n score = 0\n # Go through each item in the list\n for turn in quiz_list:\n # Use the index of the items in the list to set to variables. Items in lists always start from 0 so item 0\n # is the first, item 1 is the second etc.\n foreign = turn[1]\n english = turn[2]\n # Prompt for input on the screen. Set whatever is typed in to the variable 'attempt'. This uses '%s' as a placeholder\n # in the input string. This is translated to the value of the variable 'foreign'\n attempt = input(\"What is the English word for %s?\\n\" % foreign)\n # Test to see if the attempt matched the English word\n if attempt == english:\n # If it did incrememnt the score\n print(\"Correct!\")\n score = score + 1\n else:\n # If it didn't then show what the correct word is\n print(\"Incorrect! The English word for '%s' is '%s'\\n\" % (foreign, english))\n #endif\n #endfor\n # Return the score to the caller of the function\n return(score)", "def showtopscores(self):\n top_scores = LeaderBoard.gettopscorerslist(CURRENT_GAME_LEVEL)\n level_string = \"\"\n if CURRENT_GAME_LEVEL == DifficultyLevel.ExpertLevel:\n level_string = \"Expert level\"\n elif CURRENT_GAME_LEVEL == DifficultyLevel.BeginnerLevel:\n level_string = \"Beginner level\"\n else:\n level_string = \"Intermediate level\"\n leaderboard = \"Rank\".ljust(10) + \"Player Name\".ljust(30) + \"Score\".ljust(10) + '\\n'\n print leaderboard,\n rank = 1\n for score in top_scores:\n score = str(rank).ljust(10) + score\n print score,\n leaderboard = leaderboard + score\n rank = rank + 1\n QtGui.QMessageBox.about(self, \"Leaderboard for \" + level_string, leaderboard)", "def update_score_from_cmd(self, new_score, prev_score):\r\n if new_score is None:\r\n return # No change\r\n \r\n player = new_score[0]\r\n score = new_score[1]\r\n player.set_score(score)", "def enter_matches_score(match_list):\n for match in match_list:\n view.show(\"{} vs {}\".format(match.player_blanc.player_first_name,\n match.player_black.player_first_name))\n score_blanc = enter_player_score(match.player_blanc)\n match.score_blanc = score_blanc\n score_black = enter_player_score(match.player_black)\n match.score_black = score_black", "def table(score):\r\n\t\r\n\tprint(\"Congratulation!!! 
You have enough points to get into Best Results Board\")\r\n\tplayer_score = (input(\"\\nPlease enter your name: \")) + \":\" + str(score)\r\n\t\r\n\tf = open(\"pickle_board.txt\", 'a')\r\n\tf.write(player_score)\r\n\tf.close()\r\n\tprint(\"Your score was added!\")", "def main():\n display, clock = game.init_pygame()\n highscores = HighScores(display, clock)\n highscores.run()", "def set_high_score(self):\n # pop a window for player's name\n answer = simpledialog.askstring(\"Cheers\", \"What's your name?\",\n parent=self._master)\n if answer is not None:\n # set the player's record(str)\n record = '==' + self._filename + '==\\n' \\\n + answer + ':' + str(self._player.get_score()) + '\\n'\n else: # if didn't have the name\n messagebox.showinfo(\"Notice\", \"Not gonna take this on the score board.\")\n return False # skip the method\n # save the record\n path = os.path.abspath(os.path.dirname(sys.argv[0])) + \"\\\\HighScores\\\\\" + self._filename\n if not os.path.exists(path): # check whether have the file\n open(path, 'w') # if not, create\n with open(path, 'r+') as fin: # write record into the file\n fin.read()\n fin.write(record)\n fin.close()", "def update_score_board(self):\n score = ''\n for key, value in self.model.game_score.items():\n score += key + \"-\" + str(value) + ':'\n if self.view.score_board_entry.get():\n self.view.score_board_entry.delete(0, tkinter.END)\n self.view.score_board_entry.insert('1', score)", "def update_scoreboard(self):\n self.clear()\n self.goto(-(WIDTH//6), (HEIGHT//2-30))\n self.write(self.l_score, align = 'center', font = ('Courier', 20, 'normal'))\n self.goto((WIDTH//6), (HEIGHT//2-30))\n self.write(self.r_score, align = 'center', font = ('Courier', 20, 'normal'))", "def commit_score(self):\n\n # Update the player's total score and total roll count\n self._total_score += self._current_score", "async def finish_game(self, ctx: commands.Context, score: dict):\n # Print out scoreboard, and winner, then give the winners their points\n scoreboard = sorted(score.items(), key=lambda x:x[1], reverse=True)\n result = \"**SCOREBOARD:**\\n\"\n condition = min(len(scoreboard), 9)\n\n # Find up to the 9th place\n # ranking_index[i] will give the index of the first person in the (i+1)th place\n ranking_index = [0]\n\n for i in range(1, len(scoreboard)):\n if scoreboard[i][1] == scoreboard[i-1][1]:\n continue\n ranking_index.append(i)\n if i >= condition: # Only want up to the 9th place\n break\n else:\n ranking_index.append(len(scoreboard))\n \n # Construct results of game and send to channel\n for i in range(len(ranking_index) - 1):\n result += (self.word_placing[i] \n + f\" place - {scoreboard[i][1]} points - \"\n + ', '.join(str(x[0]) for x in scoreboard[ranking_index[i]:ranking_index[i+1]])\n + '\\n')\n\n # Give points to everyone in first place\n for person in scoreboard[ranking_index[0]:ranking_index[1]]:\n self.data[ctx.guild.id]['score'][person[0].id] += 1\n\n result += \"Players in first place have earned one point each.\"\n await ctx.send(result)\n # Clear the database\n self.games_info[ctx.guild.id] = gamesDict()", "def get_score(self):\n\n sql = \"SELECT score FROM Users WHERE username = '\" + self.username + \"'\"\n self.cursor.execute(sql)\n return self.cursor.fetchall()[0][0]", "def scores(bot, update):\n chat_id = update.message.chat_id\n bot.send_message(chat_id,\n chats[chat_id].displayScores(),\n parse_mode=ParseMode.MARKDOWN,\n isgroup=True)", "def update_score(self, engine, *args):\n #pdb.set_trace()\n self.score_label.text = \"Gold: 
{}/{}\".format(str(engine.score),\n str(engine.win_score))", "def scoreGame(self):\n # create valueLs[card1,card2,...], pass it to sumHandReturnPoints(valueLs) or twoCardReturnPoints(valueLs)\n scoreLs = []\n ### Score of row\n for rowKey in self.table:\n valueLs = self.table[rowKey]\n points = self.sumHandReturnPoints(valueLs)\n scoreLs.append(points)\n\n ### Score of 4-card column\n for offset in range(0,3): # 0,1,2\n tmpLs = []\n for rowKey in self.table:\n valueLs = self.table[rowKey]\n if len(valueLs) == 5:\n iterStart = 1\n else:\n iterStart = 0\n card = valueLs[iterStart+offset]\n tmpLs.append(card)\n points = self.sumHandReturnPoints(tmpLs)\n scoreLs.append(points) \n\n ### Score of 2-card column\n #(1) 1st column\n valueLs1 = self.table['row1']\n valueLs2 = self.table['row2']\n tmpLs = []\n tmpLs.append(valueLs1[0].get_rank())\n tmpLs.append(valueLs2[0].get_rank())\n points = self.twoCardReturnPoints(tmpLs)\n scoreLs.append(points)\n #(2) 5th column\n valueLs3 = self.table['row1']\n valueLs4 = self.table['row2']\n tmpLs = []\n tmpLs.append(valueLs3[-1].get_rank())\n tmpLs.append(valueLs4[-1].get_rank())\n points = self.twoCardReturnPoints(tmpLs)\n scoreLs.append(points) \n\n ### Add up scoreLs\n sumPoints = 0\n for points in scoreLs:\n sumPoints += points\n return sumPoints", "def game():\n indexes = set_index_list(-1)\n score = 0\n a = get_person(indexes, -1)\n while True:\n b = get_person(indexes, a)\n print(game_art.logo)\n if score != 0:\n print(f\"You're right! Current score: {score}\")\n print('Compare A:', end=' ')\n followers_a = display_person(game_data.data[a])\n print(game_art.vs)\n print('Against B:', end=' ')\n followers_b = display_person(game_data.data[b])\n choice = input(\"Who has more followers? Type 'A' or 'B': \").lower()\n correct = compare_choice(followers_a, followers_b, choice == 'a')\n clear_screen()\n if correct:\n score += 1\n else:\n return score\n a = b", "def draw_score(self, DISP, points:int):\r\n\r\n Text_Surf_Obj = self.text_input.font_object.render('HIGH SCORE', True, Colors.colors['WHITE'])\r\n Score_Surf_Obj = self.text_input.font_object.render(self.name + ' ' + str(points), True, Colors.colors['WHITE']) \r\n index = self.highscore_counter - 1\r\n\r\n # When the highscore_counter reaches zero the current Player has the highest Score\r\n if self.highscore_counter == 0:\r\n index = 0\r\n\r\n highscore_name = self.names[index] #< The Name of the Player with the next possible Highscore\r\n highscore = str(self.scores[index]) #< The Score of the Player with the next possible Highscore\r\n\r\n # Checks if the Points from the current Player are greater then the next best Highscore\r\n if points > self.scores[index]:\r\n\r\n # Decreases the highscore_counter by 1 when the highscore_counter > 0\r\n if self.highscore_counter > 0:\r\n self.highscore_counter -= 1\r\n \r\n # If the current Player already has the highest score, his name and score will be printed on the display\r\n elif self.highscore_counter == 0:\r\n highscore = str(points)\r\n highscore_name = self.name\r\n\r\n # The rest of the function is making the output on the screen, for further details what the functions do visit https://www.pygame.org/docs/\r\n High_Score_Surf_Obj = self.text_input.font_object.render(highscore_name+ ' ' + highscore, True, Colors.colors['WHITE'])\r\n Textrec = Text_Surf_Obj.get_rect()\r\n score_rec = Score_Surf_Obj.get_rect()\r\n highscore_rec = High_Score_Surf_Obj.get_rect()\r\n windowsize = DISP.get_size()\r\n Textrec.centerx = windowsize[0] - highscore_rec.width 
// 2 - 3 * self.grid_size\r\n Textrec.top = 0\r\n score_rec.left = 3 * self.grid_size\r\n score_rec.top = self.grid_size\r\n highscore_rec.right = windowsize[0] - 3 * self.grid_size\r\n highscore_rec.top = self.grid_size\r\n DISP.blit(Text_Surf_Obj, Textrec)\r\n DISP.blit(Score_Surf_Obj, score_rec)\r\n DISP.blit(High_Score_Surf_Obj, highscore_rec)", "def main():\n game = Game(TIMES, HARDNESS)\n game.start()\n game.print_score()", "def scoreboard():\n box = font.render(\"Kills: \" + str(score), True, WHITE) # Rendering font for the scoreboard\n screen.fill(BROWN, rect=box.get_rect(topright=(635, 20))) # Filling the background of the scoreboard with brown\n screen.blit(box, (550, 20)) # Putting the scoreboard in the top right corner of the screen\n pygame.display.update() # Updating the screen", "def mc_update_scores(scores, board, mach_player):\n sqrs = [(row_num, col_num) for row_num in range(board.get_dim()) for col_num in range(board.get_dim())]\n if board.check_win() == mach_player:\n for sqr in sqrs:\n if board.square(sqr[0], sqr[1]) == mach_player:\n scores[sqr[0]][sqr[1]] += SCORE_CURRENT\n elif board.square(sqr[0], sqr[1]) == provided.switch_player(mach_player):\n scores[sqr[0]][sqr[1]] -= SCORE_OTHER\n elif board.check_win() == provided.switch_player(mach_player):\n for sqr in sqrs:\n if board.square(sqr[0], sqr[1]) == mach_player:\n scores[sqr[0]][sqr[1]] -= SCORE_CURRENT\n elif board.square(sqr[0], sqr[1]) == provided.switch_player(mach_player):\n scores[sqr[0]][sqr[1]] += SCORE_OTHER", "def start_game(self):\n user_name = self.user_name.get()\n self.row, self.column = self.get_size_of_grid()\n self.user_symbol, self.cpu_symbol = self.get_symbols()\n if user_name == \"\" or self.row == 0 or self.user_symbol is None:\n messagebox.showwarning(\"Warning!\", \"Please complete all the fields!\")\n return\n # The connection to the database in order to rtetrieve the data is done.\n is_registered = False\n connection, cursor = GUI.open_database_connection()\n cursor.execute(\"select * from player\")\n for row in cursor:\n if row[0] == user_name:\n is_registered = True\n self.user_score = int(row[1])\n self.cpu_score = int(row[2])\n\n if is_registered is False:\n cursor.execute(\"insert into player values (%s, %s, %s)\", (user_name, 0, 0))\n GUI.close_database_connection(connection, cursor)\n # After checking the case if the current user hadn't played the game before, it is added to the database\n\n self.frame_gameplay = Frame(self.root, bd=4)\n self.frame_game_entry.pack_forget()\n self.frame_gameplay.pack()\n\n GUI.insert_empty_space(self.frame_gameplay, 0, 10)\n # The purpose of the below labels is for \"design\n score_label = Label(self.frame_gameplay, text=\"Score:\")\n score_label.grid(row=0, column=11)\n\n label_user = Label(self.frame_gameplay, text=user_name)\n label_cpu = Label(self.frame_gameplay, text=\"CPU\")\n label_cpu.grid(row=1, column=12)\n label_user.grid(row=0, column=12)\n\n lbl_user_score = Label(self.frame_gameplay, text=str(self.user_score))\n lbl_cpu_score = Label(self.frame_gameplay, text=str(self.cpu_score))\n lbl_user_score.grid(row=0, column=13)\n lbl_cpu_score.grid(row=1, column=13)\n\n funny_label = Label(self.frame_gameplay, text=\"Play Obstruction!\")\n funny_label.grid(row=0, column=0, columnspan=3)\n\n GUI.insert_empty_space(self.frame_gameplay, 1, 0)\n # The true gameplay starts now!\n self.upload_board()", "def vanilaScore(self,attended,state,W):", "async def highscore(self, ctx, num: PositiveInt=5):\n num = min(num, 
len(self.data[ctx.guild.id]['score']), 9)\n if not num:\n return await ctx.send(\"Nobody has a score yet.\")\n lst = sorted(self.data[ctx.guild.id]['score'].items(), key=lambda x:x[1], reverse=True)\n result = str()\n for i in range(num):\n result += f\"{self.bot.get_user(lst[i][0])} - {lst[i][1]} points\\n\"\n return await ctx.send(result)", "def fix_scores(cls,req):\n# if from_uid==1: \n print(\"resetting scores to zero...\")\n cls.list(asObjects=False,sql=\"update `%s`.pages set score=0\" % cls.Config.database)\n print(\"adding play scores ...\")\n# for i in cls.Play.list(where= (\"uid>=%s\" % req.from_uid) if (req.from_uid>1) else \"\"):\n for i in cls.Play.list():\n try: \n tob=cls.get(i.page)\n tob.score=tob.score+i.times\n tob.flush()\n# except Exception as e:\n# print \"ERROR with \",i.page,' : ',e\n except:\n print(\"deleting %s play(s) for missing track %s\" % (i.times,i.page))\n i.delete() # delete invalid plays (presumably the track is already deleted)\n print(\"calculating summary scores\")\n cls.fix_summary_scores(req)\n print(\"done\")\n return \"all scores reset from plays table\"", "def leaderboard(score=None, username=None):\n if score and username != None:\n with open(\"leaderboard.csv\", \"a\", newline='') as file:\n fields = ['score', 'name']\n writer = csv.DictWriter(file, fieldnames=fields)\n writer.writerow({'score' : score, 'name' : username})\n\n with open(\"leaderboard.csv\", \"r\") as file:\n sortlist = []\n reader = csv.reader(file)\n for i in reader:\n sortlist.append(i)\n for ind, value in enumerate(sortlist):\n if ind != 0:\n value[0] = int(value[int(0)])\n\n for ind, value in enumerate(sortlist):\n print(value)\n\n\n for i in range(555):\n for i in range(len(sortlist)-1):\n if i != 0:\n if sortlist[i][0] < sortlist[i+1][0]:\n change = sortlist[i]\n sortlist[i] = sortlist[i+1]\n sortlist[i+1] = change\n for i in range(len(sortlist)-1):\n print(sortlist[i])\n else:\n print('Leaderboard has been Created Play/Win a session to create a new Leaderboard')", "def hire(update, context):\n #answer query and load users\n context.bot.answer_callback_query(update.callback_query.id)\n data = update.callback_query.data\n usernames = re.match(r'-3\\S*-(\\S+)-(\\S+)', data)\n username1, username2 = usernames.group(1), usernames.group(2)\n user1, user2 = um.load_user_data([username1, username2])\n #prevent user from executing if status is not 2\n if user1[\"user_status\"] != \"2\":\n return None\n #variable to control actions\n approved_action = []\n #list of possible actions to take depending on button pressed (callback_query.data)\n if \"-3-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['soldier']['price'])):\n reply_markup = mc.show_user_options(user1[\"username\"], user2[\"username\"], 5, [\"Soldiers\", \"Warriors\", \"Knights\", \"Flee\", \"Back\"], [\"3.1\", \"3.2\", \"3.3\", \"flee\", \"reshow_main\"])\n context.bot.editMessageText(chat_id=user1[\"userid\"], message_id=update.callback_query.message.message_id, text=\"Which unit do you wish to hire?\", reply_markup=reply_markup)\n elif \"-3.1-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['soldier']['price'])):\n reply_markup = mc.show_user_options(user1[\"username\"], user2[\"username\"], 5, [\"5\", \"10\", \"15\", \"Flee\", \"Back\"], [\"3.1.1\", \"3.1.2\", \"3.1.3\", \"flee\", \"3\"])\n context.bot.editMessageText(chat_id=user1[\"userid\"], message_id=update.callback_query.message.message_id, text=\"How many Soldiers do you wish to hire? 
(20 Gold, 5 Attack Damage each)\", reply_markup=reply_markup)\n elif \"-3.2-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['warrior']['price'])):\n reply_markup = mc.show_user_options(user1[\"username\"], user2[\"username\"], 5, [\"5\", \"10\", \"15\", \"Flee\", \"Back\"], [\"3.2.1\", \"3.2.2\", \"3.2.3\", \"flee\", \"3\"])\n context.bot.editMessageText(chat_id=user1[\"userid\"], message_id=update.callback_query.message.message_id, text=\"How many Warriors do you wish to hire? (50 Gold, 10 Attack Damage each)\", reply_markup=reply_markup)\n elif \"-3.3-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['knight']['price'])):\n reply_markup = mc.show_user_options(user1[\"username\"], user2[\"username\"], 5, [\"5\", \"10\", \"15\", \"Flee\", \"Back\"], [\"3.3.1\", \"3.3.2\", \"3.3.3\", \"flee\", \"3\"])\n context.bot.editMessageText(chat_id=user1[\"userid\"], message_id=update.callback_query.message.message_id, text=\"How many Knights do you wish to hire? (100 Gold, 20 Attack Damage each)\", reply_markup=reply_markup)\n elif \"-3.1.1-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['soldier']['price'])):\n approved_action = [\"-3.1.1-\", \"You chose to hire <b>5</b> Soldiers!\"]\n elif \"-3.1.2-\" in data and gm.check_gold(context.bot, user1[\"username\"], 10*int(config['soldier']['price'])):\n approved_action = [\"-3.1.2-\", \"You chose to hire <b>10</b> Soldiers!\"]\n elif \"-3.1.3-\" in data and gm.check_gold(context.bot, user1[\"username\"], 15*int(config['soldier']['price'])):\n approved_action = [\"-3.1.3-\", \"You chose to hire <b>15</b> Soldiers!\"]\n elif \"-3.2.1-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['warrior']['price'])):\n approved_action = [\"-3.2.1-\", \"You chose to hire <b>5</b> Warriors!\"]\n elif \"-3.2.2-\" in data and gm.check_gold(context.bot, user1[\"username\"], 10*int(config['warrior']['price'])):\n approved_action = [\"-3.2.2-\", \"You chose to hire <b>10</b> Warriors!\"]\n elif \"-3.2.3-\" in data and gm.check_gold(context.bot, user1[\"username\"], 15*int(config['warrior']['price'])):\n approved_action = [\"-3.2.3-\", \"You chose to hire <b>15</b> Warriors!\"]\n elif \"-3.3.1-\" in data and gm.check_gold(context.bot, user1[\"username\"], 5*int(config['knight']['price'])):\n approved_action = [\"-3.3.1-\", \"You chose to hire <b>5</b> Knights!\"]\n elif \"-3.3.2-\" in data and gm.check_gold(context.bot, user1[\"username\"], 10*int(config['knight']['price'])):\n approved_action = [\"-3.3.2-\", \"You chose to hire <b>10</b> Knights!\"]\n elif \"-3.3.3-\" in data and gm.check_gold(context.bot, user1[\"username\"], 15*int(config['knight']['price'])):\n approved_action = [\"-3.3.3-\", \"You chose to hire <b>15</b> Knights!\"]\n if approved_action != []:\n um.switch_user_status(user1, user2, \"3\", \"2\")\n context.bot.deleteMessage(chat_id=user1[\"userid\"], message_id=update.callback_query.message.message_id)\n context.bot.send_message(chat_id=user1[\"userid\"], text=approved_action[1], parse_mode=ParseMode.HTML)\n gm.hire(context.bot, user1[\"username\"], user2[\"username\"], approved_action[0][1:6])\n elif \"-3-\" in data or \"-3.1-\" in data or \"-3.2\" in data or \"-3.3-\" in data:\n pass\n else:\n context.bot.send_message(chat_id=user1[\"userid\"], text=\"You do not have enough gold!\")\n return None", "def record_score(self, answer, score):\n f = open('scores.txt', 'r+')\n new_first_line = str(answer) + ' ' + str(score) + '\\n'\n if not f.read(1):\n 
f.write(new_first_line)\n else:\n first_line = f.readline().split()\n if int(first_line[-1]) < score:\n f.seek(0)\n whole = f.readlines()\n f.seek(0, 0)\n f.write(new_first_line)\n for line in whole:\n f.writelines(line)\n else:\n f.write(new_first_line)\n f.close()", "def __show_scoreboard(self):\n self.clear_screen()\n\n print('\\n' * 2, end=\"\")\n for line in self.__fame:\n print((\" \" * 5) + line, end=\"\")\n print('\\n' * 2, end=\"\")\n\n with open(\"mastermind/assets/scores.json\", \"r\") as data:\n board = list(load(data).items())\n\n space = \" \" * 11\n print(f\"{space}RANK {'PLAYER':<30}\" +\n f\"{'TIME':>7} (seconds){'POINTS':>29}\\n\")\n\n lines_printed = 0\n for idx, entry in enumerate(board[:10]):\n lines_printed += 1\n space = \" \" * 10\n n = idx + 1\n year, month, day, time = entry[0].split(\" \")\n points = entry[1][\"points\"]\n playtime = entry[1][\"playtime\"]\n player = entry[1][\"player\"]\n\n print(f\"{space}{n:>4}. {player:<30}\" +\n f\"{playtime:>7,.2f}{points:>36}/15\")\n\n lines = \"\\n\" * (12 - lines_printed)\n print(f\"{lines}{space}\", end=\"\")\n sleep(.25)\n self.cool_print(\"Press ENTER to return to player menu.\",\n newline=False, margin=0)\n input()", "def update_leaderboard(self, name, score):\n \n # Remove ranking no 10 if given name not in list.\n if name not in self.history_score.keys():\n self.history_score.popitem()\n \n # Add the highscore to leaderboard.\n self.history_score[name] = score\n \n # Sort the leaderboard.\n self.history_score = dict(sorted(self.history_score.items(), key=lambda x:x[1]))\n with open(\"Leaderboard.csv\", 'w') as f:\n for key in self.history_score.keys():\n f.write(\"%s, %s\\n\"%(key, self.history_score[key]))", "def game_is_over(self):\r\n player_name = self.input('Please enter your name to log score!') + ' '\r\n scores_list = []\r\n try:\r\n # Read the lines of a file into a list\r\n with open('scores.txt', 'r') as scores:\r\n for line in scores:\r\n scores_list.append(line.strip())\r\n # Add the player's score\r\n scores_list.append(player_name + str(self.tr.black_disks))\r\n # Sort the list based on scores\r\n scores_list = sorted(scores_list,\r\n key=lambda x: x[-2:],\r\n reverse=True)\r\n # Just a quick Print out of the top 5 high scores:\r\n if len(scores_list) >= 5:\r\n print('Top scores:')\r\n for item in range(5):\r\n print(scores_list[item])\r\n # Write the list in lines to the file\r\n with open('scores.txt', 'w') as scores:\r\n for line in scores_list:\r\n scores.write(line + '\\n')\r\n except BaseException:\r\n print('Scores tracking file not found :(')\r\n return", "def check_answer(answer, session):\n \n print(\"check_answer: \", answer)\n\n attributes = session[\"attributes\"]\n print(\"atts\\n\", attributes)\n scores = attributes[\"scores\"]\n curr_round = attributes[\"current_round\"]\n curr_player = attributes[\"current_player\"]\n quest_index = attributes[\"question_index\"]\n sess_questions = attributes[\"sess_questions\"]\n print(\"before curr item\")\n curr_item = quiz.list_fragen[sess_questions[quest_index]]\n print(\"curr_item created\")\n\n if curr_item.evaluate(answer):\n text = get_reaction(\"pos\")\n result = 1\n else:\n text = get_reaction(\"neg\")\n result = 0\n text += \"<s>Die richtige Antwort war \" + curr_item.get_ans_str() + \"</s>\"\n print(\"evaluation done\")\n print(\"scores \", scores)\n print(\"curr_player \", curr_player)\n\n scores[str(curr_player)] += result\n print(\"score updated\")\n if curr_round == TOTAL_ROUNDS and curr_player == len(scores):\n text += 
get_final_score(scores)\n print(scores)\n attributes[\"scores\"] = scores\n return response(speech_response=text,should_end_session=True,\\\n card_text= ABSCHIED_CARD_TEXT +\"\\n\" + \\\n build_card_content(attributes=attributes, add_frage=False))\n\n curr_player += 1\n print(\"player updated\")\n if curr_player > len(scores):\n curr_player = 1\n curr_round += 1\n quest_index += 1\n \n attributes[\"scores\"] = scores \n attributes[\"current_round\"] = curr_round\n attributes[\"current_player\"] = curr_player\n attributes[\"question_index\"] = quest_index\n\n frage = ask_question(quest_index, attributes)\n text += frage\n \n attributes[\"current_question\"] = frage\n attributes[\"speech_output\"] = text\n attributes[\"reprompt_text\"] = frage\n\n return response(text, should_end_session=False, reprompt_text=frage, \\\n attributes=attributes)", "def update_scores(self, score):\n self.result_list.append(score)\n\n if self.best_score == 0 and self.worst_score == 0:\n self.best_score = score\n self.worst_score = score\n\n if score < self.best_score:\n self.best_score = score\n\n if score > self.worst_score:\n self.worst_score = score", "def draw_score():\n global score, hi_score, score2, win\n if game_on2:\n score2 += 1 / 30\n score2 = round(score2, 3)\n # if score % 30 == 0:\n # score2 += 1\n camera.draw(\"Time: \" + str(score2), 30, \"black\", camera.x + 250, 30)\n if hi_score < 1000000000000000:\n camera.draw('Hi ' + str(hi_score), 30, \"black\", camera.x + 150, 30)\n if win:\n draw_hi_score()", "def give_round_scores(list_of_players):\n print(\"\\nThe round has ended !\\nWe shall now unveil the cards and the scores!\")\n\n for player in list_of_players:\n cards = [card.name for card in player.cards]\n cards_string = \" \"\n for card in cards:\n cards_string += card + \", \"\n cards_string = cards_string[:-2]\n print(\"\\n{} has these cards: \".format(player.name), cards_string)\n print(\"{} has a score of {}\".format(player.name, player.score()))\n final_scores = [player.score() for player in list_of_players]\n min_score = min(final_scores)\n winners_index = [i for i, x in enumerate(final_scores) if x == min_score]\n if len(winners_index) == 1:\n index_winner = winners_index[0]\n winner = list_of_players[index_winner]\n print(winner.name, \"won the round with a score of {}\".format(winner.score()))\n if len(winners_index) > 1:\n print(\"It's a tie!\")\n winners_names = \"\"\n winners = [list_of_players[i] for i in winners_index]\n for winner in winners:\n winners_names += winner.name\n print(winners_names, \"won the round with a score of \", str(min_score))", "def win(self):\n self.score += 1\n self.ids['score'].text = 'SCORE: ' + str(self.score)", "async def leaderboard(self, ctx):\n\t\t# TODO: convert to buttons whenever I get around to 3.5 support\n\t\traw = await self.config.all_users()\n\t\tstocks = set()\n\t\tfor uid, data in raw.items():\n\t\t\tstocks = stocks.union(set(data['stocks'].keys()))\n\t\ttry:\n\t\t\tstock_data = await self._get_stock_data(list(stocks))\n\t\texcept ValueError as e:\n\t\t\treturn await ctx.send(e)\n\t\tprocessed = []\n\t\tfor uid, data in raw.items():\n\t\t\ttotal = 0\n\t\t\tfor ticker, stock in data['stocks'].items():\n\t\t\t\tif ticker not in stock_data:\n\t\t\t\t\tcontinue\n\t\t\t\ttotal += stock['count'] * stock_data[ticker]['price']\n\t\t\tif not total:\n\t\t\t\tcontinue\n\t\t\tprocessed.append((uid, total))\n\t\tprocessed.sort(key=lambda a: a[1], reverse=True)\n\t\tresult = ''\n\t\tfor idx, data in enumerate(processed, start=1):\n\t\t\tuid, total = 
data\n\t\t\tuser = self.bot.get_user(uid)\n\t\t\tif user:\n\t\t\t\tuser = user.name\n\t\t\telse:\n\t\t\t\tuser = '<Unknown user `{uid}`>'\n\t\t\tresult += f'{idx}. {total} - {user}\\n'\n\t\tpages = [f'```md\\n{x}```' for x in pagify(result, shorten_by=10)]\n\t\tif not pages:\n\t\t\tawait ctx.send('Nobody owns any stocks yet!')\n\t\t\treturn\n\t\tc = DEFAULT_CONTROLS if len(pages) > 1 else {\"\\N{CROSS MARK}\": close_menu}\n\t\tawait menu(ctx, pages, c)", "def your_score(score):\n value = score_font.render(\"Your Score: \" + str(score), True, green)\n dis.blit(value, [0, 0])", "def score_update(scoreboard, compare):\r\n if compare == 'Victory':\r\n scoreboard['W'] += 1\r\n elif compare == 'Defeat':\r\n scoreboard['L'] += 1\r\n elif compare == 'Tie':\r\n scoreboard['T'] += 1", "def update_score(self, mark):\n if mark == 'X':\n self.model.game_score[self.model.player_1] += 1\n else:\n self.model.game_score[self.model.player_2] += 1", "def scorer(self, current_score):\r\n text = self.field.render(\"Score: \" + str(current_score // 2), True, BLACK_COLOUR)\r\n win.blit(text, (0, 0))", "def update_score(self, score: int) -> int:\n self.score += score\n return self.score", "def update_stats(self):\n self.scoreText = pygame.font.Font(FONT, 20)\n\n #update score\n textsurface = self.scoreText.render((\"Score: \"+str(self.current_score)), False, BLUE)\n self.screen.blit(textsurface,(5,5))\n\n #update high score\n if self.highest_score <= self.current_score:\n self.highest_score = self.current_score\n #To write highest score to file\n filename = \"highscore.txt\"\n file = open(filename,\"w\")\n file.write(str(self.highest_score))\n file.close()\n\n #Display High Score\n textsurface = self.scoreText.render((\"Highest Score: \"+str(self.highest_score)), False, BLUE)\n self.screen.blit(textsurface,(230,5))\n\n #Display Life Text\n textsurface = self.scoreText.render(\"Lives: \", False, BLUE)\n self.screen.blit(textsurface,(570,5))\n\n #Shows lifes left\n for i in range(self.lives):\n self.live = pygame.image.load(\"./images/ship.png\").convert_alpha()\n self.live = pygame.transform.scale(self.live , (20, 20))\n self.screen.blit(self.live, (670+(i*25), 7))\n\n #Mute Button\n button=pygame.image.load(\"./images/mutebutton.png\")\n button=pygame.transform.scale(button,(30,30))\n self.screen.blit(button, (750,5))", "def liste_triee():\n\n global liste_username_score\n\n liste_username_score = sorted(liste_username_score, reverse = True, key=score_trie)", "def view_scores(jenni, input):\n scores.view_scores(jenni, input)", "def rmpoint(phenny, input):\n\tglobal scores_dict\n\tif input.group(2) == \" \" or input.group(2) == \"\" or str(input.group(2)) == None or str(input.group(2)) == \"\" or input.group(2) == None:\n\t\tphenny.say(\"I'm sorry, \" + str(input.nick) + \". I'm afraid I can't add that user!\")\n\telse:\n\t\tnick_addpoint = input.group(2)\n\n\tif input.nick == nick_addpoint:\n\t\tphenny.say(\"I'm sorry, \" + str(input.nick) + \". I'm afraid I can't do that!\")\n\telse:\n\t\tnick_addpoint = nick_addpoint.lower()\n\t\tif nick_addpoint in scores_dict:\n\t\t\tscores_dict[nick_addpoint] -= 1\n\t\t\tscores_file = open(\"scores.txt\", \"w\")\n\t\t\tpickle.dump(scores_dict, scores_file)\n\t\t\tphenny.say(nick_addpoint + \": \" + str(scores_dict[nick_addpoint]))\n\t\t\tscores_file.close()\n\t\telse:\n\t\t\tphenny.say(\"I'm sorry, \" + str(input.nick) + \". 
I'm afraid I can't do that!\")", "def set_game_auth_score(self, _score: Address) -> None:\n if self.msg.sender == self.owner:\n self._game_auth_score.set(_score)", "def game_over(self, user_quit=\"\"):\r\n scores = self.scores\r\n game_winner = (scores[\"p1\"] > scores[\"p2\"] and \"One\" or \"Two\")\r\n score_list = list(scores.values())\r\n score_difference = max(score_list) - min(score_list)\r\n print(user_quit and \"\\nYou quit the game.\" or \"Game over!\")\r\n print(f\"\\nFINAL SCORES:\"\r\n f\"\\nPlayer One Total Score: {scores['p1']}\"\r\n f\"\\nPlayer Two Total Score: {scores['p2']}\"\r\n f\"\\n\\nRESULTS:\")\r\n print(\r\n scores[\"p1\"] != scores[\"p2\"] and\r\n f\"** Player {game_winner} Wins the Game\"\r\n f\" by {score_difference}\"\r\n f\" Point{score_difference > 1 and 's' or ''}! **\\n\" or\r\n \"** TIE: Both Players Scored the Same! **\"\r\n )\r\n\r\n # Asks User if they Want to Restart the Game:\r\n def restart_game():\r\n \"\"\"Asks user if they want to restart the game.\r\n If yes, resets self.score.\r\n If not, thanks player and exits.\r\n It also checks for valid input.\"\"\"\r\n restart = input(\"Would you like to play again?\\n(y/n):\")\r\n if restart.lower() == \"y\" or restart.lower() == \"yes\":\r\n print(\"\\nStarting New Game!\\n\")\r\n self.scores = {\"p1\": 0, \"p2\": 0} # Restarts scores\r\n self.play_game() # Restarts game\r\n elif restart.lower() == \"n\" or restart.lower() == \"no\":\r\n print(\"\\nThank you for playing!\")\r\n else: # Checks for valid input:\r\n print(\"Oops! Seems like you misspelled something.\"\r\n \"\\nMake sure your response is: y, yes, n or no.\")\r\n return restart_game()\r\n\r\n # If Player Typed \"quit\", Game Ends Without Restart Option:\r\n if not user_quit:\r\n restart_game()\r\n exit()", "def results():\n \n global score\n\n if score <= 3:\n print(f\"You got {score} out of 10 questions correct. It's clear you don't know much or care about Linkin Park.\")\n elif score <= 6 > 3:\n print(f\"You got {score} out of 10 questions correct. You know quite a bit about Linkin Park, nice.\")\n elif score <= 9 > 6:\n print(f\"You got {score} out of 10 questions correct. You must be a fan of Linkin Park. That's great!\")\n elif score == 10:\n print(f\"You got {score} out of 10 questions correct. You must be a superfan! We should be friends.\")\n else:\n print(\"Huh? You somehow broke the program... that's weird.\")", "def main(self):\n \n print(\"Welcome to Speedy Gonzales!\")\n print(\"Press 'a' to move your left leg, 'k' to move your right leg.\")\n print(\"Ready?\")\n \n start = time.time()\n self.score = 0\n self.previous_key = None\n \n def on_key_release(key):\n try:\n if key == keyboard_0.Key.esc:\n print(\"Good Bye!\")\n listener.stop()\n exit()\n \n if key.char == \"a\" or key.char=='k':\n msg = \" . \" if key.char=='a' else \" .\"\n print(msg)\n # Main loop to run the program.\n if key.char != self.previous_key:\n self.score = self.score + 1\n self.previous_key = key.char\n else:\n # Hit 'a' or 'k' two times in a row, fell for 2s.\n self.previous_key = None\n print(\"Ops! You fell down on your face!\")\n time.sleep(1)\n print(\"Stand up!\")\n time.sleep(1)\n print(\"Continue running!\")\n \n # Finish running.\n if self.score >= 40:\n listener.stop()\n end = time.time()\n score = round(end - start, 2)\n print(\"Congrats! 
You have run 100m in {}!\".format(score))\n \n print(\"Check the previous winner...\")\n self.print_leaderboard() \n \n score_flag = self.get_highscore(score)\n if score_flag == 0:\n # Not in leaderboard.\n print(\"Thanks for playing.\")\n else:\n if score_flag == 1:\n # In top 10 leaderboard.\n winner_msg = \"\\n\\nYou have achieved top 10! \"\n else:\n # Get champion.\n winner_msg = \"\\n\\nYou are the champion! \"\n \n # Get winner's name and update leaderboard.\n name = input(winner_msg + \"Please enter your name:\" )\n self.update_leaderboard(name, score)\n \n print(\"Lets check the new leaderboard.... \\n\\n\\n\\n\\n\")\n time.sleep(1)\n self.print_leaderboard() \n \n \n # Ask for new game.\n in_flag = input(\"New game? (Y/N)\")\n if in_flag.upper() == 'Y':\n self.main()\n else:\n print(\"Good Bye!\")\n exit()\n \n except AttributeError:\n print(key)\n\n with keyboard_0.Listener(on_release = on_key_release, suppress=True) as listener:\n listener.join()", "def update_overall_leaderboard(request):\n try:\n user = UserProfile.objects.order_by('-overall_score')\n count = UserProfile.objects.all().count()\n entries = OverallLeaderboard.objects.all().count()\n \n if entries > 0:\n over = OverallLeaderboard.objects.all().delete()\n i = 0\n while i < count:\n over = OverallLeaderboard()\n over.username = user[i].user\n over.overall_points_earned = user[i].overall_score\n over.save()\n i += 1\n \n data = {'msg':''}\n messages.success(request, \"Overall Leaderboard updated successfully.\")\n return render_to_response('my_admin_tools/menu/background_task.html',data,context_instance=RequestContext(request))\n except:\n msg = traceback.format_exc()\n data = {'msg':msg}\n messages.error(request, \"Update Overall Leaderboard failed.\")\n return render_to_response('my_admin_tools/menu/background_task.html',data,context_instance=RequestContext(request))", "def updateScore(self, player: int) -> None:\n\n if player == 1:\n self._score[0] += 1\n elif player == 2:\n self._score[1] += 1\n\n # logging\n logger.info(\"Player {winner} has scored a goal. 
Score: {score}\", winner=player, score=str(self._score))", "def game_over(score, ship1, ship2, timer):\r\n ship1.lives = 0\r\n letters = []\r\n r = screen.get_rect()\r\n # if username has not been entered yet\r\n if s.username == \"\":\r\n while True:\r\n enter_pressed = helpers.get_name(letters) # get username from player\r\n # show a game over screen\r\n game_over_image = pg.image.load(\"images/game over.jpg\").convert()\r\n game_over_image = pg.transform.scale(game_over_image, screen.get_size())\r\n screen.blit(game_over_image, (0, 0))\r\n # display username as player types it\r\n helpers.display_text(\r\n s.username, \"centerx\", \"centery\", r.centerx, r.centery + 150\r\n )\r\n helpers.display_text(\r\n \"Enter Name:\", \"centerx\", \"centery\", r.centerx, r.centery + 100\r\n )\r\n # show player's final score on screen\r\n helpers.display_text(\r\n (\"Score {0}\".format(str(int(timer ** 2) + score))),\r\n \"centerx\",\r\n \"centery\",\r\n r.centerx,\r\n r.centery,\r\n )\r\n pg.display.update()\r\n\r\n # once player submits username, save their score to a file\r\n if enter_pressed:\r\n helpers.save_score(s.username, int(timer ** 2 + score))\r\n break\r\n\r\n else:\r\n helpers.save_score(s.username, int(timer ** 2 + score))\r\n\r\n # get all saved scores / usernames from the file\r\n f = open(\"names.txt\", \"r\")\r\n data = json.load(f)\r\n f.close\r\n\r\n # leaderboard stores a sorted list of (username, score) tuples from all previous players\r\n leaderboard = []\r\n for key, value in data.items():\r\n leaderboard.append((key, value))\r\n leaderboard = sorted(leaderboard, key=itemgetter(1), reverse=True)\r\n\r\n screen.blit(pg.image.load(\"images/lb_bg.png\"), (0, 0))\r\n n = 120\r\n y_offset = 33\r\n\r\n # goes through top 10 entries in the leaderboard and shows them on screen\r\n for i, user in enumerate(leaderboard):\r\n if i != 9:\r\n message = \" \" + str(i + 1) + \": \" + str(user[0])\r\n else:\r\n message = str(i + 1) + \": \" + str(user[0])\r\n message = (\r\n message.strip()\r\n .replace(\"'\", \"\")\r\n .replace(\")\", \"\")\r\n .replace(\"(\", \"\")\r\n .replace(\",\", \" :\")\r\n )\r\n helpers.display_text(\r\n \"LEADERBOARD\", \"centerx\", \"centery\", r.centerx, r.centery - 200\r\n )\r\n helpers.display_text(message, \"left\", \"top\", 30, r.centery - n)\r\n\r\n n -= y_offset\r\n pg.display.update()\r\n if i == 9:\r\n break\r\n\r\n while True:\r\n for event in pg.event.get():\r\n if event.type == pg.MOUSEBUTTONDOWN:\r\n main()", "def run(self):\n print \"Welcome to the BlackJack game ......\" # print help function if needed\n deckObj = Deck()\n deckObj.shuffle()\n while(not self.checkGameComplete()):\n self.displayGame()\n card = deckObj.deal()\n # ask user for move\n position = raw_input('Please input a number [1-16] for table, or [17-20] for discard list\\n')\n isPass = self.errorChecking(position)\n while(not isPass):\n position = raw_input('Please input a number [1-16] for table, or [17-20] for discard list\\n')\n isPass = self.errorChecking(position)\n # update table\n self.updateTableAndDiscardLs(position,card)\n ### Score Game\n self.displayGame()\n score = self.scoreGame()\n print 'Congratulations! Your final score is:'\n print score\n print 'Game is done... Thank you!'", "def update_score(self, score_point: int):\r\n self._score_point = score_point\r\n self._update_score() # change the visual display of points for the player\r", "def main():\n position = (0,1)\n score = 0\n filename = input(\"Please enter the name of the level file (e.g. 
level1.txt): \")\n level = load_level(filename)\n checkpoint=0 \n\n while True:\n print (\"Score:\", score)\n print_level(level,position)\n \n direction = input(\"Please enter an action (enter '?' for help): \")\n if direction in (RIGHT,LEFT):\n position = move(level,position,direction)\n elif direction == \"a\":\n level = attack(level,position)\n elif direction == \"?\":\n print (HELP_TEXT)\n elif direction == \"q\":\n return\n elif direction == \"n\": \n position = saved_position\n score = saved_score\n continue\n \n tile,level = tile_status(level,position)\n if tile == CHECKPOINT:\n saved_position = position\n saved_score = score\n checkpoint+=1\n if tile == MONSTER:\n if checkpoint != 0:\n position = saved_position\n score = saved_score\n continue\n break\n elif tile == GOAL:\n return\n elif tile == COIN:\n score +=1", "def add_score(self, score_to_add):\n self.score += score_to_add\n if self.score // self.level >= 20:\n self.level += 1\n self.speed *= self.factor\n # Also redraw all pixels because they now change color\n self.screen.grid()\n self.screen.block()\n self.screen.next()\n # Refresh the data on screen\n self.screen.data()", "def updateScore(self, score):\n self.__score += score", "def score(self):\n return self.client.call('GET', self.name + 'score')", "def add_score():\n json_data = request.get_json()\n exercise_id = json_data.get(\"exercise_id\")\n score = json_data.get(\"score\")\n user_id = session.get(\"email\")\n fm.add_attempt(exercise_id, score, user_id)\n\n msg = \"Attempt added. Exercise ID: {} Score: {}\"\\\n .format(exercise_id, score)\n app.logger.info(msg)\n return jsonify(dict(result=\"success\"))", "def draw_new_score(self):\n pygame.event.get() #clear event list? Otherwise it skips\n self.screen.fill((0, 0, 0))\n #sessionsurf = self.f24.render(\"Session %d, Game %d/%s\"%(self.session_number, self.game_number, self.config[\"games_per_session\"]), True, (255,255,255))\n # sessionrect = sessionsurf.get_rect()\n # sessionrect.centerx = self.SCREEN_WIDTH / 2\n # sessionrect.y = 100\n # self.screen.blit(sessionsurf, sessionrect)\n gamesurf = self.f36.render(\"Game %d\" % (self.current_game), True, (255, 255, 0))\n gamerect = gamesurf.get_rect()\n gamerect.centery = self.SCREEN_HEIGHT / 16 * 2\n gamerect.centerx = self.SCREEN_WIDTH / 2\n self.screen.blit(gamesurf, gamerect)\n pygame.draw.line(self.screen, (255, 255, 255), (self.SCREEN_WIDTH / 4 , self.SCREEN_HEIGHT / 16 * 3), (self.SCREEN_WIDTH / 4 * 3, self.SCREEN_HEIGHT / 16 * 3))\n pntssurf = self.f24.render(\"Flight score:\", True, (255, 255, 0))\n pntsrect = pntssurf.get_rect()\n pntsrect.left = self.SCREEN_WIDTH / 3\n pntsrect.centery = self.SCREEN_HEIGHT / 16 * 4\n self.screen.blit(pntssurf, pntsrect)\n cntrlsurf = self.f24.render(\"Fortress score:\", True, (255, 255, 0))\n cntrlrect = cntrlsurf.get_rect()\n cntrlrect.left = self.SCREEN_WIDTH / 3\n cntrlrect.centery = self.SCREEN_HEIGHT / 16 * 6\n self.screen.blit(cntrlsurf, cntrlrect)\n vlctysurf = self.f24.render(\"Mine score:\", True, (255, 255, 0))\n vlctyrect = vlctysurf.get_rect()\n vlctyrect.left = self.SCREEN_WIDTH / 3\n vlctyrect.centery = self.SCREEN_HEIGHT / 16 * 8\n self.screen.blit(vlctysurf, vlctyrect)\n speedsurf = self.f24.render(\"Bonus score:\", True, (255, 255, 0))\n speedrect = speedsurf.get_rect()\n speedrect.left = self.SCREEN_WIDTH / 3\n speedrect.centery = self.SCREEN_HEIGHT / 16 * 10\n self.screen.blit(speedsurf, speedrect)\n pntsnsurf = self.f24.render(\"%d\" % self.score.flight, True, (255, 255, 255))\n pntsnrect = 
pntsnsurf.get_rect()\n pntsnrect.right = self.SCREEN_WIDTH / 3 * 2\n pntsnrect.centery = self.SCREEN_HEIGHT / 16 * 4\n self.screen.blit(pntsnsurf, pntsnrect)\n cntrlnsurf = self.f24.render(\"%d\" % self.score.fortress, True, (255, 255, 255))\n cntrlnrect = cntrlnsurf.get_rect()\n cntrlnrect.right = self.SCREEN_WIDTH / 3 * 2\n cntrlnrect.centery = self.SCREEN_HEIGHT / 16 * 6\n self.screen.blit(cntrlnsurf, cntrlnrect)\n vlctynsurf = self.f24.render(\"%d\" % self.score.mines, True, (255, 255, 255))\n vlctynrect = vlctynsurf.get_rect()\n vlctynrect.right = self.SCREEN_WIDTH / 3 * 2\n vlctynrect.centery = self.SCREEN_HEIGHT / 16 * 8\n self.screen.blit(vlctynsurf, vlctynrect)\n speednsurf = self.f24.render(\"%d\" % self.score.bonus, True, (255, 255, 255))\n speednrect = speednsurf.get_rect()\n speednrect.right = self.SCREEN_WIDTH / 3 * 2\n speednrect.centery = self.SCREEN_HEIGHT / 16 * 10\n self.screen.blit(speednsurf, speednrect)\n #draw line\n pygame.draw.line(self.screen, (255, 255, 255), (self.SCREEN_WIDTH / 4 , self.SCREEN_HEIGHT / 16 * 11), (self.SCREEN_WIDTH / 4 * 3, self.SCREEN_HEIGHT / 16 * 11))\n totalsurf = self.f24.render(\"Total game score:\", True, (255, 255, 0))\n totalrect = totalsurf.get_rect()\n totalrect.left = self.SCREEN_WIDTH / 3\n totalrect.centery = self.SCREEN_HEIGHT / 16 * 12\n self.screen.blit(totalsurf, totalrect)\n totalnsurf = self.f24.render(\"%d\" % (self.score.flight + self.score.fortress + self.score.mines + self.score.bonus), True, (255, 255, 255))\n totalnrect = totalnsurf.get_rect()\n totalnrect.right = self.SCREEN_WIDTH / 3 * 2\n totalnrect.centery = self.SCREEN_HEIGHT / 16 * 12\n self.screen.blit(totalnsurf, totalnrect)\n if self.current_game == self.config['General']['games_per_session']:\n finalsurf = self.f24.render(\"You're done! Press return to exit\", True, (0, 255, 0))\n else:\n finalsurf = self.f24.render(\"Press return for next game\", True, (0, 255, 0))\n finalrect = finalsurf.get_rect()\n finalrect.centerx = self.SCREEN_WIDTH / 2\n finalrect.centery = self.SCREEN_HEIGHT / 16 * 14\n self.screen.blit(finalsurf, finalrect)", "def add_played_game(play):\n games = Play.objects.filter(game=play.game)\n total = 0\n for game in list(games):\n total += game.score\n average_score = total/len(games)\n if ((play.score - average_score) > average_score/2.1):\n play.user.level += 1\n elif ((average_score - play.score) > average_score/1.5):\n play.user.level -= 1\n play.user.save()", "def send_scoreboard(context: CallbackContext) -> None:\n if not isinstance(context.chat_data, dict):\n raise AssertionError\n context.chat_data['question_number'] = -1\n msg_text = \"*Quiz Over*! \\n*ScoreBoard*: \\n\\n\"\n values = sorted(context.chat_data['marksheet'].items(),\n key=lambda x: x[1]['score'],\n reverse=True)\n data = [\n f\"{mention_markdown(id, attendee['name'])} : {attendee['score']}\"\n for id, attendee in values\n ]\n data_str = [\n f\"{rank}. 
{name_score}\"\n for rank, name_score in enumerate(data, start=1)\n ]\n scoreboard = \"\\n\".join(data_str)\n msg_text += f'{scoreboard}'\n context.bot.delete_message(\n chat_id=context.chat_data['message'].chat.id,\n message_id=context.chat_data['message'].message_id)\n context.bot.send_message(text=msg_text,\n chat_id=context.chat_data['message'].chat.id,\n parse_mode=ParseMode.MARKDOWN).pin()", "def game_loop(games,num_of_players,winning_score):\n updated_score = 0\n game_continue = True\n\n # Assign player list\n games.create_player_list(num_of_players)\n\n # Game Loop\n while game_continue:\n # cycle between players\n for player_position in range (num_of_players):\n print \"\\nPlayer {} Turn\".format(player_position + 1)\n\n # Clear temp score each round\n temp_score = 0\n\n # Turns loop\n while True:\n # User input\n user_input = raw_input(\"Enter (r)ole or (h)old:\")\n\n # roll dice, get value\n if user_input == \"r\":\n # roll dice using class Game.roll with random seed (time)\n roll_result = games.roll(time.time())\n # if roll a 1 end Turns for this player\n if roll_result < 0:\n print \"\\nPlayer {} rolled a 1. End turn.\".format((player_position + 1))\n break\n # continue game logic\n else:\n print \"Player {} rolled a {}\".format((player_position + 1),roll_result)\n # incriment temp score\n temp_score += roll_result\n\n # hold dice and bank temp score\n elif user_input == \"h\":\n print \"Player {} Holds\".format((player_position + 1))\n # assign value to score using class Game\n updated_score = games.update_player_score((player_position),temp_score)\n break\n\n else:\n print \"Whoops! Enter correct selection (r or h)\"\n\n print \"Score List:\",games.return_player_list()\n\n # determine if a winner\n if updated_score >= winning_score:\n print 'Player {} wins!'.format(player_position + 1)\n game_continue = False\n break", "def updateScore(currentScore, highScore):\n screen.fill(pygame.Color(\"black\"), (10, 210, 130, 20))\n hsWidth = getTextWidth(str(highScore))\n hsPos = (150 - hsWidth) // 2, 210\n displayText(str(highScore), GOLD, hsPos)\n\n screen.fill(pygame.Color(\"black\"), (10, 130, 130, 20))\n csWidth = getTextWidth(str(currentScore))\n csPos = (150 - csWidth) // 2, 130\n displayText(str(currentScore), GOLD, csPos)", "def leaderboard(request):\r\n\tMEDIA_URL = '/media/'\r\n\tgames = Game.objects.all()\r\n\tuser_high_scores = []\r\n\tgame_high_scores = []\r\n\tnew = {}\r\n\t# Get global scores\r\n\tfor game in games:\r\n\t\tgame_intermediate_high = Score.objects.filter(game = game.id).order_by('-current_score').values('game__name', 'player__user__username', 'current_score')[:1]\r\n\t\tif (game_intermediate_high.count() > 0):\r\n\t\t\tgame_high_scores.append(game_intermediate_high)\r\n\t# Check if user is authenticated and get user's scores\r\n\tif (request.user.is_authenticated):\r\n\t\tfor game in games:\r\n\t\t\t\tuser_intermediate_high = Score.objects.filter(game=game.id, player = request.user.profile).order_by('-current_score').values('player__user__username','game__name', 'current_score').distinct()[:1]\r\n\t\t\t\tif (user_intermediate_high.count() > 0):\r\n\t\t\t\t\tuser_high_scores.append(user_intermediate_high)\r\n\r\n\treturn render(request, 'leaderboard.html',{'MEDIA_URL' : MEDIA_URL,'games': games, 'user_high_scores': user_high_scores, 'game_high_scores': game_high_scores})", "def mc_update_scores(scores, board, player):\n for row in range(board.get_dim()):\n for col in range(board.get_dim()):\n if board.check_win()==player:\n if 
board.square(row,col)==player:\n scores[row][col]+=SCORE_CURRENT\n elif board.square(row,col)==switch_player(player):\n scores[row][col]-=SCORE_OTHER\n elif board.check_win()==switch_player(player):\n if board.square(row,col)==player:\n scores[row][col]-=SCORE_CURRENT\n elif board.square(row,col)==switch_player(player):\n scores[row][col]+=SCORE_OTHER\n return" ]
[ "0.67305034", "0.63729185", "0.6132123", "0.6093871", "0.60754216", "0.5949699", "0.59380084", "0.59304893", "0.59282887", "0.5903168", "0.58756876", "0.58682275", "0.5865948", "0.58637714", "0.5862318", "0.5842622", "0.58407027", "0.582625", "0.5826105", "0.5816378", "0.58118486", "0.58025193", "0.5801429", "0.5797606", "0.5796043", "0.57885444", "0.5786602", "0.5771494", "0.57589716", "0.5743708", "0.5719191", "0.571698", "0.57080233", "0.5703273", "0.56883055", "0.56591904", "0.5655245", "0.56544906", "0.56348443", "0.56322277", "0.56313217", "0.5626761", "0.5619536", "0.5613372", "0.56060165", "0.56053054", "0.56036645", "0.55977494", "0.55952966", "0.55917054", "0.5584046", "0.5575388", "0.556565", "0.55591446", "0.5554823", "0.55502903", "0.5549558", "0.55449855", "0.5526964", "0.55220276", "0.5511294", "0.54992265", "0.5497336", "0.5495084", "0.54891205", "0.5474225", "0.5472038", "0.5466722", "0.5465874", "0.54587334", "0.54535013", "0.54509646", "0.54492986", "0.5446763", "0.54453415", "0.54430515", "0.54355055", "0.54217505", "0.5420039", "0.54181576", "0.54161316", "0.5414357", "0.54141015", "0.5413666", "0.5409411", "0.5408591", "0.54050016", "0.53960633", "0.5392795", "0.53855735", "0.5385535", "0.5384166", "0.5377458", "0.5373272", "0.53727657", "0.53713804", "0.5371359", "0.5368386", "0.53657544", "0.53549224" ]
0.5794052
25
Returns the presence for this channel
def presence(self, params=None, timeout=None):
    params = params or {}
    path = '/channels/%s/presence' % self.__name
    return self.__ably._get(path, params=params, timeout=timeout).json()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def presence(self):\n return self.slack_client.api_call(\"users.getPresence?user=\"+self.user_id)", "def isHumanPresence(self):\n\t\treturn self.humanPresence", "def online(self):\n api_call = self.presence()\n if api_call.get('ok'):\n # retrieve all users so we can find our bot\n return api_call.get('online')\n return None", "def Presence(self, *args, **kwargs):\n return Presence(self, *args, **kwargs)", "def ready(self):\n return self._channel.recv_ready()", "def is_open(self, channel=None):\n return self.get_state(channel)", "def getChannelResponse(self):\n \n \n return self.channel_response", "def customers_presence(self):\n return self._customers_presence", "def connected_channel(self):\n if not self.channel_id:\n return None\n\n return self._bot.get_channel(int(self.channel_id))", "async def loop_presence(self):\n # TODO: Does this even work?\n presence = await self.set_presence()\n logger.debug(f'{presence[\"activity\"][1]} {presence[\"status\"][1]}')", "async def check_na_channel(self, guild: discord.Guild):\n\n ch_id = await self.config.guild(guild).na_channel_id()\n\n if ch_id:\n return discord.utils.get(guild.text_channels, id=ch_id)\n return False", "def user_present(ctx: Context, channel: TextChannel) -> bool:\n for member in channel.members:\n if member.id == ctx.author.id:\n return True\n\n return False", "def check_presence(user):\n\n if not settings.SLACK_TOKEN:\n return None\n\n client = WebClient(token=settings.SLACK_TOKEN)\n\n try:\n response = client.users_getPresence(user=user)\n assert response['ok'] is True\n if response['presence'] == 'active':\n return True\n else:\n return False\n except SlackApiError as e:\n assert e.response['ok'] is False\n return None", "def is_present(self):\n return self._is_present()", "def check(self):\n return self.connected", "def connected(self):\n return self._periph.connected", "def channel_is_streaming(self, channel_name = ''): \n \n self.get_stream(channel_name)\n stream_json = self.stream['stream']\n if stream_json is None:\n return False\n else:\n print(stream_json['channel']['name'])\n print(stream_json['game'])\n print(stream_json['viewers'])\n print(stream_json['created_at'])\n return True", "def messages_in_channel(self, client, channel):\n result = None\n if client not in self.storage:\n return result\n if channel not in self.storage[client]:\n return result\n result = len(self.storage[client][channel])\n return result", "def is_connected(self):\n return self.connected_channel is not None", "def get(self, public_id):\n channel = get_channel_state(public_id)\n if not channel:\n api.abort(404)\n else:\n return channel", "def handle_groupchat_presence(self, pr):\n got_offline = False\n got_online = False\n if pr['muc']['room'] not in self.rooms.keys():\n return\n entry = pr['muc'].getStanzaValues()\n entry['show'] = pr['show']\n entry['status'] = pr['status']\n entry['alt_nick'] = pr['nick']\n if pr['type'] == 'unavailable':\n if entry['nick'] in self.rooms[entry['room']]:\n del self.rooms[entry['room']][entry['nick']]\n if '{}/{}'.format(entry['room'], entry['nick']) == self.getOurJidInRoom(entry['room']):\n log.debug(\"I got kicked :( from %s\" % entry['room'])\n del self.rooms[entry['room']]\n got_offline = True\n else:\n if entry['nick'] not in self.rooms[entry['room']]:\n got_online = True\n self.rooms[entry['room']][entry['nick']] = entry\n log.debug(\"MUC presence from %s/%s : %s\", entry['room'],entry['nick'], entry)\n self.xmpp.event(\"groupchat_presence\", pr)\n self.xmpp.event(\"muc::%s::presence\" % 
entry['room'], pr)\n if got_offline:\n self.xmpp.event(\"muc::%s::got_offline\" % entry['room'], pr)\n if got_online:\n self.xmpp.event(\"muc::%s::got_online\" % entry['room'], pr)", "def is_channel(self):\n return True", "def single_channel():\n return True", "def channels_playing(self):\n channels = c_int()\n real = c_int()\n ckresult(\n _dll.FMOD_System_GetChannelsPlaying(self._ptr, byref(channels), byref(real))\n )\n return so(channels=channels.value, real_channels=real.value)", "def exists(self):\n logging.warning(\n \"IRC back-end does not support determining if a room exists. \"\n \"Returning the result of joined instead.\"\n )\n return self.joined", "def on_presence_updated(self, e):\n self.presence = e.presence if e.presence is not None else 'online'", "def get_status(self):\n if self.__db.channel_exists('{}.today.1_0'.format(self.__dest_code)):\n self.__db.sync_today_channel()\n # maybe just sync this channel? and do same for previous methods\n else:\n self.__db.create_today_channel('{}.today.1_0'.format(self.__dest_code))\n\n conn = sqlite3.connect(self.__db.db_path)\n c = conn.cursor()\n\n today_data = c.execute(\"\"\"SELECT body FROM sync WHERE id = '{}.today.1_0.{}'\"\"\".format(self.__dest_code, self.__entityType)).fetchone()\n\n if today_data is None:\n return None\n else:\n body = json.loads(today_data[0])\n try:\n return body['facilities'][str(self.__id) + ';entityType=' + self.__entityType][0]['scheduleType']\n except:\n return None", "def is_active(self, channel):\n return bool(int(self.bus.ask('sel:%s?' % channel)))", "async def get_widget_info(self) -> 'typing.Tuple[bool, typing.Union[None, channel.Channel]]':\n info = await self._bot.http.get_widget_status(self.id)\n return info.get(\"enabled\", False), self.channels.get(int(info.get(\"channel_id\", 0)))", "def getChannel(self):\r\n return self.channel", "def get_state(self, channel=None):\n return bool(self.getBinaryData(\"SENSOR\", channel))", "def event_detected(self, channel):\n self._check_mode()\n # mit `bool()` kann aus einer Zahl ohne grossen Aufwand ein bool (Wahrheitswert) erstellt werden\n # dabei werden alle Zahlen zu `True`, nur 0 wird zu `False`\n return bool(randint(0, 1))", "def get_play_status(self):\n return self.get(COMMAND_UIC, 'GetPlayStatus')", "def channel_connected(self):\n self.update_status()", "def presenting(self):\n return self._presenting", "def get(self, channel):\n try:\n return self[channel.lower()]\n except KeyError:\n return None", "def is_playing(self):\n return self.connected_channel is not None and self.current is not None", "def get_livechat_channel_info(self):\n self.ensure_one()\n if self.channel_id:\n return self.channel_id.sudo().get_livechat_info()\n return {}", "def joined(self):\n return str(self) in holder.bot.conn.channels.keys()", "def presence(self, presence):\n\n self._presence = presence", "def is_subscribed(self, inst, channel):\r\n if channel not in self._channels:\r\n return False\r\n return inst in self._channels[channel].subscribers", "def is_party_channel(channel: discord.TextChannel) -> bool:\n return get_active_feature(channel) == ActivationState.PARTY", "def _handle_presence(self, presence):\n self.event(\"presence_%s\" % presence['type'], presence)\n\n # Check for changes in subscription state.\n if presence['type'] in ('subscribe', 'subscribed',\n 'unsubscribe', 'unsubscribed'):\n self.event('changed_subscription', presence)\n return\n elif not presence['type'] in ('available', 'unavailable') and \\\n not presence['type'] in presence.showtypes:\n 
return\n\n # Strip the information from the stanza.\n jid = presence['from'].bare\n resource = presence['from'].resource\n show = presence['type']\n status = presence['status']\n priority = presence['priority']\n\n was_offline = False\n got_online = False\n old_roster = self.roster.get(jid, {}).get(resource, {})\n\n # Create a new roster entry if needed.\n if not jid in self.roster:\n self.roster[jid] = {'groups': [],\n 'name': '',\n 'subscription': 'none',\n 'presence': {},\n 'in_roster': False}\n\n # Alias to simplify some references.\n connections = self.roster[jid]['presence']\n\n # Determine if the user has just come online.\n if not resource in connections:\n if show == 'available' or show in presence.showtypes:\n got_online = True\n was_offline = True\n connections[resource] = {}\n\n if connections[resource].get('show', 'unavailable') == 'unavailable':\n was_offline = True\n\n # Update the roster's state for this JID's resource.\n connections[resource] = {'show': show,\n 'status': status,\n 'priority': priority}\n\n name = self.roster[jid].get('name', '')\n\n # Remove unneeded state information after a resource\n # disconnects. Determine if this was the last connection\n # for the JID.\n if show == 'unavailable':\n log.debug(\"%s %s got offline\" % (jid, resource))\n del connections[resource]\n\n if not connections and not self.roster[jid]['in_roster']:\n del self.roster[jid]\n if not was_offline:\n self.event(\"got_offline\", presence)\n else:\n return False\n\n name = '(%s) ' % name if name else ''\n\n # Presence state has changed.\n self.event(\"changed_status\", presence)\n if got_online:\n self.event(\"got_online\", presence)\n log.debug(\"STATUS: %s%s/%s[%s]: %s\" % (name, jid, resource,\n show, status))", "async def is_publicly_visible(bot: DogBot, channel: discord.TextChannel) -> bool:\n # guild is configured to log all message events\n if await bot.config_is_set(channel.guild, 'log_all_message_events'):\n return True\n\n # find the @everyone overwrite for the channel\n everyone_overwrite = discord.utils.find(lambda t: t[0].name == '@everyone', channel.overwrites)\n return everyone_overwrite is None or everyone_overwrite[1].read_messages is not False", "def isPlaying(self):\n return self.getChannel().get_busy()", "def is_connected(self):\n return self.hw_connected", "def channel(self):\n return self._channel", "def channel(self):\n return self._channel", "def get_channel_status(crate, slot, channel):\n conn = engine.connect()\n\n result = conn.execute(\"SELECT * FROM current_channel_status \"\n \"WHERE crate = %s AND slot = %s AND channel = %s\",\n (crate,slot,channel))\n\n if result is None:\n return None\n\n keys = result.keys()\n row = result.fetchone()\n\n return dict(zip(keys,row))", "def channel(self) -> 'Channel': # stub\n return self._channel", "def get_enabled(self, channel):\n return self.extension_names - self.get_disabled(channel)", "def connected(self) -> bool:\n\t\treturn self._raw_result['data']['connected']", "def _send_presence(self, jid):\n presence = self.xmpp.Presence(sfrom=self.jid, sto=jid)\n\n # vCard update\n vcard_update = ET.Element('{vcard-temp:x:update}x')\n photo = ET.SubElement(vcard_update, 'photo')\n photo.text = hashlib.sha1('random').hexdigest()\n\n presence.setPayload(vcard_update)\n\n # Chat status\n show = ET.Element('{%s}show' % self.xmpp.default_ns)\n show.text = 'chat'\n presence.setPayload(show)\n\n self.xmpp.send(presence)", "def channels(self):\n return [channel for channel in self.client.channels if channel.has_nick(self)]", "def 
get_connected(self) -> bool:\n try:\n return self._background_process.is_alive()\n except AttributeError:\n return False", "def isconnected(self):\n return self._wlan.isconnected()", "def can_message(guild, channel):\n\treturn authorized(guild, channel) and not muted(guild, channel)", "def connected(self):\n return self._connected", "def connected(self):\n return self._connected", "def ready(self):\n return _cantera.wall_ready(self.__wall_id)", "def channel(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"channel\")", "def online(self) -> bool:\n\t\treturn self._raw_result['data']['online']", "def channel(self):\n return Channel({'id': self.channel_id, 'connection': self.connection})", "def _subscribed(self, account_id):\n sql = \"\"\"SELECT 1 FROM hive_subscriptions\n WHERE community_id = :community_id\n AND account_id = :account_id\"\"\"\n return bool(DB.query_one(\n sql, community_id=self.community_id, account_id=account_id))", "def get_visible(self):\n return self._visible", "def primary_channel(guild: discord.Guild) -> discord.TextChannel | None:\n if guild.system_channel is not None:\n return guild.system_channel\n\n for channel_candidate in guild.channels:\n if (\n isinstance(channel_candidate, discord.TextChannel)\n and channel_candidate.permissions_for(guild.me).send_messages\n ):\n return channel_candidate\n\n return None", "def update_presence(self):\n\t\tif self.config.presence_model == 'permanent':\n\t\t\tself.thin.presence = 'true'", "def channel(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"channel\")", "def channel(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"channel\")", "def get_channels(self):\n return self.channels", "def test_get_device_presence(self):\n\n device_id = self.properties['device1.id']\n response = self.api.get_device_presence(device_id)\n\n self.assertEqual(device_id, response.sdid, 'Sdids must match')\n self.assertIsNotNone(response.data.last_seen_on, 'last_seen_on')\n self.assertIsNotNone(response.data.connected, 'connected')", "def available(self) -> bool:\n return bool(self._connected)", "async def is_public(self, ctx, channel: discord.TextChannel=None):\n channel = channel if channel else ctx.channel\n public = f'{channel.mention} {{}} public to @\\u200beveryone.'\n await ctx.send(public.format('is' if await is_publicly_visible(self.bot, channel) else '**is not**'))", "def get_channels():\n r = slack.channels.list().body\n return [ c for c in r['channels'] if c['is_member'] ]", "def sock_avail(self):\n return self.channel.recv_ready()", "def is_connected(self):\n return self._connected", "def is_connected(self):\n return self._connected", "def is_connected(self):\n return self._connected", "def is_connected(self):\n return self._connected", "def is_connected(self):\n return self._connected", "def is_connected(self):\n return self.connected", "def have_channel_open(channels, user):\n for x in channels:\n chan = channels[x]\n if 'is_member' in chan:\n continue\n if chan['user'] == user:\n return True\n return False", "def is_online(self) -> bool:\n return self.data[Attribute.ONLINE]", "def channel(self) -> Channel:\n return self._channel", "def is_open(self, channel=None):\n return self.get_state(channel) == 2", "def have_channel_open(channels, user):\n for x in channels:\n chan = channels[x]\n if 'is_member' in chan:\n continue\n if \"user\" in chan and chan['user'] == user:\n return True\n return False", "def conan_channel(self):\n return self._conan_channel", "def is_on(self):\n return 
self.car.data[DATA_PLUGGED_IN]", "def available(self):\n return self._device.available", "def get_active(self):\n return self.get_challenges().filter(status='A')", "def campaign_status(self):\n return self._campaign_status", "def IsReady(self):\r\n\t\treturn self._get_attribute('isReady')", "def isConnected(self):\n return self.connected", "def getChannel(self, channel):\n channel = channel.lower()\n if channel in self.channels:\n return self.channels[channel]\n else:\n c = IrcChannel()\n self.channels[channel] = c\n return c", "def connected(self):\n\n\t\tres = self.eyetribe._tracker.get_trackerstate()\n\n\t\tif res == 0:\n\t\t\tself.connected = True\n\t\telse:\n\t\t\tself.connected = False\n\n\t\treturn self.connected", "def is_channel_owner():\n\n async def check(ctx):\n if ctx.guild:\n owner = ctx.author == ctx.guild.owner\n if not owner:\n await ctx.send(\"I guess you are not this server's pogchamp. Bruh.\")\n return owner\n return True\n\n return commands.check(check)", "def getChannelByName(self, name):\n for c in (self.channels or []):\n if c.settings and c.settings.name == name:\n return c\n return None", "def in_voice(self, server_id):\n srv = self.get_server_dict(server_id)\n return srv['voice'] and srv['voice'].channel", "def connected(self):\n return self.blnkt.connected", "def connected(self):\n return self.port.is_open" ]
[ "0.78225", "0.65050405", "0.63529664", "0.6173057", "0.5725983", "0.5710549", "0.5505179", "0.54912657", "0.54869306", "0.54740065", "0.5472166", "0.5465967", "0.5449083", "0.5377885", "0.5373487", "0.5356357", "0.534464", "0.53218323", "0.52983725", "0.52873003", "0.528721", "0.5279791", "0.52772075", "0.5270897", "0.52548885", "0.5254141", "0.523491", "0.5232872", "0.5229846", "0.52239484", "0.5217372", "0.5198739", "0.51825583", "0.51807386", "0.5176984", "0.5173003", "0.51698405", "0.51657134", "0.51585174", "0.51576144", "0.51504225", "0.514967", "0.51431805", "0.514067", "0.5125621", "0.5123808", "0.5122955", "0.5122955", "0.50906247", "0.50680363", "0.5053306", "0.5043475", "0.5035193", "0.50343007", "0.50342417", "0.50310993", "0.5028915", "0.5027147", "0.5027147", "0.5024009", "0.500633", "0.50017977", "0.49916467", "0.49865904", "0.4984968", "0.49625114", "0.4962288", "0.4961534", "0.4961534", "0.49603492", "0.49595574", "0.49518204", "0.4950511", "0.49361935", "0.49182475", "0.49135342", "0.49135342", "0.49135342", "0.49135342", "0.49135342", "0.49125785", "0.49112138", "0.4906753", "0.4901047", "0.48969492", "0.48963273", "0.4893614", "0.4888135", "0.48838648", "0.48774758", "0.48737743", "0.48708937", "0.48708513", "0.4870791", "0.48580658", "0.48525253", "0.48502102", "0.48491395", "0.48371923", "0.48356378" ]
0.80038244
0
Returns the history for this channel
def history(self, direction=None, limit=None, start=None, end=None, timeout=None):
    params = {}

    if direction:
        params['direction'] = '%s' % direction
    if limit:
        params['limit'] = '%d' % limit
    if start:
        params['start'] = self._format_time_param(start)
    if end:
        params['end'] = self._format_time_param(end)

    path = '/channels/%s/history' % self.__name
    if params:
        path = path + '?' + urlencode(params)

    if self.__cipher:
        message_handler = make_encrypted_message_response_handler(self.__cipher)
    else:
        message_handler = message_response_handler

    return PaginatedResult.paginated_query(
        self.ably.http,
        path,
        None,
        message_handler
    )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_history(self):\n return self.history", "def history(self):\n return self.board.history", "def history(self):\n return self.info['history']", "def get_history(self):\r\n\r\n return self.board_history", "def history(self):\n return self._history", "def history(self):\n return self._history", "def get_history(self):\n return self.__history[:]", "def History(self):\n return self.historydict.get('history', [])", "def get_history():\n return response_texts_to_entries(make_post_request(HISTORY_API, data={\"k\": config[\"api_key\"]}))", "def history(self):\n return _spacegrant_swig.hdlc_framer_sptr_history(self)", "def get_channel_history(token, channel_id, count=100):\n logging.info(\n 'Attempting to fetch channel history for channel %s', channel_id\n )\n indicator_chars_to_api_methods = {\n 'D': 'im.history',\n 'C': 'channels.history',\n }\n # first char of id indicates whether it refers to a public channel\n # or DM, which require calls to different API methods to fetch\n # history\n indicator_char = channel_id[0]\n api_method = indicator_chars_to_api_methods[indicator_char]\n params = {\n 'token': token,\n 'channel': channel_id,\n 'count': count\n }\n response = request_slack(api_method, params)\n return response['messages']", "def get_order_history(self):\n return self.__call__('orders', 'getorderhistory')", "def getOrderHistory(self):\n return self.__orderhistory", "def history(self):\n raise NotImplementedError\n # from domonic.webapi.history import History\n # return History()", "def history():", "def getHistory(self):\n history = []\n for index in range(self.historyList.count()):\n history.append(self.historyList.item(index).text())\n return history, self.historyList.currentRow()", "def history(self, maxresults=9999999, mindate=None):\n server = self._server.resource(self.name).connect()\n return server.history(maxresults=maxresults, mindate=mindate, accountID=self.accountID)", "def history(self, chrom):\n return self._hist[chrom]", "def history(self, key, _from='-', _to='+', _desc=True):\n return [self.klass.from_json(_object)\n for _object in self.storage.history(key, _from, _to, _desc)]", "def history(self) -> List[SnapshotLogEntry]:\n return self.metadata.snapshot_log", "def history(self):\n return _uhd_swig.usrp_sink_sptr_history(self)", "def history(self, maxresults=None, mindate=None):\n server = self._server._server.resource(self._server.name).connect()\n return server.history(maxresults=maxresults, mindate=mindate,\n accountID=self._server.accountID, librarySectionID=self.sectionKey)", "def history(self):\n return _spacegrant_swig.binary_sink_sptr_history(self)", "def history(self):\n return np.array(self._history)", "def get_action_history(self):\n\t\treturn self._action_history", "def get_history(self, symbol, limit=1000, offset=0):\r\n return self.api.get_history(self.account, symbol, limit, offset)", "def orders_history(self): \n return(self._d_orders['history'])", "def get_game_history(self, request):\n return games_ctrl.get_game_history(request.urlsafe_game_key)", "def historystorage(self):\n return self._historystorage", "def get_cache_history_items(self):\n #gdb.execute(\"p cache->history_items\")\n history_items = ZabbixHashset(gdb.parse_and_eval ('cache->history_items'))\n self.data = history_items.parse()", "def get_history(hdr):\n return hdr['HISTORY']", "def get_game_history(self, req):\n return models.BattleShip.getByUrlKey(req.url_key).getHistory()", "def get_history(self, name):\n return self._scalar_history.get_history(name)", "def get_history(self, 
name):\n return self._scalar_history.get_history(name)", "def get_history(self, key=None):\n val = self.history.values.get(key, None)\n if val is None:\n return self.history.values\n else:\n return val", "def getModelHistory(self, *args):\n return _libsbml.SBase_getModelHistory(self, *args)", "def history(self):\n return _TestA_swig.my_qpsk_demod_cb_sptr_history(self)", "def QueryHistory(self):\n return []", "def fetch_history(*args, **kwargs):\n return collect_history(*args, **kwargs)", "def get(self):\n res = ''\n for hist in self.history:\n res += (str(hist) + '\\n')\n return res", "def history(self):\n return _spacegrant_swig.message_debug_sptr_history(self)", "def history(self, q=None):\r\n q = q or []\r\n # allow history to be returned for deleted alarms, but scope changes\r\n # returned to those carried out on behalf of the auth'd tenant, to\r\n # avoid inappropriate cross-tenant visibility of alarm history\r\n auth_project = acl.get_limited_to_project(pecan.request.headers)\r\n conn = pecan.request.alarm_storage_conn\r\n kwargs = _query_to_kwargs(q, conn.get_alarm_changes, ['on_behalf_of',\r\n 'alarm_id'])\r\n return [AlarmChange.from_db_model(ac)\r\n for ac in conn.get_alarm_changes(self._id, auth_project,\r\n **kwargs)]", "def get_channel_history(crate, slot, channel, limit=None):\n conn = engine.connect()\n\n query = \"SELECT * FROM channel_status \" + \\\n \"WHERE crate = %s AND slot = %s AND channel = %s \" + \\\n \"ORDER BY timestamp DESC\"\n\n if limit is not None:\n query += \" LIMIT %i\" % limit\n\n result = conn.execute(query, (crate,slot,channel))\n\n if result is None:\n return None\n\n keys = result.keys()\n rows = result.fetchall()\n\n return [dict(zip(keys,row)) for row in rows]", "def revision_history(self, uuid):\n return self.write.revision_history(rid=uuid)", "def history(self):\n return _spacegrant_swig.general_burster_2_sptr_history(self)", "def getMatchHistory(self, **kwargs):\n return self.makeRequest('GetMatchHistory', **kwargs)", "def history(self):\n return _spacegrant_swig.G3RUH_descramble_sptr_history(self)", "def versionHistory(self):\n url = self.metaData().getLink(\"version-history\")\n assert url is not None\n\n header = self._baseHeader.copy()\n response = self._adapter.getRequest(url, header)\n\n return json.loads(response['Body'])", "def get_history(page):\n headings = page.filter_headings()\n idx = [i for i, head in enumerate(headings) \n if 'History' in head or 'history' in head]\n if not idx:\n return \"\"\n sections = page.get_sections(include_headings=True)\n history = str(sections[idx[0]+1].strip_code())\n return history", "def history(self, name, _from=0, to=None):\n params = {}\n if _from is not None:\n params[\"from\"] = str(_from)\n if to is not None:\n params[\"to\"] = str(to)\n with self.get(\n create_url(\"/v3/schedule/history/{name}\", name=name), params\n ) as res:\n code, body = res.status, res.read()\n if code != 200:\n self.raise_error(\"List history failed\", res, body)\n js = self.checked_json(body, [\"history\"])\n\n return [history_to_tuple(m) for m in js[\"history\"]]", "def history(self, maxresults=None, mindate=None):\n hist = []\n for server in self.servers:\n hist.extend(server.history(maxresults=maxresults, mindate=mindate))\n return hist", "def history(self, maxresults=None, mindate=None):\n servers = [x for x in self.resources() if x.provides == 'server' and x.owned]\n hist = []\n for server in servers:\n conn = server.connect()\n hist.extend(conn.history(maxresults=maxresults, mindate=mindate, accountID=1))\n return 
hist", "def history(self, per_page=None, page=None):\r\n params = base.get_params(None, locals())\r\n url = '{0}/{1}'.format(self.get_url(), 'history')\r\n return http.Request('GET', url, params), parsers.parse_json", "def history(self):\n return _spacegrant_swig.DeNRZI_sptr_history(self)", "def history(self):\n return _spacegrant_swig.NRZI_sptr_history(self)", "def get_value_history(self):\n return self.value_history", "def stack(self):\n return self.history", "def get_history(cls, **filters) -> List[dict]:\n return cls.get_all(**filters)", "def history_orders(self, **params):\n return self._get('historyOrders', signed=True, params=params)", "def instantiate_history(self):\n serialized_history = self.cache.get('history')\n history = ast.literal_eval(serialized_history.decode('utf-8'))\n return history", "def get_history_queue():\n response = houston.get(\"/history/queue\")\n houston.raise_for_status_with_json(response)\n return response.json()", "def history(self):\n alembic.command.history(self.alembic_config(), verbose=True)", "def get_state(self):\n return self.history", "def history(self, request, *args, **kwargs):\n account = self.get_object()\n\n try:\n history = HistoricoConta.objects.filter(conta=account).order_by('-created')\n except ObjectDoesNotExist as obj:\n return Response({\"detail\": \"Could not find history for thus account\",\n \"status_code\": status.HTTP_404_NOT_FOUND}, status=status.HTTP_404_NOT_FOUND)\n\n return Response(HistoricoContaSerializer(history, many=True).data)", "def hosting_history(self, domain):\n return self.apiquery('/v1/{}/hosting-history/'.format(domain))", "def get_history_data(self, exchange, pair, timedelta):\n return self.ccxt.get_history_data(exchange, pair, timedelta)", "def history(self):\n return _spacegrant_swig.hdlc_deframer_sptr_history(self)", "def get_game_history(self, request):\n game = get_by_urlsafe(request.urlsafe_game_key, Game)\n if not game:\n raise endpoints.NotFoundException('Game not found')\n return StringMessage(message=str(game.history))", "def get_history(self):\n\t\t#state = (np.array(self._history['state'])).rehsape(\n\t\tself._history['state'] = (np.squeeze(self._history['state']))\n\t\treturn self._history", "def GetHistory(index=0):\n if index == \"clear\":\n state_mgr.entire_history = []\n else:\n print state_mgr.entire_history[int(index):]", "def agg_history(self):\n cd_list, cr_list = zip(*self._history)\n return pd.concat(cd_list), pd.concat(cr_list)", "def query_history(self, req: HistoryRequest) -> List[BarData]:\n history = []\n\n start_time = generate_datetime3(req.start)\n end_time = generate_datetime3(req.end)\n\n mt5_req = {\n \"type\": FUNCTION_QUERYHISTORY,\n \"symbol\": req.symbol.replace('-', '.'),\n \"interval\": INTERVAL_VT2MT[req.interval],\n \"start_time\": start_time,\n \"end_time\": end_time,\n }\n packet = self.client.send_request(mt5_req)\n\n if packet[\"result\"] == -1:\n self.write_log(\"获取历史数据失败\")\n else:\n for d in packet[\"data\"]:\n bar = BarData(\n symbol=req.symbol.replace('.', '-'),\n exchange=Exchange.OTC,\n datetime=generate_datetime2(d[\"time\"]),\n interval=req.interval,\n volume=d[\"real_volume\"],\n open_price=d[\"open\"],\n high_price=d[\"high\"],\n low_price=d[\"low\"],\n close_price=d[\"close\"],\n gateway_name=self.gateway_name\n )\n history.append(bar)\n\n data = packet[\"data\"]\n begin = generate_datetime2(data[0][\"time\"])\n end = generate_datetime2(data[-1][\"time\"])\n\n msg = f\"获取历史数据成功,{req.symbol.replace('.','-')} - {req.interval.value},{begin} - {end}\"\n 
self.write_log(msg)\n\n return history", "def task_history(self):\n return self._task_history", "def history(self):\n return _TestA_swig.cleanslate_sptr_history(self)", "def get_score_history(self):\n return self._score_history", "def read_history(self):\n if path.isfile(self.HISTORY_FILE_PATH):\n return pd.read_csv(self.HISTORY_FILE_PATH)\n\n df = pd.DataFrame({}, columns=self.HISTORY_COLS)\n df.to_csv(self.HISTORY_FILE_PATH, index=False)\n return df", "def get_room_history(self, room):\n pass", "def get_item_history(item_id, realm_index) -> list:\n\n # get item json and direct to history\n item_history = __get_item_json__(item_id, realm_index)[\n \"history\"][0]\n\n return item_history", "def get_history(self, clocked: 'Clocked'):\n history = {}\n\n new_tick = self._get_new_tick(clocked)\n\n vclock_history = attributes.get_history(clocked, 'vclock')\n is_vclock_unchanged = (vclock_history.unchanged and\n new_tick == vclock_history.unchanged[0])\n\n for prop in self.history_models.keys():\n value = self._get_prop_value(clocked, prop)\n\n if value is not NOT_FOUND_SENTINEL:\n history[prop] = value\n\n return history, is_vclock_unchanged", "def get_rolling_log_history():\n current_tag = get_current_tag()\n return get_log_history(current_tag)", "def history(self):\n return _spacegrant_swig.udp_debug_sptr_history(self)", "def get_history(self):\n msg_ids = self._records.keys()\n # Remove any that do not have a submitted timestamp.\n # This is extremely unlikely to happen,\n # but it seems to come up in some tests on VMs.\n msg_ids = [m for m in msg_ids if self._records[m]['submitted'] is not None]\n return sorted(msg_ids, key=lambda m: self._records[m]['submitted'])", "def history(self):\n return _spacegrant_swig.invert_bit_sptr_history(self)", "def _get_history_data(self) -> List[Dict[str, Any]]:\n try:\n with open(self._path.as_posix(), \"r\", encoding=\"utf-8\") as history_file:\n data = json.load(history_file)\n data.append(History._get_empty_session_object())\n return data\n except FileNotFoundError:\n self._path.touch()\n return History._get_empty_json_object()\n except json.decoder.JSONDecodeError:\n return History._get_empty_json_object()", "def get_history_since(self, start=0):\n hist = self.service.users().history()\n try:\n results = hist.list(userId='me', startHistoryId=start).execute()\n if 'history' in results:\n yield results['history']\n while 'nextPageToken' in results:\n results = hist.list(userId='me',\n pageToken=results['nextPageToken'],\n startHistoryId=start).execute()\n if 'history' in results:\n yield results['history']\n\n except googleapiclient.errors.HttpError as ex:\n if ex.resp.status == 404:\n raise Gmail.NoHistoryException\n elif ex.resp.status == 403:\n raise Gmail.UserRateException(ex)\n else:\n raise Gmail.GenericException(ex)", "def history_orders(self, **params):\n return self._get('option/historyOrders', signed=True, params=params, version=None)", "def update_history(self):\n logging.info(\"Fetching and storing messages in DB\")\n\n all_channels = self.q(o.Channel).all()\n if self.selected_channels:\n channels = [c for c in all_channels\n if c.name in self.selected_channels]\n else:\n channels = all_channels\n\n for channel in channels:\n logging.info(\"Getting messages for channel `%s'\", channel.name)\n latest = self.q(o.Message).\\\n filter(o.Message.channel == channel).\\\n order_by(o.Message.ts.desc()).first()\n # NOTE(gryf): Trick out the API, which by default (latest and\n # oldest parameters set to 0) return certain amount of latest\n # messages, 
while we'd like to have it from the beginning of the\n # available history, if there is no database records available. In\n # that case value of 1 here will force the API to get messages\n # starting from first January 1970.\n latest = latest and latest.ts or 1\n\n while True:\n logging.debug(\"Fetching another portion of messages\")\n messages, latest = self._channels_history(channel, latest)\n\n for msg in messages:\n self._create_message(msg, channel)\n\n if latest is None:\n break\n\n self.session.commit()", "async def fetch_channel_history(\n self,\n channel: discord.TextChannel,\n animation_message: discord.Message,\n messages: int\n ) -> List[discord.Message]:\n animation_message_deleted = False\n history = []\n history_counter = 0\n async for msg in channel.history(limit=messages):\n history.append(msg)\n history_counter += 1\n await asyncio.sleep(0.005)\n if history_counter % 250 == 0:\n new_embed = discord.Embed(\n title=f\"Fetching messages from #{channel.name}\",\n description=f\"This might take a while...\\n{history_counter}/{messages} messages gathered\",\n colour=await self.bot.get_embed_colour(location=channel),\n )\n if channel.permissions_for(channel.guild.me).send_messages:\n await channel.trigger_typing()\n if animation_message_deleted is False:\n try:\n await animation_message.edit(embed=new_embed)\n except discord.NotFound:\n animation_message_deleted = True\n return history", "def hist():\n history_dict = {}\n # create history_list\n for i in range(readline.get_current_history_length()):\n history_dict[i+1] = (readline.get_history_item(i+1))\n return history_dict", "def history():\n \n user_id = session[\"user_id\"]\n history_list = hist(user_id, db)\n return render_template('history.html', history=history_list)", "def history(self) -> List[Dict[str, Any]]:\n\n response = self.client.get(f\"/images/{self.id}/history\")\n body = response.json()\n\n if response.status_code == 200:\n return body\n\n if response.status_code == 404:\n raise ImageNotFound(body[\"cause\"], response=response, explanation=body[\"message\"])\n raise APIError(body[\"cause\"], response=response, explanation=body[\"message\"])", "def get_game_history(self, request):\n game = get_by_urlsafe(request.urlsafe_game_key, Game)\n return HistoryForm(history=[history for history in game.history])", "def get_vouchers_history(self, vid_encoded=None, vid=None, action=None,\n uid_from=None, uid_to=None, gid=None,\n valid_after=None, valid_before=None,\n create_after=None, create_before=None,\n last=None, first=None):\n resource = self.kvpath(\n 'vouchers/history',\n ('ident', vid_encoded),\n **{\n 'vid': ('int', vid),\n 'action': ('ident', action),\n 'from': ('int', uid_from),\n 'to': ('int', uid_to),\n 'gid': ('ident', gid),\n 'valid_after': ('isobasic', absdatetime(valid_after)),\n 'valid_before': ('isobasic', absdatetime(valid_before)),\n 'create_after': ('isobasic', absdatetime(create_after)),\n 'create_before': ('isobasic', absdatetime(create_before)),\n 'first': ('int', first),\n 'last': ('int', last)\n }\n )\n return self.request('get', resource)", "def whois_history(self, domain):\n return self.apiquery('/v1/{}/whois/history/'.format(domain))", "def history(self):\n return _uhd_swig.usrp_source_sptr_history(self)", "def node_history(self, node_id, items, zhistory, since, until, items_search=None):\n return self.izx.get_history((node_id,), items, zhistory, since, until, items_search=items_search)", "def getHistory(self):\n\n arrSize = self.buffer[0].shape[1]\n arrayHist = np.asarray(self.buffer)\n\n 
arrayHist = np.reshape(arrayHist, (1, arrSize * self.bufferSize))\n state = torch.from_numpy(arrayHist).to(self.device)\n state = state.type(torch.cuda.FloatTensor)\n\n return state", "def _grab_history(self):\n self.data['history_lines'] = []\n self.data['history_file'] = None\n self.data['history_encoding'] = None\n self.data['headings'] = []\n self.data['history_last_release'] = ''\n self.data['history_insert_line_here'] = 0\n default_location = None\n config = self.setup_cfg.config\n if config and config.has_option('zest.releaser', 'history_file'):\n default_location = config.get('zest.releaser', 'history_file')\n history_file = self.vcs.history_file(location=default_location)\n self.data['history_file'] = history_file\n if not history_file:\n logger.warn(\"No history file found\")\n return\n logger.debug(\"Checking %s\", history_file)\n history_lines, history_encoding = read_text_file(history_file)\n history_lines = history_lines.split('\\n')\n headings = utils.extract_headings_from_history(history_lines)\n if not headings:\n logger.warn(\"No detectable version heading in the history \"\n \"file %s\", history_file)\n return\n self.data['history_lines'] = history_lines\n self.data['history_encoding'] = history_encoding\n self.data['headings'] = headings\n\n # Grab last header.\n start = headings[0]['line']\n if len(headings) > 1:\n # Include the next header plus underline, as this is nice\n # to show in the history_last_release.\n end = headings[1]['line'] + 2\n else:\n end = len(history_lines)\n history_last_release = '\\n'.join(history_lines[start:end])\n self.data['history_last_release'] = history_last_release\n\n # Add line number where an extra changelog entry can be inserted. Can\n # be useful for entry points. 'start' is the header, +1 is the\n # underline, +2 is probably an empty line, so then we should take +3.\n # Or rather: the first non-empty line.\n insert = start + 2\n while insert < end:\n if history_lines[insert].strip():\n break\n insert += 1\n self.data['history_insert_line_here'] = insert", "def getOutageHistory(self):\n return self._OutageHistory", "def history(self):\n return _spacegrant_swig.ax25_udp_pdu_receiver_sptr_history(self)" ]
[ "0.8223158", "0.8190711", "0.8100468", "0.8018653", "0.7970339", "0.7970339", "0.7960411", "0.763217", "0.7524663", "0.74288756", "0.7316858", "0.7306517", "0.7249383", "0.72073257", "0.7187233", "0.7177014", "0.71616286", "0.71466166", "0.713637", "0.7119178", "0.7116252", "0.7077597", "0.70775014", "0.7072005", "0.7064725", "0.70457023", "0.7040872", "0.70279944", "0.7015604", "0.70131975", "0.701101", "0.6969928", "0.69693416", "0.69693416", "0.69643104", "0.69396913", "0.690286", "0.6896712", "0.6879777", "0.6862023", "0.6853391", "0.6850236", "0.6848229", "0.6847256", "0.6841383", "0.682929", "0.6816694", "0.6801811", "0.6774098", "0.6750369", "0.67482173", "0.6735713", "0.67333275", "0.6728232", "0.67237204", "0.6700948", "0.6680441", "0.666618", "0.66569984", "0.66556114", "0.66492915", "0.6641762", "0.66401124", "0.6637818", "0.6629988", "0.6609054", "0.6600174", "0.65960634", "0.6593442", "0.6584924", "0.6573831", "0.6564741", "0.6559853", "0.6533975", "0.6508428", "0.65071684", "0.6451638", "0.6439183", "0.64088005", "0.639688", "0.6395442", "0.63926256", "0.63794416", "0.63703275", "0.63549197", "0.6354913", "0.6345647", "0.6327147", "0.6325879", "0.6321116", "0.63209987", "0.6314489", "0.6302733", "0.6279625", "0.62744737", "0.6270368", "0.6267697", "0.6248021", "0.62471366", "0.6246799" ]
0.7255194
12
Publishes a message on this channel.
def publish(self, name, data, timeout=None):
    message = Message(name, data)

    if self.encrypted:
        message.encrypt(self.__cipher)

    if self.ably.options.use_text_protocol:
        request_body = message.as_json()
    else:
        request_body = message.as_thrift()

    path = '/channels/%s/publish' % self.__name
    headers = HttpUtils.default_post_headers(not self.ably.options.use_text_protocol)

    return self.ably.http.post(
        path,
        headers=headers,
        body=request_body,
        timeout=timeout
    ).json()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def publish(self, message):\n logger.info(\"Publishing to topic [{0}]: {1}\".format(self._topic_name, message))\n self._executor.send(json.dumps({\n 'op': 'publish',\n 'id': 'publish:{0}:{1}'.format(self._topic_name, self._id),\n 'topic': self._topic_name,\n 'msg': message\n }))", "def publish(self, channel: str, message):\n raise TypeError(\"{} - publish not implemented!\")", "def publish(self, queue, message):\n # 1. Setup the channel to use to publish message\n channel_handler = ChannelHandler(self._connection)\n\n # 2. Open the channel before using it\n channel_handler.open_channel()\n\n # 3. Send the message via the channel\n channel_handler.send_message(self._exchange_name, queue, message)\n\n # 4. Close the channel after publishing the message\n channel_handler.close_channel()\n LOGGER.info('Bellow message `%s` is published in `%s`', message, queue)", "def publish(self, message: model.MQTTMessage):\n self.client.publish(message.topic, payload=message.get_payload())", "def publish(self, message):\n pika_message = message.to_pika_message()\n self._channel.basic_publish(exchange='',\n routing_key=self.name,\n properties=pika_message.properties,\n body=message.body)", "def publish(self, message: str) -> None:", "def send(self):\n if self._stopping:\n return\n\n mytype = 'text/plain'\n\n try:\n if isinstance(json.loads(self.message),dict):\n mytype = 'application/json'\n except (TypeError,json.JSONDecodeError):\n if (isinstance(self.message,dict)):\n mytype = 'application/json'\n self.message = json.dumps(self.message)\n else:\n self.message = str(self.message)\n\n properties = pika.BasicProperties(app_id='sender',\n content_type=mytype)\n\n self._channel.basic_publish(self.exchange, self.routing_key, self.message, properties)\n self._message_number += 1\n self._deliveries.append(self._message_number)\n self.logger.info('published message # %i', self._message_number)", "def publish(self, channel: str, content: str) -> None:\n print(f\"{self._name} publishes message '{content}' to \"\n f\"channel-[{channel}]\")\n self._server.route(channel, content)", "def publish(self, message: None):\n response = self.client.publish(TopicArn=self.params['topic_arn'], Message=message)\n return response", "def produce(self, message):\n self.producer.send(self.topic, message)", "def process(self, message):\n if self.debug:\n self.log(\"Publishing: \" + str(message.data))\n self.channel.basic.publish(\n AMQPMessage(str(message.data)),\n self.exchange, self.routing_key)", "def process(self, message):\n if self.debug:\n self.log(\"Publishing: \" + str(message.data))\n self.channel.basic.publish(\n AMQPMessage(str(message.data)),\n self.exchange, self.routing_key)", "def publish(self, message: Union[SubmissionMessage, CommentMessage]) -> int:\n self.publisher.publish(self.topic, message.serialize().encode(\"utf-8\")).result()", "def publish(topic, message):\n if DEBUG:\n print(\"Publish: '\" + message + \"' (topic: '\" + topic + \"')\")\n DATA[\"client\"].publish(topic, message)", "def _publish(self, topic_name, message):\n msg = {\n 'op': 'publish',\n 'topic': topic_name,\n 'msg': message\n }\n json_msg = json.dumps(msg)\n self.ws.send(json_msg)", "async def publish(self, message):\n try:\n self.write('data: {}\\n\\n'.format(message))\n await self.flush()\n except StreamClosedError:\n self.finished = True", "def publish(self, channel, message, async=True):\n if self.async and async:\n self.workers.add_task(getattr(self, '_publish'), self.subscriptions[str(channel)], message)\n else:\n 
self._publish(self.subscriptions[str(channel)], message)", "def publish_messages(message):\n\n publisher = pubsub_v1.PublisherClient()\n topic_path = publisher.topic_path(PROJECT, TOPIC)\n\n message = message.encode('utf-8')\n publisher.publish(topic_path, data=message)\n\n print('Message published\\n')", "def publish(self, data):\n # [START pubsub_quickstart_publisher]\n # [START pubsub_publish]\n # Data must be a bytestring\n logger.info(\"publishing message %s\" % data)\n data = data.encode('utf-8')\n self.publisher.publish(self.topic_path, data=data)\n\n logger.info('Published messages: {}'.format(data))\n # [END pubsub_quickstart_publisher]\n # [END pubsub_publish]", "def sendNotification(self, message):\n if self.topicArn is None:\n print 'ERROR: Notification topic not set!'\n return\n\n publishResponse = self.snsClient.publish(\n TopicArn=self.topicArn,\n Message=message\n )", "def publish(self, message: str, message_id: int) -> None:\n payload: str = self._create_payload(message, message_id)\n max_payload_bytes = 268435455\n if size(payload) > max_payload_bytes:\n msg = Message.status_message('Message too large.')\n self.client.queue.put(msg)\n return\n return_value: mqtt.MQTTMessageInfo = self.client.publish(self.client.topic, payload, qos=2)\n if return_value.rc == 0: # Publication successful\n return\n else:\n raise SubscriptionError(f'MQTTMessageInfo error code: {return_value.rc}')", "def publish_message(self):\n\n message_count = 0\n while message_count < self._messages:\n message_count += 1\n message_body = \"task number %i\" %(message_count)\n self._channel.basic_publish(exchange='',\n routing_key=self._queue_name,\n body=message_body,\n properties=pika.BasicProperties(\n delivery_mode=2 # make message persistant\n ))\n print(\"Published message %i\" %(message_count))\n time.sleep(self._message_interval)", "def publish(self, topic, msg):\n\t\tself.topic = topic\n\t\tself.msg = msg \n\t\tself.client.publish(self.topic, self.msg)", "def publish(self, queue, message):\n\n # Instead of passing a queue to the constructor, the publish checks if\n # the target queue exists. 
If not, it declares the target queue\n if not self.queue:\n self.channel.queue_declare(queue=queue)\n self.queue = queue\n\n self.channel.basic_publish(\n exchange='', routing_key=queue, body=message)", "def publish(self, message_body, routing_key, exchange=None):\n\n publish_exchange = exchange or self.producer.exchange\n\n self.producer.publish(\n body=message_body,\n exchange=publish_exchange,\n routing_key=routing_key,\n retry=settings.PUBLISH_RETRY,\n retry_policy={\n # First retry immediately,\n 'interval_start': settings.PUBLISH_RETRY_INTERVAL_START,\n # then increase by 2s for every retry.\n 'interval_step': settings.PUBLISH_RETRY_INTERVAL_STEP,\n # but don't exceed 30s between retries.\n 'interval_max': settings.PUBLISH_RETRY_INTERVAL_MAX,\n # give up after 30 tries.\n 'max_retries': settings.PUBLISH_RETRY_MAX_RETRIES,\n # callback for logging\n 'errback': self.on_publish_error,\n 'on_revive': self.on_connection_revival\n },\n # declare exchange and queue and bind them\n declare=list(self.queues.values())) # queues is a dict.\n log.info(f'Published '\n f'message: {self.producer.exchange.name}::{routing_key}')\n log.debug(f'Published '\n f'message_body: {message_body}')", "def publish_message(message: str, broker_ip: str, exchange_name: str, exchange_type: str):\n connection = pika.BlockingConnection(\n pika.ConnectionParameters(host=broker_ip))\n channel = connection.channel()\n channel.exchange_declare(exchange=exchange_name, exchange_type=exchange_type, durable=True)\n channel.basic_publish(exchange=exchange_name, routing_key='', body=message)\n print(f'Published {message} to the exchange')\n connection.close()", "def send_message(self, message):\n self.client.queue.put(message)", "def send(self, json):\n try:\n retval = self._channel.basic_publish(\n exchange=self.exchange_config['name'],\n routing_key=self.queue_config['name'],\n body=json,\n mandatory=False,\n properties=self._msg_properties\n )\n\n if retval == False:\n raise exceptions.MessageNotSentException(\"Message not sent, enable pika logging for more information\")\n except Exception as e:\n raise exceptions.ConnectionException(\"Connection error\", e)", "def publish_message(self, topic, message):\n\n def delivery_report(err, msg):\n \"\"\" Called once for each message produced to indicate delivery result.\n Triggered by poll() or flush(). 
\"\"\"\n if err is not None:\n print('Message delivery failed: {}'.format(err))\n else:\n print('Message delivered to {} [{}]'.format(msg.topic(), msg.partition()))\n\n # Trigger any available delivery report callbacks from previous produce() calls\n self.producer.poll(0)\n\n # Asynchronously produce a message, the delivery report callback\n # will be triggered from poll() above, or flush() below, when the message has\n # been successfully delivered or failed permanently.\n value_to_publish = message\n\n if self.handle_json_message_data:\n if type(message) not in (dict, list):\n raise MessageValueException(\"Your message should be json serializable!\")\n value_to_publish = json.dumps(value_to_publish)\n\n self.producer.produce(topic, value_to_publish.encode('utf8'), callback=delivery_report)\n\n # Wait for any outstanding messages to be delivered and delivery report\n # callbacks to be triggered.\n self.producer.flush()", "def kafka_publish_message(self, message):\n self.kf_sender = self.kf_producer.send(self.kf_topic, value=message.encode('utf-8'));", "def send_message(self, message):\n self.send_message_queue.put(message)", "async def send_message(self, message: dict) -> None:\n await self.client.chat_postMessage(channel=self.channel_id, **message)", "async def send_message(self, channel : str, message : str):\n await self._connection.send_message(channel, message)", "def send(self, msg):\n self._mailbox.put(msg)", "def publish(self, message, topic=''):\n if type(message) != types.ListType:\n message = [message]\n if topic:\n message = [topic] + message\n self.send(message)", "def pushing_message(project_id: str, topic_id: str, message: str):\n publisher = pubsub_v1.PublisherClient()\n # The `topic_path` method creates a fully qualified identifier\n # in the form `projects/{project_id}/topics/{topic_id}`\n topic_path = publisher.topic_path(project_id, topic_id)\n\n # Data must be a bytestring\n message = message.encode(\"utf-8\")\n # When you publish a message, the client returns a future.\n publisher.publish(topic_path, message)\n print(f\"Published messages to {topic_path}.\")", "def publish(self, topic, msg):\n formatted_msg = json.dumps(msg)\n self.client.publish(topic, formatted_msg) # json converting cause of mqtt's data transfer limit.", "def sendMessage(self):\n ps = pubsub.PubSub(from_jid=self.jid, to_jid=self.recipient, stream=self.stream, stanza_type=\"get\")\n ps.publish(self.message, self.node)\n self.stream.set_response_handlers(ps, self.onSuccess, self.onError, lambda stanza: self.onTimeout(stanza, message, recipient))\n self.stream.send(ps)", "def send_message(msg, exchange, key=None):\n print(msg)\n connection = pika.BlockingConnection(pika.ConnectionParameters(host='rabbitmq'))\n channel = connection.channel()\n exchange_type = 'direct' if exchange == 'other' else 'topic'\n channel.exchange_declare(exchange=exchange, exchange_type=exchange_type)\n if key is not None and exchange == 'logs':\n routing_key = f'scheduler.{key}'\n else:\n routing_key = ''\n channel.basic_publish(exchange=exchange, routing_key=routing_key, body=msg)\n connection.close()", "def sendMsg(self, channel, message, length=None):\n self.logger.info(\"Sending in %s: %s\" % (channel, message))\n self.msg(channel, message, length)", "async def publish(self, body, routing_key=None):\n properties = pika.BasicProperties(\n app_id='example-publisher',\n content_type='application/json'\n )\n self.log.debug(\"Publish to %s:%s\", self.exchange,\n routing_key or self.routing_key)\n channel = await 
self._backend.channel('publish')\n try:\n channel.basic_publish(\n self.exchange,\n routing_key or self.routing_key or '',\n # pylint: disable=c-extension-no-member\n ujson.dumps(body, ensure_ascii=False),\n properties)\n except pika.exceptions.ChannelClosed: # pragma: no cover\n self.log.error(\n 'Message not delivered (%s): %s',\n routing_key, body\n )", "def sendChatMessage(self, msg):\n self.transport.write(msg)", "def send_message(self, topic_name, message):\n self.topics[topic_name].append(message)", "def publish(self, message, routing_key, mandatory=True):\n\n log.debug(\"Publishing message via exchange %s: %r\", self, message)\n if self.internal:\n # Caught on the client side to prevent channel closure\n raise ValueError(\"cannot publish to internal exchange: '%s'!\" % self.name)\n\n raise gen.Return((yield self.__publish_method(\n self.name,\n routing_key,\n message.body,\n properties=message.properties,\n mandatory=mandatory)))", "def publish(self, data=None):\n rospy.loginfo(\"Message published on topic %s\", self.topic)", "def publish(self, queue, message, ttl=3600):\n\n # Get next message ID\n message_id = self.redis.incr(self._ns_nextid())\n\n # Push message to queue\n self.redis.setex(self._ns_message(queue, message_id), ttl, message)\n \n # List all consumers of given queue\n consumers = self.redis.smembers(self._ns_subscriptions(queue))\n\n # Publish the message to all the consumers.\n for consumer in consumers:\n self.redis.rpush(self._ns_queue(queue, consumer), message_id)", "def send(self, msg):\n self.house.PLM.send_queue.put( msg )", "def add_message(self, msg):\n msg_string = json.dumps(msg)\n self.redis_client.publish(self.message_channel, msg_string)\n self.redis_client.lpush(self.message_list, msg_string)\n self.redis_client.ltrim(self.message_list, 0,\n app.config[\"MAX_MESSAGES\"]-1)", "async def publish_message(self, body: str, priority: int = None):\n message = Message(body=body.encode('utf-8'), priority=priority, delivery_mode=DeliveryMode.PERSISTENT)\n await self._channel.default_exchange.publish(message, routing_key=self._queue)", "def send(self, msg):\n return self._channel_action(msg, 1)", "def publish(self, topic, content):\n # check if ACKed\n if not self.connack_rec:\n return 1\n\n # compose frame\n frame = Message.PublishFrame().compose(topic, content)\n\n # send frame\n self.send_q.put(frame.encode())", "def send_message(self, message):\n \n msgPacket = serverbound.play.ChatPacket()\n msgPacket.message = message\n self.connection.write_packet(msgPacket)", "def send_message(self, channel, text):\n if not channel:\n return\n self.post('chat.postMessage', data={\"channel\": channel, \"text\": text})", "def Talk(self, topic, message):\n Send(self.channel, topic, message)", "def send_message(self, message: str):\n self.client.chat_postMessage(\n channel=f\"@{self.username}\", text=message,\n )", "def send_message(self, message, channel=None):\n if channel is None:\n channel = self.default_channel\n\n self._slack_client.api_call(\n \"chat.postMessage\", channel=channel, text=message)", "def command(self, msg):\n self.cmd_pub.publish(msg)", "def __answer(self, msg: str):\n self.channel.basic_publish(\n exchange='main', routing_key='answer', body=msg)", "def send(self, topic, msg):\n out = \"%s %s\" % (topic, msg)\n self.topics[topic].send(bytes(out, 'utf-8'))", "def publish(self, publisher):\n publisher._send(self.payload.event, self.info, *self.payload.args,\n **self.payload.kwargs)", "async def async_send_json_message(self, message: Dict[str, Any]) -> 
None:\n if self.state != STATE_CONNECTED:\n raise NotConnected\n\n if self._logger.isEnabledFor(logging.DEBUG):\n self._logger.debug(\"Publishing message:\\n%s\\n\", pprint.pformat(message))\n\n assert self.client\n if \"messageId\" not in message:\n message[\"messageId\"] = uuid.uuid4().hex\n await self.client.send_json(message)", "def send(self, message):\n if self.connection:\n self.connection.send(message)", "def send_message(self, text):\n self.redis.publish('message_to_user', json.dumps((self.operator_token, text)))", "def send_message(self, message):\n self.outbox.put(message)\n if message.TYPE_STRING != \"ack\":\n self.awaiting_ack.put((message, time.time()))", "async def _send_json_message(self, message: Dict[str, Any]) -> None:\n if not self.connected:\n raise NotConnected\n\n if LOGGER.isEnabledFor(logging.DEBUG):\n LOGGER.debug(\"Publishing message:\\n%s\\n\", pprint.pformat(message))\n\n assert self._client\n assert \"id\" in message\n\n await self._client.send_json(message, dumps=ujson.dumps)", "def publish_messages(project_id, topic_id, message):\n # [START pubsub_quickstart_publisher]\n # [START pubsub_publish]\n from google.cloud import pubsub_v1\n\n # TODO(developer)\n #project_id = \"covid-canada-dashboard\"\n #topic_id = \"test_topic\"\n\n publisher = pubsub_v1.PublisherClient()\n # The `topic_path` method creates a fully qualified identifier\n # in the form `projects/{project_id}/topics/{topic_id}`\n topic_path = publisher.topic_path(project_id, topic_id)\n print(topic_path)\n\n #data = f\"Message number {n}\"\n #data = \"THIS WORKS! Awesome job Braveenth!\"\n data = message\n # Data must be a bytestring\n data = data.encode(\"utf-8\")\n # When you publish a message, the client returns a future.\n future = publisher.publish(topic_path, data)\n print(future.result())\n\n print(f\"Published messages to {topic_path}.\")\n # [END pubsub_quickstart_publisher]\n # [END pubsub_publish]", "def publish(self, message):\n try:\n self.write('{}\\n\\n'.format(message))\n yield self.flush()\n except StreamClosedError:\n self._finished = True", "def on_reply(self, msg: str):\n self._logger.debug(f\"Got msg: {msg}\")\n self._rabbit_channel.basic_publish(exchange='', routing_key=QueueName.MSG_REPLY, body=str(msg))", "def on_message(self, message):\n self.write_message(u\"%s\" % message)", "def publish(self, topic, payload):\n complete_topic = \"{}/{}\".format(self._base_topic, topic)\n self._client.publish(complete_topic, payload, qos=2)\n logger.info(\"On topic %s published: %s\", complete_topic, payload)", "def write_message(self, payload):\n self.messages.append(payload)", "def send(self, message):\n _check_message_type(message=message)\n response = requests.post(\n self._server_url + _SEND_URL,\n data={\"id\": self._chat_id, \"msg\": message}\n )", "def send_chat_message(self, channel, message):\r\n self._send(\"PRIVMSG #{0} :{1}\".format(channel, message))", "def post_message(self, message):\n logging.info(f\"Posting {message} to {self.channel}...\")\n params = self.params\n params[\"text\"] = message\n response = requests.get(self.url + \"chat.postMessage\", params=params)\n if response.ok:\n logging.info(f'Successfully sent \"{message}\" to {self.channel}.')\n logging.debug(response.json())\n else:\n logging.info(f'Failed to send \"{message}\" to {self.channel}.')\n logging.debug(response.json())\n return response.status_code", "async def send_message(self, message: dict) -> None:\n _LOGGER.debug(f\"send_message - {message}\")\n await 
self._client.send_message(json.dumps(message))", "def publish(self, event):\n self.pubsub_router.send(event)", "def publish(self, topic:str, data:bytes) -> None:\n\t\tself.mqttClient.publish(topic, data)", "def send(self, message):\n\t\tmessage_string = self.send_address + \" \" + message + \" /\"\n\t\tself.add_to_queue(message_string)", "def send(self, message):\n self.sock.send(message)", "def write_message(self, message):\r\n logging.debug(\"Sending message {mes} to {usr}\".format(mes=message, usr=self.id))\r\n self.handler.write_message(message)", "def send_message(self, message):\n pass", "def send_message(self, data):\n self.transport.write(data)", "def send_msg(self, msg):\n self.msg_queue.put(dict(to=settings.IOTTLY_XMPP_SERVER_USER,msg='/json ' + json.dumps(msg)))", "def write(self, msg):\n self.sock.send(msg.encode())", "def send_message(channel, message):\n slack_client = get_client()\n slack_client.chat_postMessage(channel=channel, text=message, as_user=True)", "def publish(self, data, isAsync = True):\n time = now()\n dataWithId = (self.idGenerator.generateId(), data)\n self.messageQueue.setdefault(time, []).append(dataWithId)\n self.notify(time, dataWithId, isAsync)", "def send(self, msg):\n with self._send_lock:\n self._rt.send_message(msg.bytes())", "def send(self, message):\n if isinstance(message, basestring):\n self.send_queue.put(message)\n else:\n self.send_queue.put(struct.pack(\"!B\", message.type_id) +\n message.pack())", "def publish(self, message: Message) -> bool:\n if not self._connected:\n self.log.warning(f\"Not connected, unable to publish {message}\")\n return False\n\n self.mutex.acquire()\n\n info = self.client.publish(message.topic, message.payload, self.qos)\n\n if info.rc == mqtt.MQTT_ERR_SUCCESS:\n self.log.debug(f\"Published {message}\")\n self.mutex.release()\n return True\n else:\n self.mutex.release()\n return info.is_published()", "def publish(self):\n while True:\n outbound_message = self.outbound_message_queue.peek()\n if outbound_message is None:\n break\n\n if self.connectivity_service.publish(outbound_message) is True:\n self.outbound_message_queue.get()\n else:\n break", "def publish( self, topic, data, qos = 1, retain = False ):\n logging.info( \"Publishing to topic %s\" %topic )\n self.client.publish( topic, data, qos = qos, retain = retain )", "def send_message(self, message):\n source_guid = str(uuid.uuid1())\n date = time.strftime(\"%H:%M:%S\")\n self.api.send_message(\n self.conversation_type,\n self.cid,\n source_guid,\n message[:1000]\n )\n if self.api.send_message(self.conversation_type, self.cid, source_guid, message):\n self.append_message(source_guid, 'me', date, message[:1000])\n if len(message) > 1000:\n self.send_message(message[1000:])", "def _publish_msg(self, msg_type, data=None, metadata=None, buffers=None, **keys):\n data = {} if data is None else data\n metadata = {} if metadata is None else metadata\n content = json_clean(dict(data=data, comm_id=self.comm_id, **keys))\n msg = self.kernel_client.session.msg(msg_type, content=content, parent=self.parent_header, metadata=metadata)\n self.kernel_client.shell_channel.send(msg)", "def send_message(self, message):\n\n self.socket.send(message.serialize())", "def send(self, message):\n pass", "def add_chat_message(self, message):\n try:\n data = message.to_json()\n key = ENVIRONMENT['REDIS_PREFIX'] + \"chat_messages:%s\" % self.channel_id\n \n logging.info(data)\n \n self.redis_server.rpush(key, data)\n self.redis_server.publish(ENVIRONMENT['REDIS_PREFIX'] + 'chat_messages', 
data)\n except Exception, e:\n logging.info(\"ERROR adding message %s: %s\" % (message, e))\n raise", "def publish_messages(self, project_id, topic_name, orderNumber):\r\n\r\n publisher = pubsub_v1.PublisherClient()\r\n topic_path = publisher.topic_path(project_id, topic_name)\r\n\r\n data = u'Message: {}'.format('test')\r\n # Data must be a bytestring\r\n data = data.encode('utf-8')\r\n # Add attribute to the message\r\n future = publisher.publish(\r\n topic_path, data, OrderNumber=orderNumber)\r\n print(future.result())\r\n\r\n print('Published messages with custom attributes.')", "def send_message(channel, data):\n try:\n socketio.emit(channel, data)\n logging.info('Message was sent.')\n logging.debug(data)\n except Exception as e:\n logging.error(e)\n logging.error(\"Can't send message. Exeption occured\")", "def publish(self, info, user_id):\n del info\n event = {\"user_id\": user_id.value, \"payload\": self}\n\n return OnChatMessageSent(event=event)", "def publish(self) -> None:\n self.logger.debug(\"Publishing\")\n if not self.connectivity_service.is_connected():\n self.logger.warning(\"Not connected, unable to publish messages\")\n return\n\n saved_readings = len(self.readings_persistence.obtain_readings())\n if saved_readings > 0:\n readings_message = (\n self.message_factory.make_from_feed_values_collected(\n self.readings_persistence.obtain_readings()\n )\n )\n if readings_message is not None:\n if self.connectivity_service.publish(readings_message):\n self.readings_persistence.clear_readings()\n else:\n self.logger.warning(\n f\"Failed to publish message: {readings_message}\"\n )\n\n while True:\n message = self.message_queue.peek()\n if message is None:\n break\n\n if self.connectivity_service.publish(message):\n self.message_queue.get()\n else:\n self.logger.warning(f\"Failed to publish message: {message}\")\n break\n self.logger.debug(\"Publishing ended\")" ]
[ "0.8069301", "0.7968601", "0.7785665", "0.76731753", "0.7639972", "0.761992", "0.75402987", "0.7448884", "0.74153394", "0.7411613", "0.7408458", "0.7408458", "0.73652995", "0.73333603", "0.7326912", "0.72585905", "0.7211472", "0.72038764", "0.7194173", "0.7134606", "0.7127167", "0.7074256", "0.70482635", "0.7007741", "0.7004657", "0.6954987", "0.69310707", "0.6921998", "0.6899705", "0.6861343", "0.6824355", "0.6822214", "0.68189657", "0.6800274", "0.6787125", "0.67671555", "0.6761605", "0.67436856", "0.6723561", "0.6675751", "0.66552526", "0.6644685", "0.6606758", "0.65966374", "0.65645087", "0.6543102", "0.6541124", "0.6519444", "0.6517381", "0.6516061", "0.6422328", "0.64212304", "0.64148587", "0.6403279", "0.63963914", "0.6369207", "0.633212", "0.6331294", "0.62996316", "0.6278831", "0.6274874", "0.627443", "0.6253568", "0.6242046", "0.62367475", "0.62321436", "0.62296414", "0.6210519", "0.6209561", "0.6208446", "0.62007564", "0.6195504", "0.6193285", "0.61750656", "0.61745596", "0.61732686", "0.61610496", "0.61599034", "0.61498296", "0.61358976", "0.61285317", "0.6126438", "0.6123378", "0.6121838", "0.6116617", "0.61157626", "0.6102953", "0.60945594", "0.6091274", "0.6085279", "0.607742", "0.60742486", "0.60732186", "0.60707676", "0.60666865", "0.6065298", "0.60444343", "0.60419726", "0.60313404", "0.6030922" ]
0.6742099
38
The set of arguments for constructing an Assessment resource.
def __init__(__self__, *, resource_details: pulumi.Input[Union['AzureResourceDetailsArgs', 'OnPremiseResourceDetailsArgs', 'OnPremiseSqlResourceDetailsArgs']], resource_id: pulumi.Input[str], status: pulumi.Input['AssessmentStatusArgs'], additional_data: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, assessment_name: Optional[pulumi.Input[str]] = None, metadata: Optional[pulumi.Input['SecurityAssessmentMetadataPropertiesArgs']] = None, partners_data: Optional[pulumi.Input['SecurityAssessmentPartnerDataArgs']] = None): pulumi.set(__self__, "resource_details", resource_details) pulumi.set(__self__, "resource_id", resource_id) pulumi.set(__self__, "status", status) if additional_data is not None: pulumi.set(__self__, "additional_data", additional_data) if assessment_name is not None: pulumi.set(__self__, "assessment_name", assessment_name) if metadata is not None: pulumi.set(__self__, "metadata", metadata) if partners_data is not None: pulumi.set(__self__, "partners_data", partners_data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(__self__,\n resource_name: str,\n args: AssessmentArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AssessmentPolicyArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n additional_data: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n assessment_name: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentMetadataPropertiesArgs']]] = None,\n partners_data: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentPartnerDataArgs']]] = None,\n resource_details: Optional[pulumi.Input[Union[pulumi.InputType['AzureResourceDetailsArgs'], pulumi.InputType['OnPremiseResourceDetailsArgs'], pulumi.InputType['OnPremiseSqlResourceDetailsArgs']]]] = None,\n resource_id: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[pulumi.InputType['AssessmentStatusArgs']]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n additional_data: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n assessment_name: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentMetadataPropertiesArgs']]] = None,\n partners_data: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentPartnerDataArgs']]] = None,\n resource_details: Optional[pulumi.Input[Union[pulumi.InputType['AzureResourceDetailsArgs'], pulumi.InputType['OnPremiseResourceDetailsArgs'], pulumi.InputType['OnPremiseSqlResourceDetailsArgs']]]] = None,\n resource_id: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[pulumi.InputType['AssessmentStatusArgs']]] = None,\n __props__=None,\n __name__=None,\n __opts__=None):\n if __name__ is not None:\n warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning)\n resource_name = __name__\n if __opts__ is not None:\n warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning)\n opts = __opts__\n if opts is None:\n opts = pulumi.ResourceOptions()\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n if opts.version is None:\n opts.version = _utilities.get_version()\n if opts.id is None:\n if __props__ is not None:\n raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')\n __props__ = dict()\n\n __props__['additional_data'] = additional_data\n __props__['assessment_name'] = assessment_name\n __props__['metadata'] = metadata\n __props__['partners_data'] = partners_data\n if resource_details is None and not opts.urn:\n raise TypeError(\"Missing required property 'resource_details'\")\n __props__['resource_details'] = resource_details\n if resource_id is None and not opts.urn:\n raise TypeError(\"Missing required property 'resource_id'\")\n __props__['resource_id'] = resource_id\n if status is None and not opts.urn:\n raise TypeError(\"Missing required property 'status'\")\n __props__['status'] = status\n __props__['display_name'] = None\n __props__['links'] = None\n __props__['name'] = None\n __props__['type'] = None\n alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_=\"azure-nextgen:security:Assessment\"), pulumi.Alias(type_=\"azure-native:security/latest:Assessment\"), 
pulumi.Alias(type_=\"azure-nextgen:security/latest:Assessment\"), pulumi.Alias(type_=\"azure-native:security/v20190101preview:Assessment\"), pulumi.Alias(type_=\"azure-nextgen:security/v20190101preview:Assessment\"), pulumi.Alias(type_=\"azure-native:security/v20200101:Assessment\"), pulumi.Alias(type_=\"azure-nextgen:security/v20200101:Assessment\")])\n opts = pulumi.ResourceOptions.merge(opts, alias_opts)\n super(Assessment, __self__).__init__(\n 'azure-native:security:Assessment',\n resource_name,\n __props__,\n opts)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n application_object_id: Optional[pulumi.Input[str]] = None,\n audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n issuer: Optional[pulumi.Input[str]] = None,\n subject: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n admin_role_values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n allowed_organizations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n editor_role_values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n email_assertion: Optional[pulumi.Input[str]] = None,\n groups_assertion: Optional[pulumi.Input[str]] = None,\n idp_metadata_url: Optional[pulumi.Input[str]] = None,\n idp_metadata_xml: Optional[pulumi.Input[str]] = None,\n login_assertion: Optional[pulumi.Input[str]] = None,\n login_validity_duration: Optional[pulumi.Input[int]] = None,\n name_assertion: Optional[pulumi.Input[str]] = None,\n org_assertion: Optional[pulumi.Input[str]] = None,\n role_assertion: Optional[pulumi.Input[str]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AccessConfigurationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[AclArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n disable_status_check: Optional[pulumi.Input[bool]] = None,\n email: Optional[pulumi.Input[str]] = None,\n masters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[int]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n application_name: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n resource_group_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n label: Optional[pulumi.Input[str]] = None,\n permissions: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n 
application_object_id: Optional[pulumi.Input[str]] = None,\n audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n credential_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n issuer: Optional[pulumi.Input[str]] = None,\n subject: Optional[pulumi.Input[str]] = None):\n if application_object_id is not None:\n pulumi.set(__self__, \"application_object_id\", application_object_id)\n if audiences is not None:\n pulumi.set(__self__, \"audiences\", audiences)\n if credential_id is not None:\n pulumi.set(__self__, \"credential_id\", credential_id)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if display_name is not None:\n pulumi.set(__self__, \"display_name\", display_name)\n if issuer is not None:\n pulumi.set(__self__, \"issuer\", issuer)\n if subject is not None:\n pulumi.set(__self__, \"subject\", subject)", "def __init__(__self__,\n resource_name: str,\n args: RoleAssignmentArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n categories: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n implementation_effort: Optional[pulumi.Input[str]] = None,\n remediation_description: Optional[pulumi.Input[str]] = None,\n severity: Optional[pulumi.Input[str]] = None,\n threats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n user_impact: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n extra: Optional[pulumi.Input[Mapping[str, pulumi.Input[Sequence[pulumi.Input[str]]]]]] = None,\n groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n non_resource_attributes: Optional[pulumi.Input['NonResourceAttributesArgs']] = None,\n resource_attributes: Optional[pulumi.Input['ResourceAttributesArgs']] = None,\n uid: Optional[pulumi.Input[str]] = None,\n user: Optional[pulumi.Input[str]] = None):\n if extra is not None:\n pulumi.set(__self__, \"extra\", extra)\n if groups is not None:\n pulumi.set(__self__, \"groups\", groups)\n if non_resource_attributes is not None:\n pulumi.set(__self__, \"non_resource_attributes\", non_resource_attributes)\n if resource_attributes is not None:\n pulumi.set(__self__, \"resource_attributes\", resource_attributes)\n if uid is not None:\n pulumi.set(__self__, \"uid\", uid)\n if user is not None:\n pulumi.set(__self__, \"user\", user)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n application_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n source_resource_type: Optional[pulumi.Input[Union[str, 'ApplicationSourceResourceType']]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n application_object_id: pulumi.Input[str],\n audiences: pulumi.Input[Sequence[pulumi.Input[str]]],\n display_name: pulumi.Input[str],\n issuer: pulumi.Input[str],\n subject: pulumi.Input[str],\n description: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"application_object_id\", application_object_id)\n pulumi.set(__self__, \"audiences\", audiences)\n pulumi.set(__self__, \"display_name\", display_name)\n pulumi.set(__self__, \"issuer\", issuer)\n pulumi.set(__self__, \"subject\", subject)\n if 
description is not None:\n pulumi.set(__self__, \"description\", description)", "def __init__(__self__,\n resource_name: str,\n args: AppArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ApplicationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ApplicationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ApplicationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n agent_id: Optional[pulumi.Input[int]] = None,\n description: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n key: Optional[pulumi.Input[str]] = None,\n values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n assessment_type: str,\n display_name: str,\n policy_definition_id: str,\n severity: str,\n categories: Optional[Sequence[str]] = None,\n description: Optional[str] = None,\n implementation_effort: Optional[str] = None,\n partner_data: Optional['outputs.SecurityAssessmentMetadataPartnerDataResponse'] = None,\n preview: Optional[bool] = None,\n remediation_description: Optional[str] = None,\n threats: Optional[Sequence[str]] = None,\n user_impact: Optional[str] = None):\n pulumi.set(__self__, \"assessment_type\", assessment_type)\n pulumi.set(__self__, \"display_name\", display_name)\n pulumi.set(__self__, \"policy_definition_id\", policy_definition_id)\n pulumi.set(__self__, \"severity\", severity)\n if categories is not None:\n pulumi.set(__self__, \"categories\", categories)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if implementation_effort is not None:\n pulumi.set(__self__, \"implementation_effort\", implementation_effort)\n if partner_data is not None:\n pulumi.set(__self__, \"partner_data\", partner_data)\n if preview is not None:\n pulumi.set(__self__, \"preview\", preview)\n if remediation_description is not None:\n pulumi.set(__self__, \"remediation_description\", remediation_description)\n if threats is not None:\n pulumi.set(__self__, \"threats\", threats)\n if user_impact is not None:\n pulumi.set(__self__, \"user_impact\", user_impact)", "def __init__(__self__,\n resource_name: str,\n args: VpcAssociationAuthorizationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n definition: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n role_arn: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n bucket: Optional[pulumi.Input[str]] = None,\n default_acl: Optional[pulumi.Input[str]] = None,\n predefined_acl: Optional[pulumi.Input[str]] = None,\n role_entities: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: RuleArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", 
"def __init__(__self__,\n resource_name: str,\n args: OAuthArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: StudioArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n app_name: Optional[pulumi.Input[str]] = None,\n bundle_id: Optional[pulumi.Input[str]] = None,\n encoded_icon: Optional[pulumi.Input[str]] = None,\n industry_id: Optional[pulumi.Input[str]] = None,\n package_name: Optional[pulumi.Input[str]] = None,\n product_id: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n roles: pulumi.Input['AccessPolicySpecRolesArgs'],\n subject: pulumi.Input['AccessPolicySpecSubjectArgs'],\n target: pulumi.Input['AccessPolicySpecTargetArgs']):\n pulumi.set(__self__, \"roles\", roles)\n pulumi.set(__self__, \"subject\", subject)\n pulumi.set(__self__, \"target\", target)", "def __init__(__self__,\n resource_name: str,\n args: PolicyArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: UserArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: UserArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[AgentArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def _setup_arguments(self):\n\n self._parser.add_argument(\"-a\", \"--area-interest\",\n help=\"Area of interest to process, \"\n \"shapefile path\", required=True)\n # FUTURE VERSIONS\n # self._parser.add_argument(\"-s\", \"--srtm-dem\",\n # help=\"Path to SRTM DEM file. Zip format\",\n # required=False)\n # self._parser.add_argument(\"-y\", \"--hsheds-dem\",\n # help=\"Path to HSHEDS DEM file. Zip format\",\n # required=False)\n # self._parser.add_argument(\"-g\", \"--groves-file\",\n # help=\"Path to groves classification file. 
\"\n # \"Zip format\",\n # required=False)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n license_count: Optional[pulumi.Input[int]] = None,\n license_count_hard_limit: Optional[pulumi.Input[bool]] = None,\n license_counting_type: Optional[pulumi.Input[str]] = None,\n license_rules: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n accessibility_error_redirect_url: Optional[pulumi.Input[str]] = None,\n accessibility_login_redirect_url: Optional[pulumi.Input[str]] = None,\n accessibility_self_service: Optional[pulumi.Input[bool]] = None,\n admin_note: Optional[pulumi.Input[str]] = None,\n app_links_json: Optional[pulumi.Input[str]] = None,\n app_settings_json: Optional[pulumi.Input[str]] = None,\n authentication_policy: Optional[pulumi.Input[str]] = None,\n auto_key_rotation: Optional[pulumi.Input[bool]] = None,\n auto_submit_toolbar: Optional[pulumi.Input[bool]] = None,\n client_basic_secret: Optional[pulumi.Input[str]] = None,\n client_id: Optional[pulumi.Input[str]] = None,\n client_uri: Optional[pulumi.Input[str]] = None,\n consent_method: Optional[pulumi.Input[str]] = None,\n enduser_note: Optional[pulumi.Input[str]] = None,\n grant_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n groups_claim: Optional[pulumi.Input[pulumi.InputType['OAuthGroupsClaimArgs']]] = None,\n hide_ios: Optional[pulumi.Input[bool]] = None,\n hide_web: Optional[pulumi.Input[bool]] = None,\n implicit_assignment: Optional[pulumi.Input[bool]] = None,\n issuer_mode: Optional[pulumi.Input[str]] = None,\n jwks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OAuthJwkArgs']]]]] = None,\n jwks_uri: Optional[pulumi.Input[str]] = None,\n label: Optional[pulumi.Input[str]] = None,\n login_mode: Optional[pulumi.Input[str]] = None,\n login_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n login_uri: Optional[pulumi.Input[str]] = None,\n logo: Optional[pulumi.Input[str]] = None,\n logo_uri: Optional[pulumi.Input[str]] = None,\n omit_secret: Optional[pulumi.Input[bool]] = None,\n pkce_required: Optional[pulumi.Input[bool]] = None,\n policy_uri: Optional[pulumi.Input[str]] = None,\n post_logout_redirect_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n profile: Optional[pulumi.Input[str]] = None,\n redirect_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n refresh_token_leeway: Optional[pulumi.Input[int]] = None,\n refresh_token_rotation: Optional[pulumi.Input[str]] = None,\n response_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n status: Optional[pulumi.Input[str]] = None,\n token_endpoint_auth_method: Optional[pulumi.Input[str]] = None,\n tos_uri: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n user_name_template: Optional[pulumi.Input[str]] = None,\n user_name_template_push_status: Optional[pulumi.Input[str]] = None,\n user_name_template_suffix: Optional[pulumi.Input[str]] = None,\n user_name_template_type: Optional[pulumi.Input[str]] = None,\n wildcard_redirect: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: EndpointAclPolicyArgs,\n opts: 
Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n action: Optional[pulumi.Input[str]] = None,\n layer_name: Optional[pulumi.Input[str]] = None,\n organization_id: Optional[pulumi.Input[str]] = None,\n principal: Optional[pulumi.Input[str]] = None,\n statement_id: Optional[pulumi.Input[str]] = None,\n version_number: Optional[pulumi.Input[int]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AccountAliasArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n app_id: Optional[pulumi.Input[str]] = None,\n index: Optional[pulumi.Input[str]] = None,\n master: Optional[pulumi.Input[str]] = None,\n pattern: Optional[pulumi.Input[str]] = None,\n permissions: Optional[pulumi.Input[str]] = None,\n required: Optional[pulumi.Input[bool]] = None,\n title: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n user_type: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def add_required_arguments(self, *args):\n self._add_sample_specific_arguments(True, *args)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n policy: Optional[pulumi.Input[str]] = None,\n resource_arn: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n authorize: Optional[pulumi.Input[str]] = None,\n communications_enabled: Optional[pulumi.Input[bool]] = None,\n login: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: OrgConfigurationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def construct_params(self):\n\n return {\"expand\": self.get_expand()}", "def __init__(__self__,\n resource_name: str,\n args: EnvironmentArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n attestation_authority: Optional[pulumi.Input[pulumi.InputType['AttestationAuthorityArgs']]] = None,\n base_image: Optional[pulumi.Input[pulumi.InputType['BasisArgs']]] = None,\n build_type: Optional[pulumi.Input[pulumi.InputType['BuildTypeArgs']]] = None,\n compliance: Optional[pulumi.Input[pulumi.InputType['ComplianceNoteArgs']]] = None,\n deployable: Optional[pulumi.Input[pulumi.InputType['DeployableArgs']]] = None,\n discovery: Optional[pulumi.Input[pulumi.InputType['DiscoveryArgs']]] = None,\n dsse_attestation: Optional[pulumi.Input[pulumi.InputType['DSSEAttestationNoteArgs']]] = None,\n expiration_time: Optional[pulumi.Input[str]] = None,\n long_description: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n note_id: Optional[pulumi.Input[str]] = None,\n package: Optional[pulumi.Input[pulumi.InputType['PackageArgs']]] = None,\n project: Optional[pulumi.Input[str]] = None,\n related_url: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RelatedUrlArgs']]]]] = None,\n sbom: Optional[pulumi.Input[pulumi.InputType['DocumentNoteArgs']]] = None,\n sbom_reference: Optional[pulumi.Input[pulumi.InputType['SBOMReferenceNoteArgs']]] = None,\n short_description: Optional[pulumi.Input[str]] = None,\n spdx_file: Optional[pulumi.Input[pulumi.InputType['FileNoteArgs']]] = None,\n spdx_package: 
Optional[pulumi.Input[pulumi.InputType['PackageInfoNoteArgs']]] = None,\n spdx_relationship: Optional[pulumi.Input[pulumi.InputType['RelationshipNoteArgs']]] = None,\n upgrade: Optional[pulumi.Input[pulumi.InputType['UpgradeNoteArgs']]] = None,\n vulnerability_assessment: Optional[pulumi.Input[pulumi.InputType['VulnerabilityAssessmentNoteArgs']]] = None,\n vulnerability_type: Optional[pulumi.Input[pulumi.InputType['VulnerabilityTypeArgs']]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n api: Optional[pulumi.Input[pulumi.InputType['ApplicationApiArgs']]] = None,\n app_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,\n feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]]] = None,\n group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n logo_image: Optional[pulumi.Input[str]] = None,\n marketing_url: Optional[pulumi.Input[str]] = None,\n notes: Optional[pulumi.Input[str]] = None,\n oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,\n optional_claims: Optional[pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']]] = None,\n owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,\n privacy_statement_url: Optional[pulumi.Input[str]] = None,\n public_client: Optional[pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']]] = None,\n required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]]] = None,\n service_management_reference: Optional[pulumi.Input[str]] = None,\n sign_in_audience: Optional[pulumi.Input[str]] = None,\n single_page_application: Optional[pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']]] = None,\n support_url: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n template_id: Optional[pulumi.Input[str]] = None,\n terms_of_service_url: Optional[pulumi.Input[str]] = None,\n web: Optional[pulumi.Input[pulumi.InputType['ApplicationWebArgs']]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: LicenseConfigurationArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: BundleArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n certificate: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: BudgetArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: BudgetActionArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", 
"def __init__(__self__,\n resource_name: str,\n args: AlarmArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[NoteArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def get_arguments():\n\n # Creates the ArgumentParser\n parser = argparse.ArgumentParser(usage='Creates an ensemble of classifiers based on majority voting.')\n\n # Adds a dataset argument with pre-defined choices\n parser.add_argument('dataset', help='Dataset identifier', choices=['RSDataset', 'RSSCN7', 'UCMerced_LandUse'])\n\n return parser.parse_args()", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n identity_pool_id: Optional[pulumi.Input[str]] = None,\n identity_provider_name: Optional[pulumi.Input[str]] = None,\n principal_tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n use_defaults: Optional[pulumi.Input[bool]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: ScriptArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n client_id: Optional[pulumi.Input[str]] = None,\n client_secret: Optional[pulumi.Input[str]] = None,\n consumer_id: Optional[pulumi.Input[str]] = None,\n hash_secret: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n redirect_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AclRuleArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def createConstraint(*argv):", "def base_arguments(self):\n raise NotImplementedError()", "def __init__(__self__, *,\n non_resource_attributes: Optional[pulumi.Input['NonResourceAttributesArgs']] = None,\n resource_attributes: Optional[pulumi.Input['ResourceAttributesArgs']] = None):\n if non_resource_attributes is not None:\n pulumi.set(__self__, \"non_resource_attributes\", non_resource_attributes)\n if resource_attributes is not None:\n pulumi.set(__self__, \"resource_attributes\", resource_attributes)", "def __init__(__self__,\n resource_name: str,\n args: WorkflowArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(self, resource, *args):\n self.args = list(args)\n self.flags = OrderedDict()\n self.additional_flags = []\n self._AddCommonFlags(resource)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n account_id: Optional[pulumi.Input[str]] = None,\n group: Optional[pulumi.Input[str]] = None,\n image_id: Optional[pulumi.Input[str]] = None,\n organization_arn: Optional[pulumi.Input[str]] = None,\n organizational_unit_arn: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def define_options(self):\n\n from clinica.engine.cmdparser import PIPELINE_CATEGORIES\n\n clinica_comp = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_COMPULSORY'])\n clinica_comp.add_argument(\"caps_directory\",\n help='Path to the CAPS directory.')\n clinica_comp.add_argument(\"list_bvalues\", type=str,\n help='String listing all the shells (i.e. 
the b-values) in the corrected DWI datasets comma separated (e.g, 0,300,700,2200)')\n # Optional arguments\n clinica_opt = self._args.add_argument_group(PIPELINE_CATEGORIES['CLINICA_OPTIONAL'])\n\n clinica_opt.add_argument(\"-wd\", \"--working_directory\",\n help='Temporary directory to store pipeline intermediate results')\n clinica_opt.add_argument(\"-np\", \"--n_procs\", type=int, default=4,\n help='Number of cores used to run in parallel')\n clinica_opt.add_argument(\"-tsv\", \"--subjects_sessions_tsv\",\n help='TSV file containing a list of subjects with their sessions.')", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n configuration_set_name: Optional[pulumi.Input[str]] = None,\n event_destination: Optional[pulumi.Input[pulumi.InputType['ConfigurationSetEventDestinationEventDestinationArgs']]] = None,\n event_destination_name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n accept_language: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n distributor: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n owner: Optional[pulumi.Input[str]] = None,\n provisioning_artifact_parameters: Optional[pulumi.Input[pulumi.InputType['ProductProvisioningArtifactParametersArgs']]] = None,\n support_description: Optional[pulumi.Input[str]] = None,\n support_email: Optional[pulumi.Input[str]] = None,\n support_url: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n type: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(self, objectives, constraints, declarations = None):\n \n self.objectives = objectives\n self.constraints = constraints\n self.declarations = declarations", "def __init__(__self__, *,\n arn: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n license_count: Optional[pulumi.Input[int]] = None,\n license_count_hard_limit: Optional[pulumi.Input[bool]] = None,\n license_counting_type: Optional[pulumi.Input[str]] = None,\n license_rules: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n owner_account_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):\n if arn is not None:\n pulumi.set(__self__, \"arn\", arn)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if license_count is not None:\n pulumi.set(__self__, \"license_count\", license_count)\n if license_count_hard_limit is not None:\n pulumi.set(__self__, \"license_count_hard_limit\", license_count_hard_limit)\n if license_counting_type is not None:\n pulumi.set(__self__, \"license_counting_type\", license_counting_type)\n if license_rules is not None:\n pulumi.set(__self__, \"license_rules\", license_rules)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if owner_account_id is not None:\n pulumi.set(__self__, \"owner_account_id\", owner_account_id)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if tags_all is not None:\n pulumi.set(__self__, \"tags_all\", tags_all)", "def configure_args(self):\n super(InstaApriori, self).configure_args()\n self.add_passthru_arg('-iteration', type=int, help=\"The current iteration. 
Not used as a command line argument\")\n self.add_passthru_arg('--k', type=int, default=3, help=\"Specify the maximum size of itemsets to find\")\n self.add_passthru_arg('--s', type=float, help=\"Specify the minimum support threshold\")\n self.add_passthru_arg('--c', type=float, default=0, help=\"Specify the minimum confidence threshold\")\n self.add_file_arg('--f', default='frequent.txt',\n help=\"Specify the name of the file used to store frequent itemsets\")", "def __init__(__self__,\n resource_name: str,\n args: ProductArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[DomainArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n end_time: Optional[pulumi.Input[int]] = None,\n name: Optional[pulumi.Input[str]] = None,\n program_text: Optional[pulumi.Input[str]] = None,\n start_time: Optional[pulumi.Input[int]] = None,\n time_range: Optional[pulumi.Input[int]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n inter_region_traffic_qos_policy_description: Optional[pulumi.Input[str]] = None,\n inter_region_traffic_qos_policy_name: Optional[pulumi.Input[str]] = None,\n transit_router_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_router_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n domain_id: Optional[pulumi.Input[str]] = None,\n group_id: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n role_id: Optional[pulumi.Input[str]] = None,\n user_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(self, applicationNumber='', payment=0.0, effectiveDate='', expirationDate='', permitID='', *args, **kw_args):\n #: Permit application number that is used by municipality, state, province, etc.\n self.applicationNumber = applicationNumber\n\n #: Total cost of permit.\n self.payment = payment\n\n #: Date that permit became official.\n self.effectiveDate = effectiveDate\n\n #: Permit expiration date.\n self.expirationDate = expirationDate\n\n #: Permit identifier.\n self.permitID = permitID\n\n super(AccessPermit, self).__init__(*args, **kw_args)", "def __init__(__self__,\n resource_name: str,\n args: BotArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: RuntimeArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AppUserBaseSchemaPropertyArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n description: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SyntheticsPrivateLocationMetadataArgs']]] = None,\n name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: AggregatorArgs,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n type_name: 
Optional[pulumi.Input[str]] = None,\n type_version_arn: Optional[pulumi.Input[str]] = None,\n version_id: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[InstanceArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n client_id: Optional[pulumi.Input[str]] = None,\n client_secret: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n metadata_endpoint: Optional[pulumi.Input[str]] = None,\n opid: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n service_name: Optional[pulumi.Input[str]] = None,\n use_in_api_documentation: Optional[pulumi.Input[bool]] = None,\n use_in_test_console: Optional[pulumi.Input[bool]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n api_management_id: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n arn: Optional[pulumi.Input[str]] = None,\n minimum_engine_version: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None):\n if arn is not None:\n pulumi.set(__self__, \"arn\", arn)\n if minimum_engine_version is not None:\n pulumi.set(__self__, \"minimum_engine_version\", minimum_engine_version)\n if name is not None:\n pulumi.set(__self__, \"name\", name)\n if name_prefix is not None:\n pulumi.set(__self__, \"name_prefix\", name_prefix)\n if tags is not None:\n pulumi.set(__self__, \"tags\", tags)\n if tags_all is not None:\n pulumi.set(__self__, \"tags_all\", tags_all)\n if user_names is not None:\n pulumi.set(__self__, \"user_names\", user_names)", "def _set_arguments(self):\n cert_location = f\"dependencies{sep}certificates{sep}localuser.crt\"\n key_location = f\"dependencies{sep}certificates{sep}localuser.key\"\n assert Path(cert_location).exists(), (\n f\"The certificate isn't \"\n f\"present at location {Path(cert_location).absolute()}\"\n )\n assert Path(key_location).exists(), (\n f\"The certificate key isn't \"\n f\"present at location {Path(key_location).absolute()}\"\n )\n self._arguments = [\n (\n \"test-certificate-verify\",\n [\"-k\", key_location, \"-c\", cert_location],\n ),\n (\n \"test-sig-algs\",\n [],\n ),\n (\n \"test-clienthello-md5\",\n [],\n ),\n (\n \"test-tls13-pkcs-signature\",\n [],\n ),\n ]", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n acl_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n dest_cidr: Optional[pulumi.Input[str]] = None,\n dest_port_range: Optional[pulumi.Input[str]] = None,\n direction: Optional[pulumi.Input[str]] = None,\n ip_protocol: Optional[pulumi.Input[str]] = None,\n policy: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n source_cidr: Optional[pulumi.Input[str]] = None,\n source_port_range: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def 
__init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n aggregator_accounts: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AggregatorAggregatorAccountArgs']]]]] = None,\n aggregator_name: Optional[pulumi.Input[str]] = None,\n aggregator_type: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n config: Optional[pulumi.Input[pulumi.InputType['ConfigArgs']]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ...", "def __init__(__self__,\n resource_name: str,\n args: Optional[ExtensionAssociationArgs] = None,\n opts: Optional[pulumi.ResourceOptions] = None):\n ...", "def Args(parser):\n\n parser.add_argument(\n '--service',\n help='The service to which the principal is to be added.',\n required=True)\n parser.add_argument(\n '--label',\n help=('Optionally, the visibility label to which the principal is '\n 'to be added.'))\n parser.add_argument(\n 'type',\n help=('The type of principal to add to the access policy entity. '\n 'Choose from {0}.').format(\n ', '.join(sorted(Add._PRINCIPAL_TYPES))),\n type=lambda x: str(x).lower(),\n choices=sorted(Add._PRINCIPAL_TYPES))\n parser.add_argument(\n 'principal',\n help='The principal to add to the access policy entity.')", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n extension_identifier: Optional[pulumi.Input[str]] = None,\n extension_version_number: Optional[pulumi.Input[int]] = None,\n parameters: Optional[Any] = None,\n resource_identifier: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ExtensionAssociationTagArgs']]]]] = None,\n __props__=None):\n ...", "def __init__(__self__, *,\n agent_pool: Optional[pulumi.Input[str]] = None,\n apms: Optional[pulumi.Input[Sequence[pulumi.Input['ApmReferenceArgs']]]] = None,\n builder: Optional[pulumi.Input[str]] = None,\n certificates: Optional[pulumi.Input[Sequence[pulumi.Input['CertificateReferenceArgs']]]] = None,\n env: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n relative_path: Optional[pulumi.Input[str]] = None,\n resource_requests: Optional[pulumi.Input['BuildResourceRequestsArgs']] = None):\n if agent_pool is not None:\n pulumi.set(__self__, \"agent_pool\", agent_pool)\n if apms is not None:\n pulumi.set(__self__, \"apms\", apms)\n if builder is not None:\n pulumi.set(__self__, \"builder\", builder)\n if certificates is not None:\n pulumi.set(__self__, \"certificates\", certificates)\n if env is not None:\n pulumi.set(__self__, \"env\", env)\n if relative_path is not None:\n pulumi.set(__self__, \"relative_path\", relative_path)\n if resource_requests is not None:\n pulumi.set(__self__, \"resource_requests\", resource_requests)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n access_string: Optional[pulumi.Input[str]] = None,\n authentication_mode: Optional[pulumi.Input[pulumi.InputType['UserAuthenticationModeArgs']]] = None,\n engine: Optional[pulumi.Input[str]] = None,\n no_password_required: Optional[pulumi.Input[bool]] = None,\n passwords: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, 
pulumi.Input[str]]]] = None,\n user_id: Optional[pulumi.Input[str]] = None,\n user_name: Optional[pulumi.Input[str]] = None,\n __props__=None):\n ..." ]
[ "0.71892506", "0.676996", "0.6377259", "0.6322052", "0.63183135", "0.59458417", "0.5945797", "0.57835245", "0.57723206", "0.5743007", "0.5741708", "0.5738933", "0.57031816", "0.5700542", "0.5700223", "0.56762666", "0.5639396", "0.56337005", "0.56271166", "0.56271166", "0.56271166", "0.56263995", "0.5615061", "0.55948", "0.5593265", "0.55912226", "0.5580287", "0.55713814", "0.55625665", "0.5555123", "0.55371684", "0.55290675", "0.552441", "0.55202657", "0.55202657", "0.55114406", "0.5508774", "0.5503573", "0.5490095", "0.5486293", "0.54854524", "0.5481603", "0.54726195", "0.5467663", "0.54671466", "0.54623276", "0.54517055", "0.54457605", "0.54457295", "0.54433733", "0.5430518", "0.5427976", "0.54240936", "0.541827", "0.54170704", "0.5406536", "0.5404046", "0.539931", "0.5396702", "0.5393359", "0.5385171", "0.53704464", "0.5364729", "0.5363133", "0.53620476", "0.53544784", "0.5354412", "0.5344888", "0.5341096", "0.53327405", "0.53178805", "0.5304636", "0.530155", "0.52870697", "0.5284953", "0.5284176", "0.52801275", "0.5278925", "0.5277522", "0.5276545", "0.52756906", "0.527458", "0.527413", "0.5271627", "0.5270566", "0.5267364", "0.526096", "0.5259757", "0.5257476", "0.525082", "0.5249002", "0.5246864", "0.5244981", "0.5235743", "0.5233967", "0.5225666", "0.5225419", "0.52231044", "0.5222025", "0.5220995" ]
0.6006293
5
Details of the resource that was assessed
def resource_details(self) -> pulumi.Input[Union['AzureResourceDetailsArgs', 'OnPremiseResourceDetailsArgs', 'OnPremiseSqlResourceDetailsArgs']]: return pulumi.get(self, "resource_details")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resource_details(self) -> pulumi.Output[Any]:\n return pulumi.get(self, \"resource_details\")", "def resource_details(self) -> pulumi.Output[Any]:\n return pulumi.get(self, \"resource_details\")", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def resource(self):\n return str(self._resource)", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def details(self):\n pass", "def __str__(self):\n return self.resource.__name__", "def __str__(self):\n return self.__resource;", "def __str__(self):\n\n return str(self.__resource);", "def meta_data(self):\r\n return simplejson.dumps(self.__resource_meta)", "def detail(self):\n info = self.info()\n return info", "def PrintResource(resource):\n print resource.resource_id.text, resource.GetResourceType()", "def details(self):\n raise NotImplementedError()", "def getResource(self):\n pass;", "def resourceid(self):", "def info(self) -> dict:", "def info(self):\n return self.__dict__[self.sid]", "def info(self):", "def info(self):", "def resource(self):\n return self.properties.get('resource',\n Entity(self.context, ResourcePath(\"resource\", self.resource_path)))", "def details(self):\n return self._details", "def _get_information(self):\n pass", "def get_resource(self):\n msg = _(\"wrote a new wechat article : %(title)s\") % {\n 'title': self.title}\n msg = unicode(msg)\n resource = {\n 'title': msg,\n 'description': self.get_digest(),\n 'url': self.get_absolute_url(),\n 'image_url': self.cover_img.url,\n }\n return resource", "def info(self, resource, id):\n return self.request('/' + resource + '/' + str(id))", "def info(self):\n self._info()", "def info(self):\n return self._info", "def info(self):\n return self._info", "def get_info(self):\n return None", "def metadata(self):\r\n return resources.Metadata(self)", "def get_resource(self):\n raise errors.Unimplemented()", "def get_info(self):\n pass", "def get_info(self):\n pass", "def getResource(self):\n return self.__resource;", "def info(self):\r\n return self._get('info', {})", "def details (self):\n return six.text_type(self)", "def info(self) -> str:\n return self._info", "def info(self) -> str:\n return self._info", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_info(self):\n return \"TODO !\"", "def get_details(self):", "def data(self):\n return { # TODO Actually query for this shit\n \"foo\": self.__name__,\n \"url\": f\"{self.request.resource_url(self)}\",\n }", "def res_description(self):\n return self.get(\"res_description\", decode=True)", "def getResource(self):\n\n return self.__resource;", "def getInfo(self):\n return self.name + \" [\" + self.target_type + \"]\"", "def get_info(self) -> str:\n template_data = self.get_template_data()\n return self.get_template().render(\n resource=self.resource,\n markdown=markdown,\n data=template_data,\n base=RESOURCE_TEMPLATE\n )", "def info(self) -> str:\n return pulumi.get(self, \"info\")", "def get_info(self) -> str:\n return self.info", "def details(self) -> \"dict\":\n return self._attrs.get(\"details\")", "def get_info(self) -> 
str:\n raise NotImplementedError()", "def info(self):\n return self.info_text", "def detail(self):\n return self.status[\"health\"][\"detail\"]", "def _resource_fields(chromo):\n return {\n 'name': chromo['resource_name'],\n 'description': chromo['title'],\n 'url_type': u'datastore',\n }", "def get_info(self):\n self.exists = self.check_subscr()\n return self.attrs", "def getInfo():", "def info(self):\n if not self._was_read:\n self.read()\n return self._info", "def get_details(self):\n raise Exception(\"bad details\")", "def info(self):\n return self.current_run.info", "def __str__(self):\n \n for att in self.__dict__:\n print('%s: %r' % (att, getattr(self, att)))\n \n return 'Background Sources class object attributes'", "def info(self):\n return {}", "def resource_state(self) -> str:\n return pulumi.get(self, \"resource_state\")", "def details(self):\n print \"ABC - Deployer.details()\"", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def getInfo(self):\n return self.info", "def name(self):\n return self.raw_resource[\"name\"]", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def get_resource_information():\n\n\n # the resources we are allowed to use is easy. We just copy this...\n resource_limit_dict = _resources_allowed_dict.copy()\n\n \n # from the other dict, we only take the resource information. (this omits\n # locks and timing information that isn't needed)\n\n # first, let's do the easy thing, the quantity resources. 
These are just \n # floats\n resource_use_dict = {}\n for resourcename in resource_constants.quantity_resources:\n resource_use_dict[resourcename] = _resources_consumed_dict[resourcename]\n\n # for the fungible resources (files opened, etc,), we only need a count...\n for resourcename in resource_constants.fungible_item_resources:\n resource_use_dict[resourcename] = len(_resources_consumed_dict[resourcename])\n\n # for the individual item resources (ports, etc,), we copy the set...\n for resourcename in resource_constants.individual_item_resources:\n resource_use_dict[resourcename] = _resources_consumed_dict[resourcename].copy()\n\n # and that's it!\n return (resource_limit_dict, resource_use_dict)", "def getInfo(self):\n doc = minidom.parse(urllib.urlopen(serverString + \"/rest/asset/\" + self.id))\n self._getInfoFromNode(doc.getElementsByTagName(\"asset\")[0])", "def info(self):\n return (self._title, self._version, self._descr)", "def getInfo(self):\n return self._info", "def print_details(self):\n print(\"[{}]\".format(self.name))\n print(\"ID: \" + str(self.id))\n print(\"name: %s\" % self.name)\n print(\"URL: %s\" % self.url)\n print(\"CPUs: \" + str(self.cpus) + \" cores\")\n print(\"Mem: \" + self.memory_str)\n print(\"Tasks: \" + str(self.tasks_len))\n print(\"Uptime %s\" + self.uptime)\n print(\"Uptime Descriptive %s\" + self.uptime_descriptive)\n print(\" \")", "def resource_link_title(self):\n return self.request.POST.get(\"resource_link_title\", self.resource_link_id)", "def get_main_information(self) -> Dict:\n if self.information is None:\n self.information = self.orthanc.get_instance_information(\n self.identifier\n )\n\n return self.information", "def resources(self):", "def getInstDescription(self):\n return self.name()", "def resource(self):\n return self.add_resource", "def extract_resource_details(metadata):\n\n # check data integrity\n if Update.get_entry(metadata, 'success') is not True:\n raise UpdateException('metadata does not have `success` equal to `True`')\n if len(Update.get_entry(metadata, 'result')) != 1:\n raise UpdateException('metadata does not have exactly 1 result')\n if len(Update.get_entry(metadata, 'result', 0, 'resources')) != 1:\n raise UpdateException('metadata does not have exactly 1 resource')\n\n # return resource details\n resource = Update.get_entry(metadata, 'result', 0, 'resources', 0)\n return resource['url'], resource['revision_timestamp']", "def get_description(self):", "def return_info(self):\n\t\treturn self.info", "def print_resource():\n logging.info(\"__package__: %s\", __package__)\n logging.info(\"__name__: %s\", __name__)\n logging.info(\"JSON_RESOURCE: %s\", JSON_RESOURCE)\n logging.info(\"JSON_PATH: %s\", JSON_PATH)", "def describe(self):\n return str(self)", "def get_main_information(self) -> Dict:\n if self.lock:\n if self._information is None:\n # Setup self._information for the first time when study is lock\n self._information = self.client.get_instances_id(self.id_)\n\n return self._information\n\n return self.client.get_instances_id(self.id_)", "def target_resource(self):\n return self._target_resource", "def _resource_dump(pe, res):\n rva = res.data.struct.OffsetToData\n size = res.data.struct.Size\n\n return pe.get_data(rva, size)", "def info(self):\n print self.id, self.type, self.xyz.get_xyz", "def usage_information(self):\n return self._usage_information", "def info() -> None:", "def _get_infores(source: str) -> str:\n if source in self.context.catalog:\n return self.context.catalog[source]\n else:\n infores: str = 
_process_infores(source)\n if infores:\n self.context.catalog[source] = infores\n return infores\n else:\n return \"\"", "def get_info(self):\n return {}", "def __repr__(self):\r\n return self.uri", "def resource_status(self) -> 'outputs.InstantSnapshotResourceStatusResponse':\n return pulumi.get(self, \"resource_status\")", "def name(self):\n\n return self.resource[\"metadata\"][\"name\"]" ]
[ "0.7746103", "0.7746103", "0.76440537", "0.7001793", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.686545", "0.6756598", "0.6750164", "0.66482615", "0.6627125", "0.6553682", "0.6521818", "0.64652663", "0.6450048", "0.6442991", "0.63966125", "0.63947666", "0.6393753", "0.6393753", "0.63681227", "0.6352751", "0.6350844", "0.6317342", "0.6305615", "0.6295532", "0.62758183", "0.62539893", "0.62459886", "0.62382597", "0.62354326", "0.62251353", "0.62251353", "0.6220586", "0.61779207", "0.6151666", "0.6150642", "0.6150642", "0.6147839", "0.6147839", "0.6147839", "0.6133916", "0.61214894", "0.6112529", "0.60607797", "0.6058028", "0.6054495", "0.6044078", "0.602688", "0.60230654", "0.60037214", "0.600051", "0.59956336", "0.59946823", "0.59854203", "0.59655184", "0.59423304", "0.5940428", "0.5916753", "0.59152496", "0.5907256", "0.5904732", "0.590381", "0.59034437", "0.5895525", "0.5895525", "0.58940214", "0.58899766", "0.5883736", "0.5868952", "0.5866518", "0.58644927", "0.5849613", "0.5849438", "0.5825993", "0.5815696", "0.5811836", "0.58045256", "0.5796444", "0.5791339", "0.5789975", "0.5769325", "0.5766697", "0.5762917", "0.57476383", "0.57412225", "0.5738121", "0.57340276", "0.5725352", "0.57217497", "0.57186913", "0.5713716", "0.5702322", "0.57022995", "0.5698046" ]
0.6483526
18
The identifier of the resource.
def resource_id(self) -> pulumi.Input[str]: return pulumi.get(self, "resource_id")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")", "def resourceid(self):", "def id(self):\n return self.raw_resource.uuid", "def id(self):\n return self.raw_resource[\"id\"]", "def resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def identifier(self):\n return self.__id", "def identifier(self):\n return self._id", "def get_resource_id(self, obj):\n return obj.id", "def resourceDocumentId(self, resource: Resource) -> str:", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self):\n return self._identifier", "def identifier(self) -> str:\n return self._identifier", "def identifier(self):\n\n return self._identifier", "def identifier(self):\r\n return self.id", "def get_objectID(self):\n return self.resource.uuid", "def getId(self):\n return self.identifier", "def identifier(self):\n return self._client.identifier", "def get_identifier(self) -> str:\n return self.identifier", "def identifier(self):\n\n return self.name", "def resource_id(self, value, match_option=None):\n return self.attributes(\"resource-id\", value, match_option)", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return 
pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")", "def id(self) -> str:\n return pulumi.get(self, \"id\")" ]
[ "0.87842387", "0.87842387", "0.87842387", "0.8679565", "0.8428031", "0.8255889", "0.82200164", "0.7965054", "0.7965054", "0.7965054", "0.7965054", "0.7965054", "0.7965054", "0.7965054", "0.7965054", "0.7965054", "0.79283077", "0.79256773", "0.78568465", "0.7837528", "0.77767205", "0.77767205", "0.77767205", "0.77767205", "0.77767205", "0.77767205", "0.77767205", "0.77767205", "0.77767205", "0.7744166", "0.77372473", "0.77177894", "0.76549655", "0.7644212", "0.76434886", "0.7639201", "0.76186806", "0.75738925", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831", "0.7501831" ]
0.81783426
14
The result of the assessment
def status(self) -> pulumi.Input['AssessmentStatusArgs']: return pulumi.get(self, "status")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def result(self):\n return self['result']", "def results(self):\n\n\t\tresults = {'answer':42}\n\n\t\treturn results", "def result(self):\n\n print('Ergebnisse: -------------\\n'\n 'Richtige Antworten:{} \\n'\n 'Falsche Antworten:{} \\n'.format(self.answer_right, self.answer_wrong))", "def result(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"result\")", "def output(self):\r\n return self.result", "def getResult(self):\n return self.ok", "def result(self, result):\n print(result)", "def result( self):\n return self._result", "def result(self):\n return self._result", "def result(self):\n return self._result", "def result(self):\n return self._result", "def result(self):", "def result(self):", "def evaluate(self):\n scores = []\n scores.append(self.word_analogy())\n print(\"Word Analogy (acc): \", scores[0])\n scores.append(self.word_similarity())\n print(\"Word Similarity (MSE): \", scores[1])\n scores.append(self.concept_categorization())\n print(\"Concept Categorization (purity): \", scores[2])\n scores.append(self.sentiment_analysis())\n print(\"Sentiment Analysis (acc): \", scores[3])\n return scores", "def _get_result(self):\r\n \r\n return self._result", "def final_result(self):\r\n print(\" Game \\t\\t Word \\t\\t Result \\t\\t Bad Guess \\t\\t Missed Letters \\t\\t Score \")\r\n print(\" ---- \\t\\t ---- \\t\\t ------ \\t\\t --------- \\t\\t -------------- \\t\\t ----- \")\r\n count = 0\r\n final_score = 0\r\n for x in self.instances:\r\n count += 1\r\n print(\" \"+str(count)+\" \\t\\t \"+str(x.get_word())+\" \\t\\t \"+str(x.get_result())+\" \\t\\t \"+str(x.get_wrong_guess())+\" \\t\\t\\t \"+str(x.get_wrong_letter())+\" \\t\\t\\t \"+str(round(x.get_score(),3)))\r\n final_score += x.get_score()\r\n\r\n print(\"\\nFinal Score : \"+str(round(final_score,3)))", "def result(self):\r\n raise NotImplementedError('method result() is not implemented')", "def outcome(self):\r\n return self._outcome", "def parse_verifier_result(self):\n stat = self.get_verifier_result(self.verification_id)\n try:\n num_executed = stat['num_tests'] - stat['num_skipped']\n try:\n self.result = 100 * stat['num_success'] / num_executed\n except ZeroDivisionError:\n self.result = 0\n if stat['num_tests'] > 0:\n LOGGER.info(\"All tests have been skipped\")\n else:\n LOGGER.error(\"No test has been executed\")\n return\n\n with open(os.path.join(self.res_dir, \"rally.log\"),\n 'r', encoding='utf-8') as logfile:\n output = logfile.read()\n\n success_testcases = []\n for match in re.findall(r'.*\\{\\d{1,2}\\} (.*?) \\.{3} success ',\n output):\n success_testcases.append(match)\n failed_testcases = []\n for match in re.findall(r'.*\\{\\d{1,2}\\} (.*?) \\.{3} fail',\n output):\n failed_testcases.append(match)\n skipped_testcases = []\n for match in re.findall(r'.*\\{\\d{1,2}\\} (.*?) \\.{3} skip(?::| )',\n output):\n skipped_testcases.append(match)\n\n self.details = {\"tests_number\": stat['num_tests'],\n \"success_number\": stat['num_success'],\n \"skipped_number\": stat['num_skipped'],\n \"failures_number\": stat['num_failures'],\n \"success\": success_testcases,\n \"skipped\": skipped_testcases,\n \"failures\": failed_testcases}\n except Exception: # pylint: disable=broad-except\n self.result = 0\n\n LOGGER.info(\"Tempest %s success_rate is %s%%\",\n self.case_name, self.result)", "def result(self): \n return self.body", "def calculate(self):\n\n return \"Yes\" if self.result else \"No\"", "def results(self):\r\n pass", "def _load_assessment_results_page(self):\r\n\r\n fmt = '{0:0.' 
+ str(Configuration.PLACES) + 'g}'\r\n\r\n self.txtAvailability.set_text(\r\n str(fmt.format(self._function_model.availability)))\r\n self.txtMissionAt.set_text(\r\n str(fmt.format(self._function_model.mission_availability)))\r\n self.txtMissionHt.set_text(\r\n str(fmt.format(self._function_model.mission_hazard_rate)))\r\n self.txtPredictedHt.set_text(\r\n str(fmt.format(self._function_model.hazard_rate)))\r\n\r\n self.txtMMT.set_text(str(fmt.format(self._function_model.mmt)))\r\n self.txtMCMT.set_text(str(fmt.format(self._function_model.mcmt)))\r\n self.txtMPMT.set_text(str(fmt.format(self._function_model.mpmt)))\r\n\r\n self.txtMissionMTBF.set_text(\r\n str(fmt.format(self._function_model.mission_mtbf)))\r\n self.txtMTBF.set_text(str(fmt.format(self._function_model.mtbf)))\r\n self.txtMTTR.set_text(str(fmt.format(self._function_model.mttr)))\r\n\r\n return False", "def get_results(self):\n return self.result", "def present_result(self, parameters, result):\n print \"Result for parameters %s: %.7f\" % (repr(parameters), result)", "def result(self):\n return self.a", "def results(self):\n pass", "def get_result(self):\n if len(self.result_transcripts) > 0:\n return self.result_transcripts[0]\n else:\n return ''", "def status(self) -> pulumi.Output['outputs.AssessmentStatusResponse']:\n return pulumi.get(self, \"status\")", "def print_result(self):\n print(\"Final results: \")\n for i in range(1, len(self.agents) + 1):\n agent = self.agents[i-1]\n print(agent.name + \": {} wins\".format(self.results[agent.name]))", "def __get_evaluation_summary(self):\n self.logger.debug(\n f\"Getting summary for assignment {self.assignment_id}, eval_id {self.eval_id}\"\n )\n result = self.interactor.get_policy_eval_summary(self.assignment_id)\n\n if result.status_code != 200:\n self.logger.debug(\n f\"Could not get summary for assignment {self.assignment_id} for eval_id {self.eval_id} - {result.text}\"\n )\n raise Exception(\n f\"Summary could not be retrived: {result.status_code} - {result.text}\"\n )\n\n return result.json()[\"value\"][0][\"results\"]", "def score(self):\n return 1 if self.succeeded() else 0", "def Results(self):\n return self.data", "def Results(self):\n return self.data", "def GetResult(self, playerjm):\n return self.score / len(self.scores)", "def report(self, result):\n raise NotImplementedError", "def status(self) -> pulumi.Output['outputs.AssessmentStatusResponseResponse']:\n return pulumi.get(self, \"status\")", "def complain_result(self) -> Optional[str]:\n utils.logger.debug(f\"vote_result({self.complain_votes[self.round].get_summary()})\")\n if self.complain_votes[self.round].is_completed():\n vote_result = self.complain_votes[self.round].get_result()\n return vote_result.hex_hx()\n else:\n return None", "def show_result(dict_result):\r\n\r\n\tcorrects = dict_result[\"Corrects\"]\r\n\twrongs = dict_result[\"Wrongs\"]\r\n\tn_questions = dict_result[\"n_questions\"]\r\n\r\n\tprint(\"\\n\\n\",\"-\"*10,\"Final Result\", \"-\"*10)\r\n\r\n\tfinal_note = (len(corrects)*100)/n_questions\r\n\tprint(\"\\nResult: \", final_note*10)\r\n\r\n\tif final_note*10 > 600:\r\n\t\tprint(\"\\nYOU PASS!\")\r\n\telse:\r\n\t\tprint(\"\\nI'm sorry, you don't pass, but please try again!\")\r\n\r\n\tif len(wrongs) > 0:\r\n\t\tprint(\"\\nSome questions for review:\", end=\" \")\r\n\t\tfor i in wrongs:\r\n\t\t\tif i == wrongs[-1]:\r\n\t\t\t\tprint(i)\r\n\t\t\telse:\r\n\t\t\t\tprint(i, end=\", \")", "def get_outcome(self):\n return self.__outcome", "def result(self) -> Optional[pulumi.Input[str]]:\n return 
pulumi.get(self, \"result\")", "def evaluate(self) :\n pass", "def reportResult(self):\n return True;", "def __str__(self):\n return self.result", "def result(self):\n return (\"Recall@\" + str(self.length) + \": \"), (self.hit / self.test)", "def scan_result(self):\n assert 'masscan' in self._scan_result, 'Do a scan before trying to get result !'\n\n return self._scan_result", "def generateFinalResult(self):\n if self.__testResult == 'FAIL':\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n elif self.__testResult == 'PASS':\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY)\n elif self.__testResult == 'NONE':\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY) \n self.__testResult = 'PASS'\n #else:\n total_count = int(TestScriptSymbolTable.get_value_from_sym_tab(\"total_count\", TestScriptSymbolTable.test_result_tab))\n pass_count = int(TestScriptSymbolTable.get_value_from_sym_tab(\"pass_count\", TestScriptSymbolTable.test_result_tab))\n fail_count = int(TestScriptSymbolTable.get_value_from_sym_tab(\"fail_count\", TestScriptSymbolTable.test_result_tab))\n conditional_chk_flag = int(TestScriptSymbolTable.get_value_from_sym_tab(\"conditional_chk_flag\", TestScriptSymbolTable.test_result_tab))\n num_of_pass_required = int(TestScriptSymbolTable.get_value_from_sym_tab(\"num_of_pass_required\", TestScriptSymbolTable.test_result_tab))\n \n if total_count >= 1:\n if conditional_chk_flag == 1:\n if num_of_pass_required <= pass_count:\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'PASS'\n else:\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'FAIL'\n else:\n if fail_count > 0:\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'FAIL'\n else:\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'PASS'\n else:\n if GlobalConfigFiles.curr_tc_name != \"\":\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n logging.debug(\"\\n TEST COMPLETED without FINAL RESULT...\")\n\n self.__testResult = 'FAIL'\n\n self.tmsPacket.TestResult = self.__testResult\n if GlobalConfigFiles.curr_tc_name != \"\":\n logging.info(\"\\n FINAL TEST RESULT ---> %15s\", self.__testResult)\n logging.info(' END: TEST CASE [%s]', GlobalConfigFiles.curr_tc_name)\n\n Util.set_color(Util.FOREGROUND_WHITE)\n GlobalConfigFiles.test_result = self.__testResult\n\n self.tmsPacket.TimeStamp = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime())\n if GlobalConfigFiles.curr_tc_name != \"\":\n self.tmsPacket.writeTMSJson()\n\n return", "def _to_string(self):\n self.results.print_results()\n self.results.print_comparison()", "def get_result(self) -> Any:\n ...", "def evaluate(self):\n pass", "def evaluate(self):\n pass", "def _get_result(self):\n try:\n # get test data\n test_id = self._feature_processor.test_data_id\n test_feature = self._feature_processor.test_data_feature\n test_target = self._feature_processor.test_data_target\n\n # process data\n test_feature = test_feature.astype(\"float64\", errors='ignore')\n\n # predict\n predict_res = self._model.predict(test_feature)\n predict_res_df = pd.DataFrame(predict_res, columns=[PredictConstance.PRE])\n proba_res = self._model.predict_proba(test_feature)\n proba_res_df = pd.DataFrame([str(x) for x in proba_res],\n columns=[PredictConstance.PROBA])\n\n res = [test_id, predict_res_df, proba_res_df]\n # get model score\n if test_target is not None:\n 
res.append(test_target)\n model_auc = pre_utils.PredictUtils.get_roc_score(test_target, proba_res)\n model_score = pre_utils.PredictUtils.get_model_score(test_target, predict_res)\n model_score.update(model_auc)\n with open(os.path.join(self._result_path, PredictConstance.TEST_SCORE), \"w\") as ftp:\n ftp.write(str(model_score))\n\n # joint predict result\n self._joint_predict_result(res)\n\n return True\n except Exception as err:\n self.managerlogger.logger.error(\"base ml get result error: %s\" % err)\n self.errorlogger.logger.error(\"base ml get result error:\\n %s\" % traceback.format_exc())\n return False", "def _get_output_for_task_success(self, attempted, succeeded, total, student=None):\r\n # view task entry for task in progress\r\n instructor_task = self._create_progress_entry(student)\r\n task_id = instructor_task.task_id\r\n mock_result = Mock()\r\n mock_result.task_id = task_id\r\n mock_result.state = SUCCESS\r\n mock_result.result = {\r\n 'attempted': attempted,\r\n 'succeeded': succeeded,\r\n 'total': total,\r\n 'action_name': 'rescored',\r\n }\r\n output = self._test_get_status_from_result(task_id, mock_result)\r\n return output", "def getResults():", "def test_call_result_as_dict(self):\r\n exp_assignments = rdp_test1_expected_dict\r\n min_confidence = self.default_app.Params['Confidence']\r\n\r\n # Since there is some variation in the assignments, run\r\n # 10 trials and make sure we get the expected result at least once\r\n num_trials = 10\r\n unverified_seq_ids = set(exp_assignments.keys())\r\n for i in range(num_trials):\r\n obs_assignments = self.default_app(self.tmp_seq_filepath)\r\n for seq_id in list(unverified_seq_ids):\r\n obs_assignment, obs_confidence = obs_assignments[seq_id]\r\n exp_assignment, exp_confidence = exp_assignments[seq_id]\r\n self.assertTrue(obs_confidence >= min_confidence)\r\n if obs_assignment == exp_assignment:\r\n unverified_seq_ids.remove(seq_id)\r\n if not unverified_seq_ids:\r\n break\r\n\r\n messages = []\r\n for seq_id in unverified_seq_ids:\r\n messages.append(\r\n \"Unable to verify %s in %s trials\" % (seq_id, num_trials))\r\n messages.append(\" Expected: %s\" % exp_assignments[seq_id][0])\r\n messages.append(\" Observed: %s\" % obs_assignments[seq_id][0])\r\n messages.append(\" Confidence: %s\" % obs_assignments[seq_id][1])\r\n\r\n # make sure all taxonomic results were correct at least once\r\n self.assertFalse(unverified_seq_ids, msg='\\n'.join(messages))", "def get_result(self):\n\n x = self.rps_data[0][1].upper()\n y = self.rps_data[1][1].upper()\n if x[0] == '|':\n x = x[2:3]\n if y[0] == '|':\n y = y[2:3]\n if x == y:\n self.write_scores(\"Draw\")\n return \"Draw\"\n elif (x == 'R' and y == 'S') or (x == 'S' and y == 'P') or (x == 'P' and y == 'R'):\n self.write_scores(\"First\")\n return \"First\"\n else:\n self.write_scores(\"Second\")\n return \"Second\"", "def _AddResult(self):\n if not self._results:\n result = analyzer_result.AnalyzerResult()\n result.attribute_name = 'test_result'\n result.attribute_value = 'is_vegetable'\n self._results.append(result)", "def result(self):\n if self.__json:\n return self.__json[\"result\"]\n else:\n return {}", "def getTestResults():", "def evaluate(self) -> None:\n eval_results = {'segmentation': self.evaluate_segmentation()}\n if self.task == 'tracking':\n eval_results['tracking'] = self.evaluate_tracking()\n self.save_result(eval_results)", "def result(self):\n assert(self.__complete)\n return self.__result", "def get_results(self):\n error_dict = {'error_code_test': 
self.error_code_test,\n 'error_text_test': self.error_text_test}\n\n return self.testresults, error_dict, self.checkstats", "def get_eval_result(self):\n return self.content_eval", "def test_pass_result(self):\r\n data = {\r\n \"EdX-ID\": self.receipt_id,\r\n \"Result\": \"PASS\",\r\n \"Reason\": \"\",\r\n \"MessageType\": \"You have been verified.\"\r\n }\r\n json_data = json.dumps(data)\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'), data=json_data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',\r\n HTTP_DATE='testdate'\r\n )\r\n attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)\r\n self.assertEqual(attempt.status, u'approved')\r\n self.assertEquals(response.content, 'OK!')", "def get_result(self):\n config = self.bisect_config\n results_confidence = 0\n if self.culprit:\n results_confidence = self.api.m.math_utils.confidence_score(\n self.lkgr.values, self.fkbr.values)\n\n if self.failed:\n status = 'failed'\n elif self.bisect_over:\n status = 'completed'\n else:\n status = 'started'\n\n aborted_reason = None\n if self.failed_initial_confidence:\n aborted_reason = _FAILED_INITIAL_CONFIDENCE_ABORT_REASON\n elif self.failed_direction:\n aborted_reason = _DIRECTION_OF_IMPROVEMENT_ABORT_REASON\n return {\n 'try_job_id': config.get('try_job_id'),\n 'bug_id': config.get('bug_id'),\n 'status': status,\n 'buildbot_log_url': self._get_build_url(),\n 'bisect_bot': self.get_perf_tester_name(),\n 'command': config['command'],\n 'test_type': config['test_type'],\n 'metric': config['metric'],\n 'change': self.relative_change,\n 'score': results_confidence,\n 'good_revision': self.good_rev.commit_hash,\n 'bad_revision': self.bad_rev.commit_hash,\n 'warnings': self.warnings,\n 'aborted_reason': aborted_reason,\n 'culprit_data': self._culprit_data(),\n 'revision_data': self._revision_data()\n }", "def final_report(self):\n print('Final Count for', self.reason, self.successes, 'of', self.tests, 'tests passed')", "def save_result(self):\n self.print_to_console()", "def evaluate(self):\n predictions = self.model.predict(self.test[0])\n accuracy = accuracy_score(self.test[1], predictions)\n print(\"Accuracy:\", str(accuracy * 100) + \"%\")\n self.plot_results(predictions)", "def result_summary(self):\r\n summary = ['Ran %d commands to test %d scripts. %d of these commands '\r\n 'failed and %d scripts could not be tested due to errors.' 
%\r\n (self.total_commands, self.total_scripts,\r\n self._num_failures(), self._num_script_errors())]\r\n\r\n if self._num_failures() > 0:\r\n summary.append('Failed scripts were: %s' %\r\n ' '.join(self._failed_scripts()))\r\n\r\n for error_info in self.script_errors.values():\r\n if len(error_info[0]) > 0:\r\n summary.append(self._format_script_error_summary(\r\n error_info[0], error_info[1]))\r\n\r\n if self.warnings:\r\n summary.append('Warnings:')\r\n for warning in self.warnings:\r\n summary.append(' ' + warning)\r\n\r\n return '\\n'.join(summary)", "def result(self):\n return (\"HitRate@\" + str(self.length) + \": \"), (self.hit / self.test)", "def log_readable_accessibility_result(self, type):\n # logger.info(self.axe_instance.report(self.results[type]))\n type_results = self.axe_instance.report(self.results[type])\n results = type_results.split(\"Rule Violated:\")\n\n for result in results:\n if \"Impact Level\" in result:\n final_result = result.strip()\n chunks = final_result.split(\"\\n\")\n\n html_text = \"\"\"\n <style>\n #demo table, #demo th, #demo td{\n border: 1px dotted black;\n border-collapse: collapse;\n table-layout: auto;\n }\n </style>\n <table id=\"demo\" style=\"width:100%%\">\n <tr>\n <th style=\"width:50%%\">Issue</th>\n <th style=\"width:5%%\">URL</th>\n <th style=\"width:7%%\">Impact</th>\n <th style=\"width:10%%\">Tags</th>\n </tr>\n <tr>\n <td>%s</td>\n <td style=\"text-align:center\"><a href=\"%s\">Link</a></td>\n <td style=\"text-align:center\">%s</td>\n <td style=\"text-align:center\">%s</td>\n </tr>\n </table>\n <table id=\"demodesc\" style=\"width:100%%\">\n <tr>\n <th style=\"text-align:left\">Element Affected</th>\n </tr>\n <tr>\n <td>%s</td>\n </tr>\n </table>\n \"\"\"%(str(chunks[0]), (chunks[1].split(\"URL: \"))[-1], (chunks[2].split(\"Impact Level: \"))[-1],\n (chunks[3].split(\"Tags: \"))[-1], str((final_result.split(\"\\n\\tElements Affected:\\n\\t\"))[-1]))\n logger.info(html_text, html=True)\n\n # for index in range(len(chunk_results)):\n # logger.info(chunk_results[index])", "def result(self, result, limit):\n\n # If score is empty, this a direct query\n score = result[\"score\"]\n score = score if score is not None else 1.0\n\n self.console.print(\n f\"[bright_green]Question (by {result['questionuser']}): {result['question']} [{score:4f}][/bright_green]\",\n highlight=False,\n )\n self.console.print(f\"Id: {result['id']}\", highlight=False)\n self.console.print(f\"Last Activity: {result['date']}\", highlight=False)\n self.console.print(f\"Tags: {result['tags']}\")\n self.console.print(f\"Answer (by {result['answeruser']}):\\n\", highlight=False)\n self.console.print(self.markdown(result[\"answer\"]))\n self.console.print(f\"\\nReference: {result['reference']}\")\n\n # Print results divider\n if limit > 1:\n self.console.rule()", "def get_exam_status(self, course):\n return {'completed': False, 'exist': False, 'progress': 53}", "def getPredictedResult(self):\n output = self.svclassifier.predict([self.inputData])\n return output[0]", "def vulnerability_assessment(self) -> pulumi.Output['outputs.VulnerabilityAssessmentNoteResponse']:\n return pulumi.get(self, \"vulnerability_assessment\")", "def test_print_results(self):\n calculated = super().predict_and_print()\n self.assertEqual(calculated, EXP_PRINT_OUTPUT_BASE.format(.18, .1, 0.186, self.test_model.model.train_time) +\n \"Max tree max_depth: 1\\n\"\n \"Number of n_estimators: 1\\n\"\n \"Impurity method: entropy\\n\")", "def evaluation( self ) :\n\n return( self.__evaluation )", "def 
evaluate(self):\n try:\n self._evaluate()\n except Exception as e:\n if str(e) == \"assignment destination is read-only\":\n log.exception(\n \"Encountered error during scenario evaluation. Be sure \"\n + \"that the classifier's predict() isn't directly modifying the \"\n + \"input variable itself, as this can cause unexpected behavior in ART.\"\n )\n else:\n log.exception(\"Encountered error during scenario evaluation.\")\n sys.exit(1)\n\n if self.results is None:\n log.warning(f\"{self._evaluate} did not set self.results to a dict\")\n\n self.save()", "def score(self) -> Tuple[bool, str, float]:\n\n num_miss = np.sum(self.algorithm_data[:,FieldRolls.StepResult] != self.algorithm_data[:,FieldRolls.ResultPresentation])\n num_miss_perc = num_miss * 100/self.algorithm_data.shape[0]\n return True, \"\", num_miss_perc", "def print_output(self):\n print(\"Reference score: \" + str(self.PotTax_reference.sum().TFI))\n print(\"Intervention score: \" + str(self.PotTax_intervention.sum().TFI))\n return", "def evaluation(self):\n return self._evaluation", "def print_outcome(self) -> None:\n pass", "def results():\n \n to_predict_list = request.form.to_dict() \n to_predict_list = list(to_predict_list.values()) \n to_predict_list = list(map(float, to_predict_list)) \n result = ValuePredictor(to_predict_list) \n if int(result)== 1: \n prediction ='Run Martha, or you\\'re gonna get the sugar.'\n else: \n prediction ='Go ahead and have another donut Martha, you\\'re all good.' \n return render_template(\"results.html\",\n year=datetime.now().year,\n prediction = prediction\n )", "def show_results(self):\n print(\"Survey results:\")\n for response in self.responses:\n print('- ' + response)", "def test_self_assessment(self):\r\n\r\n # Navigate to the self-assessment problem and submit an essay\r\n self.course_nav.go_to_sequential('Self-Assessed')\r\n self.submit_essay('self', 'Censorship in the Libraries')\r\n\r\n # Fill in the rubric and expect that we get feedback\r\n rubric = self.open_response.rubric\r\n\r\n self.assertEqual(rubric.categories, [\"Writing Applications\", \"Language Conventions\"])\r\n rubric.set_scores([0, 1])\r\n rubric.submit('self')\r\n\r\n self.assertEqual(rubric.feedback, ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # The first score is self-assessment, which we've answered, so it's 1/2\r\n # The other scores are AI- and peer-assessment, which we haven't answered so those are 0/2\r\n self.assertEqual(scores, [(1, 2), (0, 2), (0, 2)])", "def _send_lti2_outcome(self):\r\n payload = textwrap.dedent(\"\"\"\r\n {{\r\n \"@context\" : \"http://purl.imsglobal.org/ctx/lis/v2/Result\",\r\n \"@type\" : \"Result\",\r\n \"resultScore\" : {score},\r\n \"comment\" : \"This is awesome.\"\r\n }}\r\n \"\"\")\r\n data = payload.format(score=0.8)\r\n return self._send_lti2(data)", "def result(self) -> Item:\n return self._result", "def print_results(self) -> None:\n print(\"=\" * 70, file=sys.stderr)\n total = 0.0\n max_points = 0.0\n for problem in self.problems:\n total += problem.run_tests()\n max_points += problem.max_grade\n print(f\"Total Grade: {total}/{max_points}\", file=sys.stderr)", "def present_solved_equation(self, result):\n print(\"the result to the equation is:\", result)", "def display_results(self):\n print \"Resultats pour le fichier : \\n================================\"\n print \"Moyenne arithmetique : \", self.results['arithAvg']\n print \"Moyenne 
quadratique : \", self.results['quadAvg']\n print \"Moyenne geometrique : \", self.results['geoAvg']\n print \"Moyenne harmonique : \", self.results['harmAvg']\n print \"Ecart a la moyenne : \", self.results['std']\n print \"Valeure maximale : \", self.results['max']\n print \"Valeurs minimale : \", self.results['min']\n print \"Variance : \", self.results['var']\n print \"Moments d'ordre R (jusqu'a 4) : \", self.results['momentsR']\n print \"Moments centrés d'ordre R (jusqu'a 4) : \", self.results['centralMomentsR']\n print \"Dissymetrie : \", self.results['dissym']\n print \"Coefficient d'applatissement : \", self.results['flattening']\n print \"Ecart type : \", self.results['ecartType']", "def __repr__(self):\n return (f'rsatoolbox.inference.Result\\n'\n f'containing evaluations for {self.n_model} models\\n'\n f'evaluated using {self.cv_method} of {self.method}'\n )", "def get_ideal_result(self):\n sim = Aer.get_backend('qasm_simulator')\n self.stats.ideal_distribution = execute(self.compiled_circ, sim).result().get_counts()", "def get_result(wd):\n try:\n result = wd.find_element_by_id(\"js-score\").text\n return result\n except:\n return \"N/A Result\"", "def setCheckResult(self, rlt):\n total_count = TestScriptSymbolTable.get_value_from_sym_tab(\"total_count\", TestScriptSymbolTable.test_result_tab) + 1\n TestScriptSymbolTable.insert_sym_tab(\"total_count\", total_count, TestScriptSymbolTable.test_result_tab)\n \n #if rlt == 'PASS':\n if 'PASS' in rlt:\n pass_count = TestScriptSymbolTable.get_value_from_sym_tab(\"pass_count\", TestScriptSymbolTable.test_result_tab) + 1\n TestScriptSymbolTable.insert_sym_tab(\"pass_count\", pass_count, TestScriptSymbolTable.test_result_tab)\n else:\n fail_count = TestScriptSymbolTable.get_value_from_sym_tab(\"fail_count\", TestScriptSymbolTable.test_result_tab) + 1\n TestScriptSymbolTable.insert_sym_tab(\"fail_count\", fail_count, TestScriptSymbolTable.test_result_tab)", "def _process_results(self):\n self.portfolio.create_backtest_result_dataframe()\n stats = self._show_stats()\n return stats", "def test_get_results_simple(self):\n\t\ttest = sentiment.LibraryRun(self.text3, self.lib)\n\t\ttest.do_run()\n\t\tobj_ut = test.get_results()\n\t\tself.assertEqual(obj_ut, ['.text id\\t.text score\\tneg hits\\t\\\npos hits\\ttotal hits\\ttotal wordcount\\n', '100\\t-1\\t2\\t0\\t2\\t7\\n'])", "def get_test_results(self):\n element = self.find_element_by_id(self.results_id, wait=True)\n\n if element:\n return element.text\n else:\n return False", "def _get_problem_report_results_str(self):\n return 'curr_rew: %0.3f, best_rew: %0.3f'%(self.curr_reward, self.curr_best_reward)", "def val(self):\n return self.output", "def val(self):\n return self.output", "def returnData(self):\r\n return self.returnRes" ]
[ "0.7455872", "0.7048662", "0.687895", "0.68733793", "0.6868782", "0.6851356", "0.68279386", "0.6774118", "0.67248684", "0.67248684", "0.67248684", "0.65798163", "0.65798163", "0.6516882", "0.6478505", "0.6459931", "0.645169", "0.64418316", "0.64348304", "0.6432639", "0.639982", "0.6385924", "0.6374701", "0.6368318", "0.63612336", "0.63388896", "0.6336285", "0.6308378", "0.6273809", "0.619285", "0.61546695", "0.61486435", "0.6140451", "0.6140451", "0.61257154", "0.6117071", "0.6113301", "0.6112252", "0.61075246", "0.6106663", "0.61027324", "0.6100377", "0.60927457", "0.60904044", "0.6089315", "0.6081166", "0.6078048", "0.60617936", "0.6061454", "0.6054585", "0.6054585", "0.60481507", "0.60448974", "0.6044641", "0.60236734", "0.6020184", "0.6013553", "0.60089904", "0.59969735", "0.5990609", "0.5970148", "0.5963881", "0.5962178", "0.59620976", "0.5957298", "0.59538335", "0.59504116", "0.5945629", "0.5922979", "0.59061986", "0.59046936", "0.58985126", "0.5895648", "0.58692396", "0.5868474", "0.58612394", "0.5856009", "0.5855832", "0.5853069", "0.5851778", "0.5851156", "0.5845176", "0.5840127", "0.5829681", "0.5828683", "0.58258647", "0.58162", "0.5812824", "0.5812594", "0.5805723", "0.5802141", "0.57928103", "0.57849693", "0.5783451", "0.57800597", "0.5772011", "0.57669103", "0.57668173", "0.57654524", "0.57654524", "0.57650846" ]
0.0
-1
Additional data regarding the assessment
def additional_data(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]: return pulumi.get(self, "additional_data")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def student_view_data(self, context=None):\n return {\n 'title': self.title,\n 'description': self.description,\n 'embed_code': self.embed_code,\n 'highres_url': self.highres_url,\n 'lowres_url': self.lowres_url,\n }", "def get_assessment(self):\n if not self.has_assessment:\n raise IllegalState()\n else:\n raise Unimplemented()", "def get_assessment_metadata(self):\n return Metadata(**settings.METADATA['assessment_id'])", "def details(self):\n pass", "def additional_data(self):\n return self._additional_data", "def get_sample_award_badge_data(self):\n return {\n \"recipient\": {\n \"identity\": \"[email protected]\"\n },\n \"notify\": True,\n \"evidence\": [{\n \"url\": \"http://example.com/\",\n \"narrative\": \"Joe completed all...\"\n }]\n }", "def generate_extra_data(self):\n self.data[\"male_initial\"], self.data[\"female_initial\"] = \\\n self.get_initial_student_count()\n \n date_line = '<p class=\"report-title\"> %s</p>' \\\n %(self.start_date.strftime(\"%B %Y\"))\n row1 = \"\"\"\n <table>\n <tr class=\"tblRow\"><td>%s</td><td>Enrollment For Year</td>\n <td>Male:</td><td>%d</td><td>Female:</td><td>%d</td>\n <td>Total:</td><td>%d</td></tr>\n \"\"\" %(unicode(self.school), self.data[\"male_initial\"], \n self.data[\"female_initial\"], \n self.data[\"male_initial\"] + self.data[\"female_initial\"])\n row2 = \"\"\"\n <tr class=\"tblOddRow\"><td>%s</td><td>Enrollment For Month</td>\n <td>Male:</td><td>%d</td><td>Female:</td><td>%d</td>\n <td>Total:</td><td>%d</td></tr>\n \"\"\" %(unicode(self.section), self.data[\"male_current\"], \n self.data[\"female_current\"],\n self.data[\"male_current\"] + self.data[\"female_current\"])\n row3 = \"\"\"\n <tr class=\"tblRow\"><td>%s</td><td>Average Attendance</td>\n <td>Male:</td><td>%.1f</td><td>Female:</td><td>%.1f</td>\n <td>Total:</td><td>%.1f</td></tr>\n \"\"\" %(\"Secondary\", self.data[\"aa_male\"], self.data[\"aa_female\"] ,\n self.data[\"aa_combined\"])\n row4 =\"\"\"\n <tr class=\"tblOddRow\"><td>%s</td><td>Percentage of Attendance</td>\n <td>Male:</td><td>%.1f %% </td><td>Female:</td><td>%.1f %% </td>\n <td>Total:</td><td>%.1f %% </td></tr>\n \"\"\" %(unicode(self.school.municipality), self.data[\"pa_male\"], \n self.data[\"pa_female\"], self.data[\"pa_combined\"])\n row5 = \"\"\"\n <tr class=\"tblRow\"><td>School Days: %d</td><td>Percentage of Enrollment</td>\n <td>Male:</td><td>%.1f %% </td><td>Female:</td><td>%.1f %% </td>\n <td>Total:</td><td>%.1f %% </td></tr>\n </table>\n \"\"\" %(self.data[\"num_school_days\"], \n self.data[\"male_current\"] * 100.0 / self.data[\"male_initial\"],\n self.data[\"female_current\"] * 100.0 / \n self.data[\"female_initial\"],\n (self.data[\"male_current\"] + self.data[\"female_current\"]) * \n 100.0 /\n (self.data[\"male_initial\"] + self.data[\"female_initial\"]))\n self.extra_data = date_line + row1 + row2 + row3 + row4 + row5", "def getInfo(self):\n self.name, self.description = achievements[self.id]", "def getDetail(self):\n\t\t\n\t\treturn (super().setParameters(0,self.getDefense(),0))\n\t\t\n\t\t#return \"\\n#########################################################\\n\"+\"\\nItem of Defense, Name of item:\"+self.getName()+\"\\nCapacity of defense:\"+str(self.getDefense())+\"\\nCapacity of attack:0 \\n Capacity of heal:0 \\n\"+\"#########################################################\\n\"", "def info():\n # -------- Task 1 -------------------------\n # Please complete the following information\n\n return {\"agent name\": \"?\", # COMPLETE HERE\n \"student name\": [\"?\"], # COMPLETE HERE\n 
\"student number\": [\"?\"]} # COMPLETE HERE", "def course_info(self):\n print(\"Course name: {}\".format(self._course_name))\n print(\"Lead teacher: {}\".format(self._teacher))\n\n if len(self._students) == 0:\n print(\"Course does not enrolled by any student\")\n else:\n print(\"Enrolled: {}/{}\".format(len(self._students), self._total_place))", "def get_infos(self):\n infos = dict()\n infos[\"dataset\"] = self.dataset_name\n infos[\"task\"] = \"separate_noisy\"\n infos[\"licenses\"] = [librispeech_license, tac_license]\n return infos", "def report_data(self):\n return {}", "def AddAncillaryData(self, ds):\n self.IsAncillaryData = True\n self.AncillaryData = ds", "def details(self):\n raise NotImplementedError()", "def student_state(self):\n submission = self.get_submission()\n if submission:\n uploaded_submission = submission.get(\"answer\").get(\"filename\", None)\n if uploaded_submission:\n uploaded = {\"filename\": submission['answer']['filename']}\n else:\n uploaded = None\n else:\n uploaded = None\n\n submission = self.get_question()\n if submission:\n uploaded_submission = submission.get(\"question\").get(\"filename\", None)\n if uploaded_submission:\n quploaded = {\"filename\": submission['question']['filename']}\n else:\n quploaded = None\n else:\n quploaded = None\n\n submission = self.get_solution()\n if submission:\n uploaded_submission = submission.get(\"solution\").get(\"filename\", None)\n if uploaded_submission:\n suploaded = {\"filename\": submission['solution']['filename']}\n else:\n suploaded = None\n else:\n suploaded = None\n \n \n \n return {\n \"display_name\": self.title,\n \"question\":self.question,\n \"uploaded\": uploaded,\n \"quploaded\":quploaded,\n \"suploaded\":suploaded,\n \"raw_answer\":self.raw_answer,\n \"raw_question\":self.raw_question,\n \"score\": self.score,\n \"weight\":self.weight,\n \"attempts\": self.attempts,\n \"max_attempts\": self.max_attempts,\n }", "def info(self):\n self.update_info()\n print('Number of electrodes: ' + str(self.n_elecs))\n print('Recording time in seconds: ' + str(self.dur))\n print('Sample Rate in Hz: '+ str(self.sample_rate))\n print('Number of sessions: ' + str(self.n_sessions))\n print('Date created: ' + str(self.date_created))\n print('Meta data: ' + str(self.meta))", "def get_details(self):", "def _get_information(self):\n pass", "def _metadata(self):\n meta = super()._metadata\n meta.update({\n \"name\": self.name,\n \"lead_in_time\": self.lead_in_time,\n \"amplification\": self.amplification,\n \"amplifier_clipping\": self.amplifier_clipping,\n \"power_threshold\": self.power_threshold,\n })\n return meta", "def get_assessment_part_mdata():\n return {\n 'assessment_part': {\n 'element_label': {\n 'text': 'assessment part',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'accepts an osid.id.Id object',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_id_values': [''],\n 'syntax': 'ID',\n 'id_set': [],\n },\n 'assessment': {\n 'element_label': {\n 'text': 'assessment',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'accepts an osid.id.Id object',\n 'languageTypeId': 
str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_id_values': [''],\n 'syntax': 'ID',\n 'id_set': [],\n },\n 'weight': {\n 'element_label': {\n 'text': 'weight',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'enter a cardinal value',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_cardinal_values': [None],\n 'syntax': 'CARDINAL',\n 'minimum_cardinal': None,\n 'maximum_cardinal': None,\n 'cardinal_set': []\n },\n 'allocated_time': {\n 'element_label': {\n 'text': 'allocated time',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'enter a valid duration object.',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_duration_values': [None],\n 'syntax': 'DURATION',\n 'date_time_set': [],\n },\n }", "def vulnerability_assessment(self) -> pulumi.Output['outputs.VulnerabilityAssessmentNoteResponse']:\n return pulumi.get(self, \"vulnerability_assessment\")", "def additional_data(self):\n # type: () -> string_types\n return self._additional_data", "def get_assessments_metadata(self):\n return Metadata(**settings.METADATA['assessment_ids'])", "def ExtraInfo(self) -> object:", "def post_add_assessment(self):\n course = courses.Course(self)\n assessment = course.add_assessment()\n course.save()\n self.redirect(self.get_action_url(\n 'edit_assessment', key=assessment.unit_id,\n extra_args={'is_newly_created': 1}))", "def test_instructor_assessment(self):\r\n\r\n # Navigate to the AI-assessment problem and submit an essay\r\n # We have configured the stub to simulate that this essay will be staff-graded\r\n self.course_nav.go_to_sequential('AI-Assessed')\r\n self.submit_essay('ai', 'Censorship in the Libraries')\r\n\r\n # Refresh the page to get the updated feedback\r\n # then verify that we get the feedback sent by our stub XQueue implementation\r\n self.assertEqual(self.get_asynch_feedback('ai'), ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # First score is the self-assessment score, which we haven't answered, so it's 0/2\r\n # Second score is the AI-assessment score, which we have answered, so it's 1/2\r\n # Third score is peer-assessment, which we haven't answered, so it's 0/2\r\n self.assertEqual(scores, [(0, 2), (1, 2), (0, 2)])", "def vulnerability_assessment(self) -> Optional[pulumi.Input['VulnerabilityAssessmentNoteArgs']]:\n return pulumi.get(self, \"vulnerability_assessment\")", "def get_details(self):\n raise Exception(\"bad details\")", "def summary_data(self):\n data = {\n \"total\": self.total,\n \"card_one_value\": self.cards[0].value,\n \"card_two_value\": self.cards[1].value,\n \"card_one_rank\": self.cards[0].rank,\n \"card_two_rank\": self.cards[1].rank,\n \"cards\": \" \".join([str(card) for card 
in self.cards]),\n \"soft\": int(self.soft),\n \"from_split\": int(self.from_split),\n \"blackjack\": int(self.blackjack),\n \"num_cards\": len(self.cards),\n \"start_total\": self.cards[0] + self.cards[1],\n \"wager\": int(self.wager),\n \"insurance\": int(self.insurance),\n \"surrender\": int(self.surrender),\n \"double_down\": int(self.double_down),\n \"num_aces\": self.num_aces,\n \"num_hard_aces\": self.num_hard_aces\n }\n return data", "def __init__(__self__, *,\n assessment_type: str,\n display_name: str,\n policy_definition_id: str,\n severity: str,\n categories: Optional[Sequence[str]] = None,\n description: Optional[str] = None,\n implementation_effort: Optional[str] = None,\n partner_data: Optional['outputs.SecurityAssessmentMetadataPartnerDataResponse'] = None,\n preview: Optional[bool] = None,\n remediation_description: Optional[str] = None,\n threats: Optional[Sequence[str]] = None,\n user_impact: Optional[str] = None):\n pulumi.set(__self__, \"assessment_type\", assessment_type)\n pulumi.set(__self__, \"display_name\", display_name)\n pulumi.set(__self__, \"policy_definition_id\", policy_definition_id)\n pulumi.set(__self__, \"severity\", severity)\n if categories is not None:\n pulumi.set(__self__, \"categories\", categories)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if implementation_effort is not None:\n pulumi.set(__self__, \"implementation_effort\", implementation_effort)\n if partner_data is not None:\n pulumi.set(__self__, \"partner_data\", partner_data)\n if preview is not None:\n pulumi.set(__self__, \"preview\", preview)\n if remediation_description is not None:\n pulumi.set(__self__, \"remediation_description\", remediation_description)\n if threats is not None:\n pulumi.set(__self__, \"threats\", threats)\n if user_impact is not None:\n pulumi.set(__self__, \"user_impact\", user_impact)", "def additional_data(self) -> pulumi.Output[Optional[Mapping[str, str]]]:\n return pulumi.get(self, \"additional_data\")", "def additional_data(self) -> pulumi.Output[Optional[Mapping[str, str]]]:\n return pulumi.get(self, \"additional_data\")", "def _set_meta_info(self):\n self._report_data['environment'] = f'{self._get_environment()}'.lstrip()\n self._report_data['meta_account_id'] = self._account_id\n if self._account_name:\n self._report_data['meta_account_name'] = self._account_name\n\n # Get source ???\n # Appears in the Description section of the PDF Document Properties as Title.\n self._report_data['meta_title'] = ReportMeta.reports[self._report_key]['metaTitle'].upper()\n self._report_data['meta_subtitle'] = ReportMeta.reports[self._report_key]['metaSubtitle']\n\n # Appears in the Description section of the PDF Document Properties as Subject.\n if self._report_key in (ReportTypes.SEARCH_DETAIL_REPORT,\n ReportTypes.SEARCH_TOC_REPORT,\n ReportTypes.SEARCH_BODY_REPORT):\n search_type: str = self._report_data['searchQuery']['type']\n search_desc: str = TO_SEARCH_DESCRIPTION[search_type]\n criteria: str = ''\n if search_type == 'OWNER_NAME':\n criteria = self._report_data['searchQuery']['criteria']['ownerName']['last'] + ', '\n criteria += self._report_data['searchQuery']['criteria']['ownerName']['first']\n if 'middle' in self._report_data['searchQuery']['criteria']['ownerName']:\n criteria += ' ' + self._report_data['searchQuery']['criteria']['ownerName']['middle']\n else:\n criteria = self._report_data['searchQuery']['criteria']['value'].upper()\n self._report_data['meta_subject'] = f'{search_desc} - \"{criteria}\"'\n if 
search_type == 'MHR_NUMBER':\n self._report_data['footer_content'] = f'MHR Number Search - \"{criteria}\"'\n else:\n self._report_data['footer_content'] = f'MHR {search_desc} Search - \"{criteria}\"'\n elif self._report_key in (ReportTypes.MHR_REGISTRATION, ReportTypes.MHR_COVER,\n ReportTypes.MHR_TRANSFER, ReportTypes.MHR_EXEMPTION, ReportTypes.MHR_NOTE,\n ReportTypes.MHR_TRANSPORT_PERMIT, ReportTypes.MHR_REGISTRATION_COVER):\n reg_num = self._report_data.get('mhrNumber', '')\n self._report_data['footer_content'] = f'Manufactured Home Registration #{reg_num}'\n self._report_data['meta_subject'] = f'Manufactured Home Registration Number: {reg_num}'\n if self._get_environment() != '':\n self._report_data['footer_content'] = 'TEST DATA | ' + self._report_data['footer_content']", "def test_client_risk_assessment_retrieve(self):\n pass", "def get_study_info(self,std_id):\n raise NotImplementedError", "def data(self):\n pass", "def data(self):\n pass", "def meta_data(self) -> Dict:\n pass", "def analysis(self, game_info):\n pass", "def input_payment_details(self):\n pass", "def details(self) -> str:\n return f\"- **language**: [{self.language}]\\n\" \\\n f\"- **opengame**: [{self.opengame}]\\n\" \\\n f\"- **system**: [{self.system}]\\n\" \\\n f\"- **mode**: [{self.mode}]\\n\" \\\n f\"- **attributes**: [{self.attributes}]\\n \" \\\n f\"- **score_threshold**: [{self.score_threshold}]\\n \" \\\n f\"- **monsters**: [{self.monsters}]\\n\"", "def get_raw_information(self):\n try:\n info = self.student_attendance_record.get_period_info(\n self.start_date, self.day_periods)\n return (self.student_name, self.student_gender, info)\n except AttributeError:\n raise AttributeError, \\\n \"Failed to get student attendance record for: %s\" \\\n %unicode(self.student)", "def get_additional(cls, obj, **kwargs):\n if \"classifier_results\" in obj.extra_data:\n keywords = obj.extra_data.get('classifier_results').get(\"complete_output\")\n else:\n keywords = []\n prediction_results = obj.extra_data.get(\"arxiv_guessing\", {})\n if prediction_results:\n prediction_results = prediction_results[0].get(\"result\")\n return render_template(\n 'inspire_workflows/styles/harvesting_record_additional.html',\n object=obj,\n keywords=keywords,\n score=prediction_results.get(\"max_score\"),\n decision=prediction_results.get(\"decision\")\n )", "def data_for_question(self, question_type):\n\t\treturn {}", "def _assessments_editor_context(self, assessment_dates):\n assessments = {}\n for asmnt, date_range in zip(self.rubric_assessments, assessment_dates):\n # Django Templates cannot handle dict keys with dashes, so we'll convert\n # the dashes to underscores.\n template_name = make_django_template_key(asmnt['name'])\n assessments[template_name] = copy.deepcopy(asmnt)\n assessments[template_name]['start'] = date_range[0]\n assessments[template_name]['due'] = date_range[1]\n\n # In addition to the data in the student training assessment, we need to include two additional\n # pieces of information: a blank context to render the empty template with, and the criteria\n # for each example (so we don't have any complicated logic within the template). 
Though this\n # could be accomplished within the template, we are opting to remove logic from the template.\n student_training_module = self.get_assessment_module('student-training')\n\n student_training_template = {\n 'answer': {\n 'parts': [\n {'text': ''} for _ in self.prompts\n ]\n }\n }\n criteria_list = copy.deepcopy(self.rubric_criteria_with_labels)\n for criterion in criteria_list:\n criterion['option_selected'] = \"\"\n student_training_template['criteria'] = criteria_list\n\n if student_training_module:\n student_training_module = update_assessments_format([student_training_module])[0]\n example_list = []\n # Adds each example to a modified version of the student training module dictionary.\n for example in student_training_module['examples']:\n criteria_list = copy.deepcopy(self.rubric_criteria_with_labels)\n # Equivalent to a Join Query, this adds the selected option to the Criterion's dictionary, so that\n # it can be easily referenced in the template without searching through the selected options.\n for criterion in criteria_list:\n for option_selected in example['options_selected']:\n if option_selected['criterion'] == criterion['name']:\n criterion['option_selected'] = option_selected['option']\n example_list.append({\n 'answer': example['answer'],\n 'criteria': criteria_list,\n })\n assessments['training'] = {'examples': example_list, 'template': student_training_template}\n # If we don't have student training enabled, we still need to render a single (empty, or default) example\n else:\n assessments['training'] = {'examples': [student_training_template], 'template': student_training_template}\n\n return assessments", "def test_ai_assessment(self):\r\n\r\n # Navigate to the AI-assessment problem and submit an essay\r\n self.course_nav.go_to_sequential('AI-Assessed')\r\n self.submit_essay('ai', 'Censorship in the Libraries')\r\n\r\n # Refresh the page to get the updated feedback\r\n # then verify that we get the feedback sent by our stub XQueue implementation\r\n self.assertEqual(self.get_asynch_feedback('ai'), ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # First score is the self-assessment score, which we haven't answered, so it's 0/2\r\n # Second score is the AI-assessment score, which we have answered, so it's 1/2\r\n # Third score is peer-assessment, which we haven't answered, so it's 0/2\r\n self.assertEqual(scores, [(0, 2), (1, 2), (0, 2)])", "def save_test_evidence(self):\n payload = {\n \"test_id\": self.test_id,\n \"test_case_name\": self.test_case_name,\n \"epoch_timestamp\": self.epoch_timestamp,\n \"human_timestamp\": self.human_timestamp,\n \"verification_name\": self.verification_name,\n \"status\": self.status,\n \"value\": self.value,\n \"critical_value\": self.critical_value\n }\n return self.insert_regression_test_evidence(self.test_case_name, payload)", "def get_info(self):\n self.exists = self.check_subscr()\n return self.attrs", "def data(self):", "def has_assessment(self):\n return 'assessmentId' in self._my_map and bool(self._my_map['assessmentId'])", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def _set_additional_fields(self, data):\n # Remove the non 
required rules data.\n if 'rules' in data:\n del data['rules']\n # Change description into proper string.\n data['description'] = re.sub(\"[\\'\\\"]\", \"\", data['description'])\n # Calculate and update the premium field.\n premium = str(data.get('premium', \"false\")).lower() == 'true'\n data['pvtVuln'] = premium\n return data", "def collect_data(self,sensation,action,reward,next_sensation):\n pass", "def get_main_information(self) -> Dict:\n if self.lock:\n if self._information is None:\n # Setup self._information for the first time when study is lock\n self._information = self.client.get_instances_id(self.id_)\n\n return self._information\n\n return self.client.get_instances_id(self.id_)", "def add_details(self):\n\n if self.co.algorithm == \"vv\":\n algo = \"Verlocity Verlot\"\n if self.co.algorithm == \"rk4o\":\n algo = \"Runge Kutta Forth Order\"\n if self.co.algorithm == \"herm\":\n algo = \"Hermite Fourth Order\"\n\n self.algorithm_title = self.ax.text(\n 1.01, 0.65, \"Algorithm:\", transform=self.ax.transAxes\n )\n self.algorithm_text = self.ax.text(\n 1.01, 0.58, algo, transform=self.ax.transAxes\n )\n self.timestep_text = self.ax.text(\n 1.01, 0.51, \"dt =\" + str(self.co.tstep), transform=self.ax.transAxes\n )\n self.length_softening_distance = self.ax.text(\n 1.01,\n 0.44,\n r\"$\\epsilon$ = \" + str(self.co.epsilon),\n transform=self.ax.transAxes,\n )", "def metadata(self) -> Optional[pulumi.Input['SecurityAssessmentMetadataPropertiesArgs']]:\n return pulumi.get(self, \"metadata\")", "def info(self):", "def info(self):", "def training_info(self):\n pass", "def test_superuser_create_assessment(self):\n req, resp = data.get_assessment(self.contract['id'])\n\n response = self.superuser.post(self.assessment_list_url, req)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_superuser_create_assessment(self):\n req, resp = data.get_assessment(self.contract['id'])\n\n response = self.superuser.post(self.assessment_list_url, req)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def data(self):\n return (self._full_name, self.total_donation, self.num_of_donations, self.avg_donation)", "def healthcare():", "def get_main_information(self) -> Dict:\n if self.information is None:\n self.information = self.orthanc.get_instance_information(\n self.identifier\n )\n\n return self.information", "def what_is_the_grade(self):\n\t\treturn_dict = {\n\t\t\t'section_title': self.title, \n\t\t\t'section_weight': self.weight,\n\t\t\t'grade_value' : self.current_grade_value,\n\t\t\t'comment_text' : self.current_comment_text,\n\t\t\t'default_comments_text' : self.current_default_comment_text,\n\t\t\t'example_comments_text' : self.current_example_comment_text,\n\t\t\t'is_complete': self.is_complete\n\t\t}\n\n\t\treturn return_dict", "def make_extra_questions_txt(self):\n raise NotImplementedError", "def account_summary(self):\n pass", "def export(self):\n metadata = {\n 'user': self.operator,\n 'technique': ' | '.join(self.techniques),\n }\n return self.text, metadata", "def extra(self) -> Dict[str, Any]:\n extra = self.extras.copy()\n if isinstance(self.author, str):\n extra['Author'] = self.author\n if isinstance(self.email, str):\n extra['Email'] = self.email\n if isinstance(self.description, str):\n extra['Description'] = self.description\n return extra", "def prepare_student_data(self) -> dict:\n self._filename_pre_data()\n empty_student = {}\n empty_student[\"scoreTimestamp\"] = \"N/A\"\n for i in self.draft_out:\n empty_student[i] = \"N/A\"\n for i in 
self.pre_data:\n empty_student[i] = self.pre_data[i]\n self.pre_data = empty_student", "def save_assessment(self, data, _system):\r\n\r\n closed, msg = self.check_if_closed()\r\n if closed:\r\n return msg\r\n\r\n if self.child_state != self.ASSESSING:\r\n return self.out_of_sync_error(data)\r\n\r\n try:\r\n score = int(data.get('assessment'))\r\n score_list = [int(x) for x in data.getall('score_list[]')]\r\n except (ValueError, TypeError):\r\n # This is a dev_facing_error\r\n log.error(\"Non-integer score value passed to save_assessment, or no score list present.\")\r\n # This is a student_facing_error\r\n _ = self.system.service(self, \"i18n\").ugettext\r\n return {\r\n 'success': False,\r\n 'error': _(\"Error saving your score. Please notify course staff.\")\r\n }\r\n\r\n # Record score as assessment and rubric scores as post assessment\r\n self.record_latest_score(score)\r\n self.record_latest_post_assessment(json.dumps(score_list))\r\n\r\n d = {'success': True, }\r\n\r\n self.change_state(self.DONE)\r\n d['allow_reset'] = self._allow_reset()\r\n\r\n d['state'] = self.child_state\r\n return d", "def show_data(self, ):\r\n return print('society_name : {}\\n'\r\n 'flat : {}\\n'\r\n 'house_no : {}\\n'\r\n 'no_of_members : {}\\n'\r\n 'income : {}\\n '\r\n .format(self.society_name, self.flat, self.house_no, self.no_of_members, self.income))", "def get_assessments(self):\n if not self.is_assessment_based_activity():\n raise IllegalState()\n else:\n raise Unimplemented()", "def _load_assessment_results_page(self):\r\n\r\n fmt = '{0:0.' + str(Configuration.PLACES) + 'g}'\r\n\r\n self.txtAvailability.set_text(\r\n str(fmt.format(self._function_model.availability)))\r\n self.txtMissionAt.set_text(\r\n str(fmt.format(self._function_model.mission_availability)))\r\n self.txtMissionHt.set_text(\r\n str(fmt.format(self._function_model.mission_hazard_rate)))\r\n self.txtPredictedHt.set_text(\r\n str(fmt.format(self._function_model.hazard_rate)))\r\n\r\n self.txtMMT.set_text(str(fmt.format(self._function_model.mmt)))\r\n self.txtMCMT.set_text(str(fmt.format(self._function_model.mcmt)))\r\n self.txtMPMT.set_text(str(fmt.format(self._function_model.mpmt)))\r\n\r\n self.txtMissionMTBF.set_text(\r\n str(fmt.format(self._function_model.mission_mtbf)))\r\n self.txtMTBF.set_text(str(fmt.format(self._function_model.mtbf)))\r\n self.txtMTTR.set_text(str(fmt.format(self._function_model.mttr)))\r\n\r\n return False", "def look_for_other_attributes(context):\n json_data = context.response.json()\n assert \"recommended_versions\" in json_data, \"No recommended version found\"\n assert \"registration_link\" in json_data, \"No snyk registration link found\"\n assert \"component_analyses\" in json_data, \"No component analyses data found\"\n assert \"message\" in json_data, \"No message found\"\n assert \"severity\" in json_data, \"No severity found\"\n assert \"known_security_vulnerability_count\" in json_data\n assert \"security_advisory_count\" in json_data", "def __str__(self):\n # First obtain a string describing the underlying data model.\n strg = super(MiriTelescopeEmissionModel, self).__str__()\n \n # Add the extras\n if self.meta.instrument.filter is not None:\n strg += \"Data valid for filter=\\'%s\\' \" % \\\n self.meta.instrument.filter\n else:\n strg += \"Data valid for UNKNOWN filter \"\n if self.meta.telescope_temperature is not None:\n strg += \"and telescope temperature=%.2fK\" % \\\n self.meta.telescope_temperature\n else:\n strg += \"and UNKNOWN telescope temperature\"\n return strg", "def 
scrape_admission_details(self, main_content):\n group_items = self._scrape_group_items(main_content, 'field field-name-field-admission-details '\n 'field-type-text-long field-label-above')\n details_str = self.scrape_group_items_str(group_items)\n\n details_str = details_str.lstrip()\n details_str = details_str.rstrip()\n\n if len(details_str) > 0:\n details_str = self.csv_quote_escape(details_str)\n\n return details_str", "def __init__(__self__, *,\n assessment_count: Optional[pulumi.Input[int]] = None,\n extended_details: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n group_count: Optional[pulumi.Input[int]] = None):\n if assessment_count is not None:\n pulumi.set(__self__, \"assessment_count\", assessment_count)\n if extended_details is not None:\n pulumi.set(__self__, \"extended_details\", extended_details)\n if group_count is not None:\n pulumi.set(__self__, \"group_count\", group_count)", "def duty_details(self):\n return self._duty_details", "def test_client_risk_assessment_list(self):\n pass", "def field_data(self):\r\n\r\n return DictFieldData({\r\n 'data': '<peergrading/>',\r\n 'location': self.problem_location,\r\n 'use_for_single_location': True,\r\n 'link_to_location': self.coe_location.to_deprecated_string(),\r\n 'graded': True,\r\n })", "def additional_info(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"additional_info\")", "def info(self):\n print self.id, self.type, self.xyz.get_xyz", "def createIndustryInfo(self):\n self.setCurrentValue(0)\n self.setMinMax()\n self.writeIndustryName()\n self.createIndustrySim()\n self.writeIndustryDescription()\n self.writeIndustryCost()", "def fixture_additional_information_example():\n test_example = AdditionalInformation(\n ethical_considerations=ETHICAL_CONSIDERATIONS,\n caveats_and_recommendations=CAVEATS_AND_RECOMMENDATIONS,\n custom_details=CUSTOM_DETAILS,\n )\n return test_example", "def record(self):\n # TODO: record the data", "def get_enrolment_info(self):\n return None", "def agreements():\n pass", "def details(self) -> \"dict\":\n return self._attrs.get(\"details\")", "def assessment_type(self) -> str:\n return pulumi.get(self, \"assessment_type\")", "def get_info(self):\n return \"TODO !\"", "def info(self):\n ss = \"\\nSummary ARF info\\n\"\n ss += \"----------------\\n\"\n # Summarise data members\n ss += array_stats_str(self.energy_lo, 'Energy lo')\n ss += array_stats_str(self.energy_hi, 'Energy hi')\n ss += array_stats_str(self.effective_area.to('m^2'), 'Effective area')\n ss += 'Safe energy threshold lo: {0:6.3f}\\n'.format(self.energy_thresh_lo)\n ss += 'Safe energy threshold hi: {0:6.3f}\\n'.format(self.energy_thresh_hi)\n\n return ss", "def additional_data(self, additional_data):\n\n self._additional_data = additional_data", "def info(self) -> dict:", "def data(self) -> dict:\n raise NotImplementedError()", "def _section_course_info(course_key, access):\r\n course = get_course_by_id(course_key, depth=None)\r\n\r\n section_data = {\r\n 'section_key': 'course_info',\r\n 'section_display_name': _('Course Info'),\r\n 'access': access,\r\n 'course_id': course_key,\r\n 'course_display_name': course.display_name,\r\n 'enrollment_count': CourseEnrollment.num_enrolled_in(course_key),\r\n 'has_started': course.has_started(),\r\n 'has_ended': course.has_ended(),\r\n 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': course_key.to_deprecated_string()}),\r\n }\r\n\r\n try:\r\n advance = lambda memo, (letter, score): \"{}: {}, \".format(letter, score) + memo\r\n 
section_data['grade_cutoffs'] = reduce(advance, course.grade_cutoffs.items(), \"\")[:-2]\r\n except Exception:\r\n section_data['grade_cutoffs'] = \"Not Available\"\r\n # section_data['offline_grades'] = offline_grades_available(course_key)\r\n\r\n try:\r\n section_data['course_errors'] = [(escape(a), '') for (a, _unused) in modulestore().get_course_errors(course.id)]\r\n except Exception:\r\n section_data['course_errors'] = [('Error fetching errors', '')]\r\n\r\n return section_data", "def additional_log_details(self) -> Dict[str, Any]:\n additional_details = {}\n if hasattr(self, \"requestor\"):\n additional_details[\"Requestor\"] = self.requestor\n return additional_details" ]
[ "0.63733923", "0.61233914", "0.60425943", "0.5986746", "0.59847146", "0.5970008", "0.596638", "0.59152776", "0.590361", "0.5881575", "0.58584493", "0.5775308", "0.56499285", "0.56463957", "0.5642021", "0.5598647", "0.55952555", "0.5585957", "0.5569665", "0.55653036", "0.55639553", "0.55543524", "0.5536431", "0.5517683", "0.55130774", "0.5507232", "0.54993033", "0.54916847", "0.54893136", "0.54888856", "0.54759717", "0.5422631", "0.5422631", "0.54145104", "0.54067224", "0.5405085", "0.5389159", "0.5389159", "0.5370805", "0.53435177", "0.5341477", "0.5341398", "0.53386515", "0.53239447", "0.5311002", "0.530835", "0.5276652", "0.52746725", "0.5270163", "0.5269666", "0.5268228", "0.5266818", "0.5266818", "0.5266818", "0.52635217", "0.5256684", "0.52479905", "0.5242154", "0.5241826", "0.52315754", "0.5228113", "0.5228113", "0.5225876", "0.5223823", "0.5223823", "0.5214896", "0.5205461", "0.5203595", "0.5200222", "0.5197203", "0.5196509", "0.5195554", "0.51945066", "0.5190833", "0.51833236", "0.5164492", "0.5162665", "0.5158723", "0.5157848", "0.51545966", "0.5154589", "0.5150459", "0.5147789", "0.5144505", "0.51418686", "0.51375574", "0.5136846", "0.5136676", "0.5136221", "0.5134584", "0.51338243", "0.51294935", "0.5120115", "0.51200217", "0.51198107", "0.51186234", "0.51178473", "0.5115324", "0.51111245", "0.5109562", "0.51066816" ]
0.0
-1
The Assessment Key Unique key for the assessment type
def assessment_name(self) -> Optional[pulumi.Input[str]]: return pulumi.get(self, "assessment_name")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def key(self):\n return str(self._id)", "def get_key_id(self):", "def generate_key(self):\n return str(uuid4())", "def key(self):\n\n return self.qualifiers.get(\"key\", False)", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def 
key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self) -> str:\n return pulumi.get(self, \"key\")", "def key(self):\n return self._key if self._key else self.factory().key", "def key(self) -> str:\n return self.__key", "def unique_id(self):\n return self._type", "def key_type(self) -> global___Type:", "def key_type(self):\n return self._key_type", "def unique_id(self):\n return f\"{self.device.id}-{self.key}\"", "def key(self) -> str:\n return self._key", "def get_key(self):\n return self._determine_key()", "def key(self):\n return self.account_name()", "def key(self):\n return self.account_name()", "def key(self):\n return self.key_for(self.id)", "def attribute_key(self) -> str:\n return pulumi.get(self, \"attribute_key\")", "def assessment_type(self) -> str:\n return pulumi.get(self, \"assessment_type\")", "def key_id(self):\n return self._key_id", "def key(self) -> str:\n\n return self._key", "def unique_identifier(self) -> str:\n return pulumi.get(self, \"unique_identifier\")", "def key_id(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"key_id\")", "def key_id(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"key_id\")", "def unique_key(self):\n return json.dumps([self.name, self.birthDate])", "def generate_key():\n # generate random key\n key = get_random_string()\n\n # if it's already taken, generate another\n if EmailManager.objects.filter(key=key).exists():\n return EmailManager.generate_key()\n\n # return it\n return key", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")", "def key(self) -> Optional[str]:\n return pulumi.get(self, \"key\")" ]
[ "0.6836664", "0.6800989", "0.67905533", "0.6716157", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.66948956", "0.6597809", "0.65791583", "0.65453047", "0.65368867", "0.6531874", "0.6524194", "0.65102917", "0.6493321", "0.6479187", "0.6479187", "0.64426947", "0.6413757", "0.6413258", "0.64126647", "0.6410853", "0.6385415", "0.63821936", "0.6375673", "0.63705766", "0.63628274", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204", "0.63515204" ]
0.0
-1
Describes properties of an assessment metadata.
def metadata(self) -> Optional[pulumi.Input['SecurityAssessmentMetadataPropertiesArgs']]: return pulumi.get(self, "metadata")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metadata(self) -> global___SummaryMetadata:", "def metadata(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentMetadataPropertiesResponse']]:\n return pulumi.get(self, \"metadata\")", "def metadata(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentMetadataPropertiesResponse']]:\n return pulumi.get(self, \"metadata\")", "def get_assessment_metadata(self):\n return Metadata(**settings.METADATA['assessment_id'])", "def describe(self):\n response = check_defined(self, inspect.stack()[0][3])\n if not response:\n return response\n property_info = {'child_properties': self.child_properties,\n 'descendant_properties': self.descendant_properties,\n 'parent_properties': self.parent_properties,\n 'domain': self.domain,\n 'range': self.range,\n 'uri': self.uri,\n 'label': self.label,\n 'description': self.description}\n return property_info", "def metadata(self) -> Mapping[str, str]:\n return pulumi.get(self, \"metadata\")", "def _metadata(self):\n meta = super()._metadata\n meta.update({\n \"name\": self.name,\n \"lead_in_time\": self.lead_in_time,\n \"amplification\": self.amplification,\n \"amplifier_clipping\": self.amplifier_clipping,\n \"power_threshold\": self.power_threshold,\n })\n return meta", "def description(self):\n desc = self.title\n ops = []\n for attribute in self.attributes.all():\n value = attribute.value\n if isinstance(value, list):\n ops.append(\n \"%s = '%s'\" % (attribute.type, (\", \".join([str(v) for v in value])))\n )\n else:\n ops.append(\"%s = '%s'\" % (attribute.type, value))\n if ops:\n desc = \"%s (%s)\" % (desc, \", \".join(ops))\n return desc", "def metadata(self):\n return {\n \"wildtype\" : self.wildtype,\n \"genotypes\" : self.genotypes,\n \"phenotypes\" : self.Raw.phenotypes,\n \"stdeviations\" : self.stdeviations,\n \"n_replicates\" : self.n_replicates,\n \"mutations\" : self.mutations,\n \"log_transform\" : self.log_transform,\n \"order\" : self.order,\n \"epistasis\" : {\n \"keys\" : self.epistasis.keys,\n \"values\" : self.epistasis.values,\n }\n }", "def describe(self):\r\n mdataset_description = {\r\n 'kind': \"HConteiner\",\r\n 'compliance': self._compliance,\r\n 'has_encryption': self.has_encryption,\r\n 'encryption': self._encryption,\r\n 'sensitive': self._sensitive,\r\n 'license': self._license,\r\n }\r\n verbose_event()\r\n return mdataset_description", "def propertyDetails(self):\n return (PROPERTY_DETAILS.get(aa, NONE) for aa in self.sequence)", "def get_assessments_metadata(self):\n return Metadata(**settings.METADATA['assessment_ids'])", "def description(self) -> str:\n return self._search_in_properties(ATTR_DESCRIPTION)", "def generate_property_template(self):\n template = {\n \"@id\": \"url or curie of the property\",\n \"@type\": \"rdf:Property\",\n \"rdfs:comment\": \"description of the property\",\n \"rdfs:label\": \"carmel case, should match @id\",\n \"schema:domainIncludes\": {\n \"@id\": \"class which use it as a property, could be list\"\n },\n \"schema:isPartOf\": {\n \"@id\": \"http://schema.biothings.io\"\n },\n \"schema:rangeIncludes\": {\n \"@id\": \"relates a property to a class that constitutes (one of) the expected type(s) for values of the property\"\n }\n }\n return template", "def metadata(self): # -> None:\n ...", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": 
\" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def __repr__(self):\n return self._metadata.__str__()", "def _short_info(self) -> str:\n nullable = \"Nullable \" if self._is_nullable else \"\"\n\n # Good candidate for python pattern matching once <3.10 support no longer required\n num_metadata_items = len(self.__metadata)\n if num_metadata_items == 0:\n metadata = \"\"\n elif num_metadata_items == 1:\n metadata = f\" [with {num_metadata_items} metadata item]\"\n else:\n metadata = f\" [with {num_metadata_items} metadata items]\"\n\n return f\"<{nullable}{self.__class__.__name__}{metadata}: {self._resolve_field_name()}>\"", "def metadata(self) -> dict:\n meta = {}\n meta['filename'] = self.filename\n meta['label'] = self.label\n meta['url'] = self.url\n\n return meta", "def meta(self):\n title = 'Месторасположение: {0}'.format(self.object.emplacement)\n return {\n 'title': title\n }", "def __metadata__(self):\n raise NotImplementedError", "def metadata(self) -> dict:\n meta = {}\n meta['name'] = self.name\n meta['id'] = self.id\n meta['family'] = self.family\n \n meta['ptd_type'] = []\n meta['pos'] = []\n meta['atype'] = []\n meta['db_vect'] = []\n meta['scale'] = []\n for cp in self.parameters:\n meta['ptd_type'].append(cp.get('ptd_type', None))\n meta['pos'].append(cp.get('pos', None))\n meta['atype'].append(cp.get('atype', None))\n meta['db_vect'].append(cp.get('db_vect', None))\n meta['scale'].append(cp.get('scale', None))\n \n return meta", "def description(self):", "def describe(self):\n raise NotImplementedError()", "def describe(self):\n raise NotImplementedError()", "def summarize_metadata(self):\n meta_dict = {}\n for comp in self.dataset.data_vars:\n for mkey, mvalue in self.dataset[comp].attrs.items():\n meta_dict[f\"{comp}.{mkey}\"] = mvalue\n\n return meta_dict", "def describe(self) -> str:", "def __repr__(self):\n\n return self._metadata.__str__()", "def description(self):\n pass", "def description(self):\n pass", "def _describe(self) -> Dict[str, Any]:", "def getDescription(self):\n raise NotImplementedError", "def description(self):\n if \"description\" in self._prop_dict:\n return self._prop_dict[\"description\"]\n else:\n return None", "def metadata(self) -> global___SnippetConfigMetadata:", "def describe(self) -> Text:\n return self.__repr__()", "def meta(self):\n title = 'Оборудование: {0}'.format(self.object.value)\n return {\n 'title': title\n }", "def GetMetadata(self):\n return self.dict['meta']", "def metadata(self):\r\n return resources.Metadata(self)", "def details(self) -> str:\n return f\"- **language**: [{self.language}]\\n\" \\\n f\"- **opengame**: [{self.opengame}]\\n\" \\\n f\"- **system**: [{self.system}]\\n\" \\\n f\"- **mode**: [{self.mode}]\\n\" \\\n f\"- **attributes**: [{self.attributes}]\\n \" \\\n f\"- **score_threshold**: [{self.score_threshold}]\\n \" \\\n f\"- **monsters**: [{self.monsters}]\\n\"", "def metadata(self) -> Mapping[str, str]:\r\n return self._metadata", "def metadata(self) -> Mapping[str, str]:\r\n return self._metadata", "def add_metadata_properties(self, sentence, result):\r\n for property in sentence.properties:\r\n if property.property_metadata.is_category:\r\n result[property.name] = property.value", "def describe(self):\n print(self.description)", "def describe(self):\n print(self.description)", "def get_description(self):\n raise NotImplementedError", "def get_description(self):", "def get_description(self):\n return \"It is an Entity.\"", "def metadata(self):\n 
metadata = dict([(key,{}) for key in self.keys])\n for day in self.days:\n metadata[\"Days\"].append(day.attrs)\n for period in day.period:\n metadata[\"Periods\"].append(period.attrs)\n for course in period.courses:\n metadata[\"Courses\"].append(course.attrs)\n for instructor in course.instructor:\n metadata[\"Instructors\"].append(instructor.attrs)\n return metadata", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def properties(self):\n raise NotImplementedError", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description():", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def get_meta_str(self):\n s = []\n t = \"%-32s: %s\"\n s.append(t % (\"Edition\", self._meta.get(\"edition\", \"---\")))\n s.append(t % (\"Master-table\", self._meta.get(\"master\", \"---\")))\n cc = self._meta.get(\"center\", \"---\")\n cs = self._meta.get(\"subcenter\", \"---\")\n if self._tables is not None:\n cc = self._tables.lookup_codeflag(1033, cc)\n cs = self._tables.lookup_codeflag(1034, cs)\n s.append(t % (\"Centre\", cc))\n s.append(t % 
(\"Sub-Centre\", cs))\n s.append(t % (\"Update sequence number\", self._meta.get(\"update\", \"---\")))\n s.append(t % (\"Type of data\", (\"observed\" if self._meta.get(\"obs\", 0) else \"other\")))\n dc = self._meta.get(\"cat\", \"---\")\n if self._tables is not None:\n dc = self._tables.lookup_common(dc)\n s.append(t % (\"Data category\", dc))\n s.append(t % (\"International data sub-category\", self._meta.get(\"cat_int\", \"---\")))\n s.append(t % (\"Local data sub-category\", self._meta.get(\"cat_loc\", \"---\")))\n s.append(t % (\"Version number of master table\", self._meta.get(\"mver\", \"---\")))\n s.append(t % (\"Version number of local table\", self._meta.get(\"lver\", \"---\")))\n s.append(t % (\"Most typical time\", self._meta.get(\"datetime\", \"---\")))\n s.append(t % (\"Optional section present\", (\"yes\" if self._meta.get(\"sect2\", False) else \"no\")))\n s.append(t % (\"Compression\", (\"yes\" if self._meta.get(\"comp\", False) else \"no\")))\n s.append(t % (\"Number of data subsets\", self._meta.get(\"subsets\", \"---\")))\n return \"\\n\".join(s)", "def test_model_metadata_values(self):\n self.assertEqual(self.meta['author'], 'Giang Nguyen, Stefan Dlugolinsky')\n self.assertEqual(self.meta['author-email'], '[email protected], [email protected]')", "def get_metadata(self):\n meta_data = {}\n if self.beam_energy is not None:\n meta_data['beam_energy'] = self.beam_energy\n if self.collection_angle is not None:\n meta_data['collection_angle'] = self.collection_angle\n return meta_data", "def describe(self):\n\n ret = []\n ret.append(\"Functional ID: %s\" % self._number)\n ret.append(\"Functional Name: %s\" % self._xc_func_name)\n ret.append(\"Attributes:\")\n ret.append(\" Name: %s\" % self._name)\n ret.append(\" Kind: %d\" % self._kind)\n ret.append(\" Family: %d\" % self._family)\n ret.append(\"Citations:\")\n for x in self._refs:\n ret.append(\" \" + x)\n\n return \"\\n\".join(ret)", "def testDescription(self):\n dis_meta = DiseaseMeta()\n\n self.util.stringTypeTest(self, dis_meta, \"description\")\n\n self.util.stringPropertyTest(self, dis_meta, \"description\")", "def get_metadata(self):\n metadata = NeuroscopeRecordingInterface.get_ecephys_metadata(\n xml_file_path=get_xml_file_path(data_file_path=self.source_data['folder_path'])\n )\n metadata['Ecephys'].update(\n ElectricalSeries=dict(\n name='ElectricalSeries',\n description=\"Raw acquisition traces.\"\n )\n )\n\n return metadata", "def get_metadata(self):\n metadata = NeuroscopeRecordingInterface.get_ecephys_metadata(\n xml_file_path=get_xml_file_path(data_file_path=self.source_data['file_path'])\n )\n metadata['Ecephys'].update(\n ElectricalSeries=dict(\n name='ElectricalSeries',\n description=\"Raw acquisition traces.\"\n )\n )\n\n return metadata", "def meta(self, name=None, text_key=None, axis_edit=None):\n if not name:\n return self._meta\n else:\n return self.describe(name, text_key=text_key, axis_edit=axis_edit)", "def reportProperties():", "def get_description(self):\n pass", "def description(self) -> str:\n pass", "def __str__(self):\n return self._metadata.__str__()", "def metadata_reporter(self):\n logging.info('Creating summary report')\n header = '{}\\n'.format(','.join(self.headers))\n # Create a string to store all the results\n data = str()\n for sample in self.metadata:\n # Add the value of the appropriate attribute to the results string\n data += GenObject.returnattr(sample, 'name')\n # SampleName\n data += GenObject.returnattr(sample.run, 'SamplePlate')\n # Genus\n data += 
GenObject.returnattr(sample.general, 'closestrefseqgenus')\n # SamplePurity\n data += GenObject.returnattr(sample.confindr, 'num_contaminated_snvs')\n # N50\n n50 = GenObject.returnattr(sample.quast, 'N50',\n number=True)\n if n50 != '-,':\n data += n50\n else:\n data += '0,'\n # NumContigs\n data += GenObject.returnattr(sample.quast, 'num_contigs',\n number=True)\n # TotalLength\n data += GenObject.returnattr(sample.quast, 'Total_length',\n number=True)\n # MeanInsertSize\n data += GenObject.returnattr(sample.quast, 'mean_insert',\n number=True)\n # InsertSizeSTD\n data += GenObject.returnattr(sample.quast, 'std_insert',\n number=True)\n # AverageCoverageDepth\n data += GenObject.returnattr(sample.qualimap, 'MeanCoveragedata',\n number=True)\n # CoverageDepthSTD\n data += GenObject.returnattr(sample.qualimap, 'StdCoveragedata',\n number=True)\n # PercentGC\n data += GenObject.returnattr(sample.quast, 'GC',\n number=True)\n # MASH_ReferenceGenome\n data += GenObject.returnattr(sample.mash, 'closestrefseq')\n # MASH_NumMatchingHashes\n data += GenObject.returnattr(sample.mash, 'nummatches')\n # 16S_result\n data += GenObject.returnattr(sample.sixteens_full, 'sixteens_match')\n # 16S PercentID\n data += GenObject.returnattr(sample.sixteens_full, 'percent_id')\n # CoreGenesPresent\n data += GenObject.returnattr(sample.gdcs, 'coreresults')\n # rMLST_Result\n try:\n # If the number of matches to the closest reference profile is 53, return the profile number\n if sample.rmlst.matches == 53:\n if type(sample.rmlst.sequencetype) is list:\n rmlst_seq_type = ';'.join(sorted(sample.rmlst.sequencetype)).rstrip(';') + ','\n else:\n rmlst_seq_type = GenObject.returnattr(sample.rmlst, 'sequencetype')\n rmlst_seq_type = rmlst_seq_type if rmlst_seq_type != 'ND,' else 'new,'\n data += rmlst_seq_type\n else:\n # Otherwise the profile is set to new\n data += 'new,'\n except AttributeError:\n data += 'new,'\n # MLST_Result\n try:\n if sample.mlst.matches == 7:\n if type(sample.mlst.sequencetype) is list:\n mlst_seq_type = ';'.join(sorted(sample.mlst.sequencetype)).rstrip(';') + ','\n else:\n mlst_seq_type = GenObject.returnattr(sample.mlst, 'sequencetype')\n mlst_seq_type = mlst_seq_type if mlst_seq_type != 'ND,' else 'new,'\n data += mlst_seq_type\n else:\n data += 'new,'\n except AttributeError:\n data += 'new,'\n # MLST_gene_X_alleles\n try:\n # Create a set of all the genes present in the results (gene name split from allele)\n gene_set = {gene.split('_')[0] for gene in sample.mlst.combined_metadata_results}\n for gene in sorted(gene_set):\n allele_list = list()\n # Determine all the alleles that are present for each gene\n for allele in sample.mlst.combined_metadata_results:\n if gene in allele:\n allele_list.append(allele.replace(' ', '_'))\n # If there is more than one allele in the sample, add both to the string separated by a ';'\n if len(allele_list) > 1:\n data += '{},'.format(';'.join(allele_list))\n # Otherwise add the only allele\n else:\n data += allele_list[0] + ','\n # If there are fewer than seven matching alleles, add a ND for each missing result\n if len(gene_set) < 7:\n data += (7 - len(gene_set)) * 'ND,'\n except AttributeError:\n # data += '-,-,-,-,-,-,-,'\n data += 'ND,ND,ND,ND,ND,ND,ND,'\n # E_coli_Serotype\n try:\n # If no O-type was found, set the output to be O-untypeable\n if ';'.join(sample.ectyper.o_type) == '-':\n otype = 'O-untypeable'\n else:\n otype = sample.ectyper.o_type\n # Same as above for the H-type\n if ';'.join(sample.ectyper.h_type) == '-':\n htype = 
'H-untypeable'\n\n else:\n htype = sample.ectyper.h_type\n serotype = '{otype}:{htype},'.format(otype=otype,\n htype=htype)\n # Add the serotype to the data string unless neither O-type not H-type were found; add ND instead\n data += serotype if serotype != 'O-untypeable:H-untypeable,' else 'ND,'\n except AttributeError:\n data += 'ND,'\n # SISTR_serovar_antigen\n data += GenObject.returnattr(sample.sistr, 'serovar_antigen').rstrip(';')\n # SISTR_serovar_cgMLST\n data += GenObject.returnattr(sample.sistr, 'serovar_cgmlst')\n # SISTR_serogroup\n data += GenObject.returnattr(sample.sistr, 'serogroup')\n # SISTR_h1\n data += GenObject.returnattr(sample.sistr, 'h1').rstrip(';')\n # SISTR_h2\n data += GenObject.returnattr(sample.sistr, 'h2').rstrip(';')\n # SISTR_serovar\n data += GenObject.returnattr(sample.sistr, 'serovar')\n # GeneSeekr_Profile\n try:\n if sample.genesippr.report_output:\n data += ';'.join(sample.genesippr.report_output) + ','\n else:\n data += 'ND,'\n except AttributeError:\n data += 'ND,'\n # Vtyper_Profile\n data += GenObject.returnattr(sample.verotoxin, 'verotoxin_subtypes_set')\n # AMR_Profile and resistant/sensitive status\n if sample.resfinder_assembled.pipelineresults:\n # Profile\n for resistance, resistance_set in sorted(sample.resfinder_assembled.pipelineresults.items()):\n data += '{res}({r_set});'.format(res=resistance.replace(',', ';'),\n r_set=';'.join(sorted(list(resistance_set))))\n data += ','\n # Resistant/Sensitive\n data += 'Resistant,'\n else:\n # Profile\n data += 'ND,'\n # Resistant/Sensitive\n data += 'Sensitive,'\n # Plasmid Result'\n if sample.mobrecon.pipelineresults:\n for plasmid, details in sorted(sample.mobrecon.pipelineresults.items()):\n data += '{plasmid}({details});'.format(plasmid=plasmid,\n details=details)\n data += ','\n else:\n data += 'ND,'\n # TotalPredictedGenes\n data += GenObject.returnattr(sample.prodigal, 'predictedgenestotal',\n number=True)\n # PredictedGenesOver3000bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesover3000bp',\n number=True)\n # PredictedGenesOver1000bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesover1000bp',\n number=True)\n # PredictedGenesOver500bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesover500bp',\n number=True)\n # PredictedGenesUnder500bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesunder500bp',\n number=True)\n # AssemblyDate\n data += datetime.now().strftime('%Y-%m-%d') + ','\n # PipelineVersion\n data += self.commit + ','\n # Name of the database used in the analyses\n data += os.path.split(self.reffilepath)[-1] + ','\n # Database download date\n data += self.download_date\n # Append a new line to the end of the results for this sample\n data += '\\n'\n # Replace any NA values with ND\n cleandata = data.replace('NA', 'ND')\n with open(os.path.join(self.reportpath, 'combinedMetadata.csv'), 'w') as metadatareport:\n metadatareport.write(header)\n metadatareport.write(cleandata)", "def properties(self) -> Optional[str]:\n return pulumi.get(self, \"properties\")", "def metadata(self):\n return {\n \"namespace\": self.namespace,\n \"short_name\": f\"{self.namespace}_{self._dataset_metadata['DatasetCode']}\",\n \"name\": f\"{self._dataset_metadata['DatasetName']} - FAO ({self.publication_year})\",\n \"description\": self._dataset_metadata[\"DatasetDescription\"],\n \"source_name\": \"Food and Agriculture Organization of the United Nations\",\n \"publication_year\": int(self.publication_year),\n \"publication_date\": 
self._dataset_metadata[\"DateUpdate\"],\n \"date_accessed\": str(dt.date.today()),\n \"url\": self.url,\n \"source_data_url\": self.source_data_url,\n \"file_extension\": \"zip\",\n }" ]
[ "0.64961946", "0.6462961", "0.6462961", "0.6290093", "0.6145428", "0.6084069", "0.6051594", "0.60471356", "0.5994781", "0.5933389", "0.59084606", "0.58996195", "0.5870387", "0.5848383", "0.5837634", "0.5826372", "0.5826372", "0.5818724", "0.58181715", "0.5814273", "0.5807516", "0.58013815", "0.57757455", "0.576439", "0.57614464", "0.57614464", "0.57312673", "0.5714556", "0.5714087", "0.56985253", "0.56985253", "0.56906027", "0.567607", "0.56658524", "0.5664016", "0.5653505", "0.56488395", "0.5630287", "0.5630278", "0.56219167", "0.56204927", "0.56204927", "0.5615076", "0.56134796", "0.56134796", "0.5605538", "0.5592627", "0.55762833", "0.5575389", "0.55743563", "0.55727476", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.5558656", "0.55581427", "0.55581427", "0.55581427", "0.55579007", "0.5557472", "0.555165", "0.55477333", "0.55310714", "0.55232775", "0.55124277", "0.55087674", "0.5502045", "0.5499669", "0.5498878", "0.54924434", "0.54868865", "0.5483703", "0.5479821" ]
0.64541817
3
Data regarding 3rd party partner integration
def partners_data(self) -> Optional[pulumi.Input['SecurityAssessmentPartnerDataArgs']]: return pulumi.get(self, "partners_data")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def info():\n if g.party_id is None:\n # No party is configured for the current site.\n abort(404)\n\n party = party_service.get_party(g.party_id)\n\n return {\n 'party': party,\n }", "def get(self,\n partner_id):\n abort(501)", "def get(self,\n partner_id):\n abort(501)", "def getPremiumInfo(self, authenticationToken):\r\n pass", "def partner_data(self) -> Optional['outputs.SecurityAssessmentMetadataPartnerDataResponse']:\n return pulumi.get(self, \"partner_data\")", "def investing(site):\n url = (\n \"https://www.widgets.investing.com/live-currency-cross-rates?\"\n + \"theme=darkTheme&cols=last&pairs=3,2111,2124,2126,650,962711,69,68\"\n )\n headers = {\n \"href\": (\n \"https://www.investing.com?utm_source=WMT&amp;utm_medium=referral&amp;\"\n + \"utm_campaign=LIVE_CURRENCY_X_RATES&amp;utm_content=Footer%20Link\"\n ),\n \"target\": \"_blank\",\n \"rel\": \"nofollow\",\n }\n try:\n session = requests.Session()\n session.headers = headers\n cfscrape_requests = cfscrape.create_scraper(sess=session)\n ret = cfscrape_requests.get(url, headers=headers, timeout=(15, 15)).text\n lines = ret.split('target=\"_blank\"')\n lines = [i.replace(\" \", \"\").replace(\",\", \"\") for i in lines]\n lines = [i for i in lines if \"askpid\" in i]\n lines = [i.split(\"hiddenFour\")[0] for i in lines]\n data = {\n item.split(\"</a>\")[0].replace(\">\", \"\"): item.split('last\">')[1]\n for item in lines\n }\n\n data = {k.replace(\"/\", \":\"): v.split(\"</div>\")[0] for k, v in data.items()}\n data = {k: float(v) for k, v in data.items()}\n data[\"USD:XAG\"] = 1 / data.pop(\"XAG:USD\")\n data[\"USD:XAU\"] = 1 / data.pop(\"XAU:USD\")\n data = refine_data(data)\n print(site, data)\n race_write(f\"{site}_forex.txt\", json_dumps(data))\n except:\n print(f\"{site} failed to load\")", "def get_data(ticket_info, logger):\n\n # This string will be returned and added as a comment to the SecureChange ticket\n return_str = \"\"\n\n # Log integration starting\n logger.info(\"Running '{}' integration\".format(__name__))\n\n try:\n\n for req in ticket_info['Requests']:\n for src in req['Sources']:\n if src['Private'] and src['Cidr'] == '32':\n return_str = '{}\\n\\n{}\\n\\n{}'.format(return_str, device_query(src['Ip'], fmt='text'), event_query(src['Ip'], fmt='text'))\n for dst in req['Destinations']:\n if dst['Private'] and dst['Cidr'] == '32':\n return_str = '{}\\n\\n{}\\n\\n{}'.format(return_str, device_query(dst['Ip'], fmt='text'), event_query(dst['Ip'], fmt='text'))\n\n\n except Exception as e:\n\n # Log the error and return an empty string\n logger.error(\"Error: {}\".format(e))\n return None\n\n # Log integration completing\n logger.info(\"{} integration completed\".format(__name__))\n\n # Return comment\n return return_str", "def get_oauth_data():", "def get_personal_info(self):\n self.get(\"INFO\",\"GetPersonalInfo\")\n response = self.send()\n return response", "def datapack_details(request):\n print 'get datapack details'\n\n context = request['context']\n print context\n try:\n telephone_number = first_entity_value(request['entities'], 'phone_number')\n with open(os.path.join(sys.path[0], \"app/wit/static/users.json\"), \"r\") as data_file:\n data = json.load(data_file)\n network_details = data[telephone_number]['data_details']\n print network_details\n\n\n\n reply = \"Our Initial Investigation shows that you're are currently using \" + network_details['network_services_available'] + \" and have subscribed for \" + network_details['network_services_subscribed'] + \".\"\n if 
network_details['megabytes_available'] == 0:\n reply += \" You have exhausted your datapack. Change your network settings to use pay2go plan or recharge now with available datapacks. Please check http://www.airtel.in/Airtel3G/tariff.html\"\n elif network_details['network_services_available'] != network_details['network_services_subscribed']:\n reply += \" Your subscribed datapack settings does not match with services available. Please change your network settings\"\n\n except:\n telephone_number = None\n reply = \"Your number is not subscribed with Airtel. Please contact your network operator for your query\"\n\n context['datapack'] = reply\n\n return context", "def get_account_details(self):\n pass", "def awaiting_payment(self):", "def get_infos(self):\n infos = dict()\n infos[\"dataset\"] = self.dataset_name\n infos[\"task\"] = \"separate_noisy\"\n infos[\"licenses\"] = [librispeech_license, tac_license]\n return infos", "def info(self):\n\n self.call(method='getInvoice', args=[self.reference_no, self.with_vat])", "def _production_partner(self, cr, uid, ids, prop, unknow_none, context=None):\n result = {}\n for prod in self.browse(cr, uid, ids, context=context):\n result[prod.id] = {\n 'partner_id':'',\n 'partner_rag_soc':'',\n }\n #import pdb;pdb.set_trace()\n if prod.origin:\n # ha trovato un dato nelle origini verifica che esista un ordine cliente e ne legge l'informazione\n cerca = [('name','=',prod.origin)]\n sale_ids = self.pool.get('sale.order').search(cr,uid,cerca)\n if sale_ids:\n riga_sale = self.pool.get('sale.order').browse(cr,uid,sale_ids)[0]\n result[prod.id]['partner_ref'] = riga_sale.partner_id.ref\n result[prod.id]['partner_rag_soc'] = riga_sale.partner_id.name\n return result", "def request_info(self):\r\n if self.use_http():\r\n self.enqueue_http_request(\"money/info\", {}, \"info\")\r\n else:\r\n self.send_signed_call(\"private/info\", {}, \"info\")", "def partner_get(self):\n try:\n document = mongo_module.mongo_find(self.partner_id, single=True)\n output = 'partner' if document else 'No data match'\n code = 200 if document else 204\n except Exception as err:\n document = None\n output = str(err)\n code = 400\n return output, document, code", "def partners_data(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentPartnerDataResponse']]:\n return pulumi.get(self, \"partners_data\")", "def partners_data(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentPartnerDataResponse']]:\n return pulumi.get(self, \"partners_data\")", "def get_profile_data(self, transceiver, placement):", "def getfundamentals(self, results):\n self.log(\"Retrieving fundamental phone information\")\n self.log(\"Phone serial number\")\n results['uniqueserial']=sha.new(self.get_esn()).hexdigest()\n results['groups']=self.get_groups()\n results['ringtone-index']=self.get_ringtone_index()\n results['wallpaper-index']=self.get_wallpaper_index()\n return results", "def _get_information(self):\n pass", "def getInfo():", "def onchange_partner_id(self\n ):\n if self._context is None:\n context = {}\n acc_part_brw = False\n acc_id = False\n rp_obj = self.env['res.partner']\n wh_line_obj = self.env['account.wh.src.line']\n\n if self.partner_id:\n #partner = rp_obj.browse(self.partner_id)\n acc_part_brw = rp_obj._find_accounting_partner(self.partner_id)\n if self.type and self.type in ('out_invoice', 'out_refund'):\n acc_id = acc_part_brw.property_account_receivable_id \\\n and acc_part_brw.property_account_receivable_id.id or False\n else:\n acc_id = acc_part_brw.property_account_payable_id \\\n 
and acc_part_brw.property_account_payable_id.id or False\n\n # part_brw = self.ids and rp_obj._find_accounting_partner(self.browse(\n # self, self.ids[0]).partner_id)\n wh_lines = self.ids and wh_line_obj.search(\n [('wh_id', '=', self.ids[0])])\n if not self.partner_id:\n if wh_lines:\n wh_line_obj.unlink(wh_lines)\n wh_lines = []\n if self.partner_id and acc_part_brw and self.partner_id.id != acc_part_brw.id:\n if wh_lines:\n wh_line_obj.unlink(wh_lines)\n wh_lines = []\n\n return {'value': {\n 'line_ids': wh_lines,\n 'account_id': acc_id,\n }\n }", "def get_details(self):", "def test_get_order_buyer_info(self):\n pass", "def party_id(self):\n pass", "def JointAccount(self):\n joint_accounts = []\n if self.IsGeneralPartner():\n for contact in self.__contact.Party().Contacts(): \n if contact.AdditionalInfo().RegGeneralPartner(): \n joint_accounts.append(contact)\n else:\n FRegulatoryLogger.WARN(logger, \"<%s> is not a General Partner. Hence JointAccount is None\"%self.__contact.Fullname())\n joint_accounts = None\n return joint_accounts", "def get_data():\n pass", "def _get_config_data(self, cr, uid):\n\n model_conf = self.pool.get('customer.support.settings')\n args = [('selected', '=', True)] \n ids = model_conf.search(cr, uid, args)\n config = model_conf.browse(cr, uid, ids[0])\n\n return {\n 'tor_api_key': config.tor_api_key,\n 'tor_domain': config.tor_domain,\n 'company': config.company\n }", "def fhir_enquiry(request, context_override={}):\n\n state = get_state(CLIENT_ID,AUTH_URL)\n code = get_code(CLIENT_ID,AUTH_URL)\n\n # set default context\n context = {}\n context['template'] = \"result.html\"\n context['get_fmt'] = \"json\"\n context['display'] = \"Me\"\n context['code'] = code\n context['state'] = state\n context['ask'] = \"/api/v1/me?_format=json\"\n context['url'] = settings.OAUTH_TEST_INFO['BASE']\n context['headers'] = {'content-type': 'application/x-www-form-urlencoded',\n 'Authorization': \"Bearer \"+ get_code(CLIENT_ID, AUTH_URL)},\n\n # add / overwrite anything in context_override\n context = update_dict(context, context_override)\n\n data = {'code': code,\n 'grant_type': 'authorization_code',\n 'key': 'access_token',\n #'key': 'refresh_token',\n 'access_token': get_access(state),\n 'refresh_token': get_refresh(state),\n 'redirect_uri': REDIRECT_URI}\n\n if settings.DEBUG:\n print(\"Context after update:\", context)\n print(\"Data:\", data)\n\n print(\"SERVICE:\", SERVICE )\n\n # Get access_token\n headers = {}\n print('Context Headers:', dict(context['headers'][0]))\n #headers = {'headers': update_dict(headers, context_override=dict(context['headers'][0]))}\n headers = update_dict(headers, context_override=dict(context['headers'][0]))\n print(\"Headers:\", headers)\n\n kw_to_send = {'data': data, 'headers': headers}\n\n #session = SERVICE.get_auth_session(method=\"POST\",**kw_to_send)\n #session = SERVICE.get_session(get_access(state))\n #session = SERVICE.get_raw_access_token(method=\"POST\", **kw_to_send)\n session = SERVICE.get_raw_access_token(data=data)\n\n #response = SERVICE.get_access_token(method=\"POST\")\n # response = SERVICE.get_auth_session(data=data)\n print(\"Auth Session\", session)\n #response = SERVICE.get_raw_access_token(data=data, **headers)\n\n get_text = session.json()\n\n if 'access_token' in get_text:\n print(\"got an access token\")\n access = save_tokens(state,\n get_text['access_token'],\n get_text['refresh_token'])\n\n print(\"RESPONSE:\", get_text)\n # RESPONSE: {\"expires_in\": 36000,\n # \"access_token\": 
\"h1vY5eDu69JKfV4nPpdu8xEan63hKl\",\n # \"scope\": \"patient/*.read write_consent\",\n # \"token_type\": \"Bearer\",\n # \"refresh_token\": \"6HZnSwhfsGvfr9Aguw5n0e5CoGr8CQ\"}\n\n\n sesn = SERVICE.get_session(get_text['access_token'])\n print(\"SESSION:\", sesn)\n\n r = sesn.get(context['url'] + context['ask'])\n\n if settings.DEBUG:\n print(\"R:\", r.content)\n\n return r", "def user_data(self, token, *args, **kwargs):\n url = \"https://api.intra.42.fr/v2/me\"\n auth_header = {\"Authorization\": \"Bearer %s\" % token}\n try:\n return self.get_json(url, headers=auth_header)\n except ValueError:\n return None", "def get_bill_details(request):\n\n print request\n\n context = request['context']\n print context\n try:\n telephone_number = first_entity_value(request['entities'], 'phone_number')\n with open(os.path.join(sys.path[0], \"app/wit/static/users.json\"), \"r\") as data_file:\n data = json.load(data_file)\n customer_billing = data[telephone_number]['last_month_billing']\n print customer_billing\n\n customer_type = data[telephone_number]['type_customer']\n if customer_type == 'postpaid':\n\n reply = \"Our Initial Investigation shows that you're a \" + data[telephone_number]['type_customer'] + \" Customer and currently using \" + data[telephone_number]['plan_details'] + \" plan type.\"\n if customer_billing['roaming'] == 'True':\n reply += \"You had used your cellphone while on roaming for which you were charged extra.\"\n elif customer_billing['data_exhaust'] == 'True':\n reply += \"You had used your data network after your allocated limit was exhausted. You were charged for these services\"\n elif customer_billing['subscribed'] == 'True':\n reply += \"You had subscribed to some promotional services for which you were charged in extra.\"\n else:\n reply = \"Our Initial Investigation shows that you're a \" + data[telephone_number]['type_customer'] + \". We believe that this might be a mistake from our side and would like you to speak to our customer care executives separately.\"\n\n\n except:\n telephone_number = None\n reply = \"Your number is not subscribed with Airtel. 
Please contact your network operator for your query\"\n\n\n print reply\n\n context['bill_details'] = reply\n\n return context", "def inspect_incoming(self, cr, uid, phone_number, context=None):\n # Retrieve the default URL for the web client, use to:\n # - default URL when no action found\n # - Compose the URL to return to the customer\n ir_config_obj = self.pool.get('ir.config_parameter')\n url = ir_config_obj.get_param(cr, 1, 'web.base.url', 'http://localhost:8069')\n\n # Retrieve the context for the user\n user_obj = self.pool.get('res.users')\n user_context = user_obj.context_get(cr, uid, context=context)\n if context is not None:\n user_context.update(context)\n\n # Search address and partner for this phone number\n (partner_id, address_id) = self.find_partner_from_phone_number(cr, uid, phone_number, context=user_context)\n if not partner_id and not address_id:\n return url\n\n # Check if default action is define from the company\n company_id = user_obj.browse(cr, uid, uid, context=user_context).company_id\n if not company_id:\n return url\n\n company = self.pool.get('res.company').browse(cr, uid, company_id.id, context=user_context)\n if not company.cti_action_id:\n # No action found on this company\n return url\n\n # Check custom for this action\n current_act = self.browse(cr, uid, company.cti_action_id.id, context=user_context)\n if current_act.create_entry:\n entry_id = self.pool.get(current_act.model_id.model).create(cr, uid, {'partner_id': partner_id}, context=user_context)\n cr.commit()\n else:\n entry_id = partner_id or address_id\n\n return self._format_url_from_action(cr, uid, url, company.cti_action_id.model_id.model, company.cti_action_id.act_window_id.id, entry_id, context=user_context)", "def get_info(self, charger):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_info\",\n \"token\": charger.token(),\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_api_secure\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n return response_json", "def got_info(self, cloud_obj):", "def input_payment_details(self):\n pass", "def __init__(__self__, *,\n partner_name: str,\n secret: str):\n pulumi.set(__self__, \"partner_name\", partner_name)\n pulumi.set(__self__, \"secret\", secret)", "def rpc_info():", "def submitting_party(self):\n party = {\n 'businessName': self.name,\n 'address': address_utils.get_address_from_db2(self.legacy_address)\n }\n if self.phone_number:\n party['phoneNumber'] = self.phone_number\n return party", "def info(self):\n if self.integration is None:\n return None\n return self.integration.info", "def gen_CRM(call_text, response_text):\n pass", "def device_info(self) -> Dict[str, Any]:\n return {\n 'name': 'Boiler',\n 'identifiers': {\n (DOMAIN, self.toon.agreement.id, 'boiler'),\n },\n 'via_device': (DOMAIN, self.toon.agreement.id, 'boiler_module'),\n }", "def scrapeFollowingFromAnAccount():\n global api", "def test_get_pay_in_details(self):\n pass", "def get_data(self):", "def getPayment(self):\n pass", "def partner_name(self) -> str:\n return pulumi.get(self, \"partner_name\")", "def partner_name(self) -> str:\n return pulumi.get(self, \"partner_name\")", "def send_counterparty(self) -> None:\n object_ = self.objects[0]\n ticket_text = ''\n if 'сб' in object_.counterparty_name.lower() and self.keyword == 'closing':\n # order_id = sberinkas.main(\n # object_.object_SAP_code,\n # object_.object_address,\n # 
object_.lat,\n # object_.lon\n # )\n # ticket_text = f\"<br>Номер заявки на портале инкассация - {order_id}.\"\n pass\n\n body = '<p>Добрый день!<br><br>' \\\n f'Прошу принять в работу письмо на {self.letter_text}<br>' \\\n f'Скан подписанного письма вышлю позднее.{ticket_text}'\n if 'сб' in object_.counterparty_name.lower():\n self.send_sber_manager_service(body)\n else:\n self.sendmail(\n self.outlook,\n self.to,\n \"\",\n self.letter_name,\n body,\n self.attachment,\n 2\n )", "def get_another_number(request):\n \n user = request.user \n phone_number = generate_phn_number()\n \n user_email = user.username\n \n try: \n # Create stripe account\n stripe_customer = stripe.Customer.create(\n email = user_email\n )\n \n # Set a default card for account\n s_card = stripe.Customer.create_source(\n stripe_customer.id,\n source=\"tok_amex\",\n ) \n \n plan_id = \"price_1JsHMxSDkRo5FXlkOsq2QHSV\"\n\n # if data[\"subscription_plan\"]== \"Globalnet Silver\":\n # plan_id = \"price_1JsHOJSDkRo5FXlkQmfEQzhN\"\n \n # if data[\"subscription_plan\"]== \"Globalnet Gold\":\n # plan_id = \"price_1JsHPFSDkRo5FXlk9VSl41rV\"\n\n # Create a default subscription for customer \n subscription = stripe.Subscription.create(\n customer = stripe_customer.id,\n items = [{'plan':plan_id}]\n )\n \n \n start_date = datetime.datetime.now().strftime(\"%c\")\n end_date = (datetime.datetime.now() + datetime.timedelta(30)).strftime(\"%x\")\n\n subscription_plan = SubscriptionPlan.objects.get(subscription_plan_name=\"Globalnet Bronze\")\n \n # Create customer data\n customer_data = SecondaryNumber.objects.create(\n user = user,\n phn_number = phone_number,\n subscription_plan = subscription_plan,\n stripe_id = stripe_customer.id,\n start_date = start_date,\n end_date = end_date,\n subscription_id = subscription.id\n \n )\n \n # Entry Subscription data\n SubscriptionData.objects.create(\n subscriber = phone_number,\n subscription = subscription_plan.subscription_plan_name,\n subscription_start = start_date,\n subscription_end = end_date \n \n )\n \n \n serializer= SeconderyNumberSerializer(customer_data,many=False)\n return Response(serializer.data)\n\n except Exception as e:\n message = {\"detail\":str(e)}\n print(e)\n return Response(message)", "def reqData(self):\r\n #self.reqGlobalCancel()\r\n #self.add_historical(\"Stock('TSLA', 'SMART', 'USD')\")\r\n #self.add_historical(\"Stock('IBM', 'SMART', 'USD')\")\r\n #self.add_historical(\"Stock('MSFT', 'SMART', 'USD')\")\r\n self.add_historical(\"Stock('FB', 'SMART', 'USD')\")", "def get_info(self):\n pass", "def get_info(self):\n pass", "def local_bonds_prices():\n url1 = \"https://api.invertironline.com/token\"\n\n data = {\n \"username\": usuario,\n \"password\": password,\n \"grant_type\": \"password\" \n }\n response = requests.post(url1, data=data)\n if response.status_code == 200:\n content = response.text\n access_key = token_key(content)\n\n url2 = f'https://api.invertironline.com/api/v2/Cotizaciones/Bonos/Merval/argentina'\n datos = requests.get(url2, headers={\n 'Authorization': 'Bearer '+access_key\n })\n datos = json.loads(datos.text)\n datos = datos['titulos']\n datos = clean_assets(datos)\n return datos", "def get_info(user):\n from Game.models import Ownership\n response = {}\n wallet = Wallet.objects.get(user=user)\n response['liquid'] = wallet.liquid_with_loans\n value_wallet = wallet.liquid_with_loans\n ownerships = Ownership.objects.filter(wallet=wallet, quantity__gt=0)\n assets = []\n asset_communication = ACommunication(settings.API_URL)\n for o in 
ownerships:\n asset = asset_communication.get_asset_quote(o.asset)\n asset.quantity = o.quantity\n value_wallet += o.quantity * asset.sell\n assets.append(asset)\n response['assets'] = assets\n response['value_wallet'] = value_wallet\n response['error'] = False\n return response", "def device_info(self) -> Dict[str, Any]:\n return {\n 'name': 'Boiler Module',\n 'manufacturer': 'Eneco',\n 'identifiers': {\n (DOMAIN, self.toon.agreement.id, 'boiler_module'),\n },\n 'via_device': (DOMAIN, self.toon.agreement.id),\n }", "def getIntervenciones():", "def get_tenants(self):", "def _generate_cybersource_sa_payload(*, order, receipt_url, cancel_url, ip_address):\n # http://apps.cybersource.com/library/documentation/dev_guides/Secure_Acceptance_WM/Secure_Acceptance_WM.pdf\n # Section: API Fields\n\n # NOTE: be careful about max length here, many (all?) string fields have a max\n # length of 255. At the moment none of these fields should go over that, due to database\n # constraints or other reasons\n\n coupon_redemption = CouponRedemption.objects.filter(order=order).first()\n coupon_version = (\n coupon_redemption.coupon_version if coupon_redemption is not None else None\n )\n\n line_items = {}\n total = 0\n for i, line in enumerate(order.lines.all()):\n product_version = line.product_version\n unit_price = get_product_version_price_with_discount(\n coupon_version=coupon_version, product_version=product_version\n )\n line_items[f\"item_{i}_code\"] = str(product_version.product.content_type)\n line_items[f\"item_{i}_name\"] = str(product_version.description)[:254]\n line_items[f\"item_{i}_quantity\"] = line.quantity\n line_items[f\"item_{i}_sku\"] = product_version.product.content_object.id\n line_items[f\"item_{i}_tax_amount\"] = \"0\"\n line_items[f\"item_{i}_unit_price\"] = str(unit_price)\n\n total += unit_price\n\n # At the moment there should only be one line\n product_version = order.lines.first().product_version\n product = product_version.product\n content_object = product.content_object\n readable_id = get_readable_id(content_object)\n\n merchant_fields = {\n \"merchant_defined_data1\": str(product.content_type),\n \"merchant_defined_data2\": readable_id,\n \"merchant_defined_data3\": \"1\",\n }\n\n if coupon_version is not None:\n merchant_fields[\"merchant_defined_data4\"] = coupon_version.coupon.coupon_code\n merchant_fields[\"merchant_defined_data5\"] = ( # company name\n coupon_version.payment_version.company.name\n if coupon_version.payment_version.company\n else \"\"\n )\n merchant_fields[\"merchant_defined_data6\"] = (\n coupon_version.payment_version.payment_transaction or \"\"\n )\n merchant_fields[\"merchant_defined_data7\"] = (\n coupon_version.payment_version.payment_type or \"\"\n )\n\n return {\n \"access_key\": settings.CYBERSOURCE_ACCESS_KEY,\n \"amount\": str(total),\n \"consumer_id\": order.purchaser.username,\n \"currency\": \"USD\",\n \"locale\": \"en-us\",\n **line_items,\n \"line_item_count\": order.lines.count(),\n **merchant_fields,\n \"reference_number\": order.reference_number,\n \"profile_id\": settings.CYBERSOURCE_PROFILE_ID,\n \"signed_date_time\": now_in_utc().strftime(ISO_8601_FORMAT),\n \"override_custom_receipt_page\": receipt_url,\n \"override_custom_cancel_page\": cancel_url,\n \"transaction_type\": \"sale\",\n \"transaction_uuid\": uuid.uuid4().hex,\n \"unsigned_field_names\": \"\",\n \"customer_ip_address\": ip_address if ip_address else None,\n }", "def post(self,\n partner_id):\n abort(501)", "def post(self,\n partner_id):\n abort(501)", "def 
parse_whoxy_results(self,whoxy_data,reverse=False):\n results = {}\n results['domain'] = whoxy_data['domain_name']\n # Check for the registrar information\n if \"domain_registrar\" in whoxy_data:\n results['registrar'] = whoxy_data['domain_registrar']['registrar_name']\n elif \"registrar\" in whoxy_data:\n results['registrar'] = whoxy_data['registrar_name']\n else:\n results['registrar'] = \"None Listed\"\n # Check for an expiration date\n if \"expiry_date\" in whoxy_data:\n results['expiry_date'] = whoxy_data['expiry_date']\n else:\n results['expiry_date'] = \"None Listed\"\n # Check for a company name\n if \"company_name\" in whoxy_data:\n results['organization'] = whoxy_data['registrant_contact']['company_name']\n else:\n results['organization'] = \"None Listed\"\n # Check for a registrant's name\n if \"full_name\" in whoxy_data:\n results['registrant'] = whoxy_data['registrant_contact']['full_name']\n else:\n results['registrant'] = \"None Listed\"\n # A few pieces of information are unavailable from WhoXY's reverse WHOIS lookups\n if reverse:\n results['address'] = \"Unavailable for Reverse WHOIS\"\n results['admin_contact'] = \"Unavailable for Reverse WHOIS\"\n results['tech_contact'] = \"Unavailable for Reverse WHOIS\"\n # Try to assemble different pieces of information from the record\n else:\n try:\n reg_address = whoxy_data['registrant_contact']['mailing_address']\n reg_city = whoxy_data['registrant_contact']['city_name']\n reg_state = whoxy_data['registrant_contact']['state_name']\n reg_zip = whoxy_data['registrant_contact']['zip_code']\n reg_email = whoxy_data['registrant_contact']['email_address']\n reg_phone = whoxy_data['registrant_contact']['phone_number']\n results['address'] = \"{} {}, {} {} {} {}\".format(reg_address,reg_city,reg_state,reg_zip,reg_email,reg_phone)\n except:\n results['address'] = \"None Listed\"\n try:\n admin_name = whoxy_data['administrative_contact']['full_name']\n admin_address = whoxy_data['administrative_contact']['mailing_address']\n admin_city = whoxy_data['administrative_contact']['city_name']\n admin_state = whoxy_data['administrative_contact']['state_name']\n admin_zip = whoxy_data['administrative_contact']['zip_code']\n admin_email = whoxy_data['administrative_contact']['email_address']\n admin_phone = whoxy_data['administrative_contact']['phone_number']\n results['admin_contact'] = \"{} {} {}, {} {} {} {}\".format(admin_name,admin_address,admin_city,admin_state,admin_zip,admin_email,admin_phone)\n except:\n results['admin_contact'] = \"None Listed\"\n try:\n tech_name = whoxy_data['technical_contact']['full_name']\n tech_address = whoxy_data['technical_contact']['mailing_address']\n tech_city = whoxy_data['technical_contact']['city_name']\n tech_state = whoxy_data['technical_contact']['state_name']\n tech_zip = whoxy_data['technical_contact']['zip_code']\n tech_email = whoxy_data['technical_contact']['email_address']\n tech_phone = whoxy_data['technical_contact']['phone_number']\n results['tech_contact'] = \"{} {} {}, {} {} {} {}\".format(tech_name,tech_address,tech_city,tech_state,tech_zip,tech_email,tech_phone)\n except:\n results['tech_contact'] = \"None Listed\" \n return results", "def lab03_extra_credit():\n okpy_email = \"[email protected]\"\n practice_result_code = \"xxxx...xxxxx\"\n return (okpy_email, practice_result_code)", "def getStockData():\n pass", "async def info(self, context):\n await context.send('creador: [email protected]\\ncolabs:\\n emi: https://twitter.com/emilianosce/ o https://www.instagram.com/emilianosce/ \\n 
garza: https://twitter.com/Matias_Garcia00 o https://www.twitch.tv/garzangb')", "def test_client_verification_retrieve(self):\n pass", "def init():\n output, code = partners_view.init()\n return jsonify(output), code", "def get_incoming_data(self):\n\n fields = self.cleaned_data.copy()\n action = self.cleaned_data['action']\n \n logger.debug(\"Fields that we got are %s\" % fields)\n\n return_data = {}\n\n #determine our further PoA based on the action varible passed by envaya phone\n return_data['action'] = action\n return_data['events'] = {}\n #add ISD code to any missing 'From' number missing it.\n from_number = self.cleaned_data[self.identity_name]\n if len(from_number) == 8 and from_number.startswith('7'):\n from_number = '+257' + from_number\n\n if action == 'incoming':\n logger.info(\"We have an incoming message!\")\n\n return_data['text'] = self.cleaned_data[self.text_name]\n return_data['connection'] = self.lookup_connections([from_number])[0]\n return_data['from_phone'] = self.cleaned_data['phone_number']\n\n elif action == 'outgoing':\n\n logger.info(\"Received a poll for outgoing message!\")\n return_data['events'] = [{'event': 'log', 'message': \"We do not deliver outgoing messages via EnvayaSMS Android app!\"}]\n\n elif action == 'test':\n logger.info(\"Received a test connection request!\")\n return_data['events'] = [{'event': 'log', 'message': \"Alpha to Charlie: This software by FortyPlusTwo-Hewe rocks! :)\"}]\n\n elif action == 'send_status':\n logger.error(\"NOT IMPLEMENTED: send_status action\")\n\n elif action == 'device_status':\n logger.error(\"NOT IMPLEMENTED: device_status action\")\n\n elif action == 'forward_sent':\n logger.error(\"NOT IMPLEMENTED: forward_status action\")\n\n elif action == 'amqp_started':\n logger.error(\"NOT IMPLEMENTED: amqp_status action\")\n\n else:\n logger.exception(\"UNSUPPORTED ACTION %s requested by EnvayaSMS Android app\" % action)\n raise NotImplementedError(\"Action %s not implemented!\" % action)\n\n print return_data\n return return_data", "def view_party(request):\n result = {}\n\n u = request.user\n other = Party.objects.get(id=request.POST['party_id'])\n if other in u.friends():\n # this other person is a friend so show all details\n bought = TransactionLineItem.objects.filter(transaction__party=other).order_by('-transaction__timestamp')\n wishes = Wishlist.objects.filter(party=other).order_by('-added')\n\n result['bought'] = [b.details() for b in bought[:10]]\n result['wished'] = [w.details() for w in wishes[:10]]\n \n else:\n # just show some details\n bought = TransactionLineItem.objects.filter(transaction__party=other).order_by('-transaction__timestamp')\n wishes = Wishlist.objects.filter(party=other).order_by('-added')\n\n result['bought'] = [b.details() for b in bought[:3]]\n result['wished'] = [w.details() for w in wishes[:3]]\n \n return JSONHttpResponse(result)", "def _get_uid_wh_agent(self):\n context = self._context or {}\n rp_obj = self.env['res.partner']\n ru_obj = self.env['res.users']\n ru_brw = ru_obj.browse()\n acc_part_brw = rp_obj._find_accounting_partner(\n ru_brw.company_id.partner_id)\n return acc_part_brw.wh_src_agent", "def get_info(self):\n return None", "def _online_data(self):\n\t\treport = RCReport()\n\t\t\n\t\ttry:\n\t\t\t# Importe la bonne API.\n\t\t\tapi_name = self.config.get(self.system, 'online_api')\n\t\t\tapi_class_name = 'RC' + api_name.capitalize() + 'API'\n\t\t\tapi_mod = importlib.import_module('RCOnlineAPI.' 
+ api_class_name)\n\t\t\tapi = getattr(api_mod, api_class_name)(self.system, self.config)\n\t\texcept ImportError as e:\n\t\t\treport.log('\\tOnline API \"' + api_class_name + '\" does not exist.')\n\t\t\treturn\n\t\texcept RCException as e:\n\t\t\treport.log('\\t' + e.message)\n\t\t\treturn\n\t\t\n\t\treport.log('\\tUsing \"' + api_class_name + '\" API', 2)\n\t\t\n\t\t# On récupère les langues autorisées pour la recherche.\n\t\tlang = self.config.get(self.system, 'online_data_lang').split(',')\n\t\tself.generate = True\n\t\t\n\t\t# On créé le dossier \"covers\" si besoin\n\t\tif self.config.get(self.system, 'download_covers') and not os.path.exists(COVERS_DIR):\n\t\t\tos.mkdir(COVERS_DIR)\n\t\t\n\t\tfor (game, infos) in self.games.items():\n\t\t\t# On ne cherche pas de données si il y en a déjà ou si aucune donnée n'a été trouvée la fois précédente avec l'API utilisée.\n\t\t\tif infos['onlineData']['state'] or (api_name in infos['onlineData'] and not infos['onlineData'][api_name]):\n\t\t\t\tcontinue\n\t\t\telif len(lang) > 0 and lang[0] != '' and infos['country'] not in lang:\n\t\t\t\tcontinue\n\t\t\t\n\t\t\treport.log('\\tGetting data for ' + game, 2)\n\t\t\t\n\t\t\tdata = api.search(game)\n\t\t\t\n\t\t\tif data == -1:\n\t\t\t\tinfos['onlineData'][api_name] = False\n\t\t\t\treport.log('\\t\\t>> Unable to find data.', 2)\n\t\t\telif data == -2:\n\t\t\t\treport.log('\\t\\t>> HTTP Error, stop looking for online data.')\n\t\t\t\tbreak\n\t\t\telif data != None:\n\t\t\t\trelease_date = data['release_date']\n\t\t\t\tgenre = data['genre']\n\t\t\t\teditor = data['editor']\n\t\t\t\tresume = data['resume']\n\t\t\t\tnote = data['note']\n\t\t\t\trating = data['rating']\n\t\t\t\timage = data['image']\n\t\t\t\t\n\t\t\t\t# Je procède comme ceci afin d'éviter de perdre des données qui peuvent être déjà présentes\n\t\t\t\tinfos['year'] = release_date or infos['year']\n\t\t\t\tinfos['genre'] = genre or infos['genre']\n\t\t\t\tinfos['editor'] = editor or infos['editor']\n\t\t\t\tinfos['resume'] = resume or infos['resume']\n\t\t\t\tinfos['note'] = note or infos['note']\n\t\t\t\tinfos['rating'] = rating or infos['rating']\n\t\t\t\t\n\t\t\t\t# Récupération de la cover\n\t\t\t\tif image != None:\n\t\t\t\t\tfile = open('covers/' + infos['game_name'] + image['ext'], 'wb')\n\t\t\t\t\t\n\t\t\t\t\tfile.write(image['file'].read())\n\t\t\t\t\tfile.close()\n\t\t\t\t\timage['file'].close()\n\t\t\t\t\n\t\t\t\tinfos['onlineData']['state'] = True\n\t\t\t\tinfos['onlineData'][api_name] = True", "def getAutopilotInfo(self, request, context): \n\n response = droneconnect_pb2.AutopilotInfo(identifier=request.identifier,\n autopilot_firmware_version = str(self.vehicle.version),\n major_version_number = self.vehicle.version.major,\n minor_version_number = self.vehicle.version.minor,\n patch_version_number = self.vehicle.version.patch,\n release_type = self.vehicle.version.release_type(),\n release_version = self.vehicle.version.release_version(),\n stable_release = self.vehicle.version.is_stable())\n \n return response", "def extra_data(self, user, uid, response, details):\n try:\n return self.get_steam_profile(response)\n except:\n return \"\"", "def _commercial_fields(self):\n return ['website']", "def get_com_data(self):\n self.form_url_str()\n if self.__print_url: print self.com_data_full_url\n self.download_json()\n self.get_datalist_fr_json()", "def get_self_info_client(request: Request) -> ReturnDict:\n client_info_serializer = ClientInfoSerializer(request.user.client)\n customer_account = 
CustomerAccountSerializer(request.user.customeraccount)\n data = client_info_serializer.data\n data['customer_account'] = customer_account.data\n return data", "def get_data():\n return", "def main(self, name):\n\t\tapi_results = [] \n\t\tparams = self.get_search_parameters(name)\n\t\tapi_results.append(self.api_connect(params))\n\t\ttime.sleep(1.0)\n\t\tkey = api_results[0]['businesses'][0]\n\t\tbusiness_information = [key['name'], self.phone_number_organizer(key), key['rating'],\\\n\t\tkey['review_count']]\n\t\treturn business_information", "def get_servicech(self, conf, phone_num):\n\t\tpass", "def get_info(self):\n return \"TODO !\"", "def get_info(self):\n\n (app_key,app_secret,access_type) = self.get_dropbox_app_keys()\n sess = session.DropboxSession(app_key, app_secret, access_type)\n sess.set_token(self.access_token_key,self.access_token_secret)\n\n db_client = client.DropboxClient(sess)\n\n #can throw ErrorResponse\n info = db_client.account_info()\n\n message = info\n\n return message", "def get_data_from_web():\n pass", "def who():\n cleanup()\n return {'available': userlist(), 'eta': data['etas'], 'etd': data['etds'], 'lastlocation': data['lastlocation'], 'ceitloch': ceitloch(), 'reminder': data['reminder']}", "def data(self, user=None):\n return {\n \"provider\": self.BACKEND,\n \"access_token\": self.access_token,\n \"client_id\": self.client_id,\n \"honor_code\": \"true\",\n \"country\": \"US\",\n \"username\": user.username if user else \"test_username\",\n \"name\": user.first_name if user else \"test name\",\n \"email\": user.email if user else \"[email protected]\"\n }", "def config(request):\n\tlusers = User.objects.all()\n\tresp = {'location':{'lon':settings.SNP_DEFAULT_LON, 'lat':settings.SNP_DEFAULT_LAT, 'zoomlevel':settings.SNP_DEFAULT_ZOOMLEVEL},\n\t\t'poi_types':[poi_type[1] for poi_type in settings.SNP_POI_TYPES],\n\t\t'live_users':list(),\n\t\t'advertisement':dict()\n\t\t}\n\n\ttopad_dict = dict()\n\ttry:\n\t\ttopad = Top_advertisement.objects.filter(active__exact=True)[0]\n\t\ttopad_dict = dict()\n\t\ttopad_dict['title'] = topad.title\n\t\ttopad_dict['transparency'] = topad.transparency\n\t\ttopad_dict['url'] = topad.url\n\t\ttopad_dict['image'] = topad.image.url\n\texcept IndexError:\n\t\tpass\n\tresp['advertisement']['top'] = topad_dict\n\n\tsidead_dict = dict()\n\ttry:\n\t\tsidead = Side_advertisement.objects.filter(active__exact=True)[0]\n\t\tsidead_dict['title'] = sidead.title\n\t\tsidead_dict['url'] = sidead.url\n\t\tsidead_dict['image'] = sidead.image.url\n\texcept IndexError:\n\t\tpass\n\tresp['advertisement']['side'] = sidead_dict\n\n\tfor luser in lusers:\n\t\ttracks = list()\n\t\tfor track in Track.objects.filter(user=luser):\n\t\t\t# check if we have some message for this track\n\t\t\tif Message.objects.filter(track=track):\n\t\t\t\ttrack_last_time = Message.objects.filter(track=track).latest().time.isoformat()\n\t\t\t\ttracks.append({'id':track.pk, 'name':track.name,\n\t\t\t\t 'description':track.description,\n\t\t\t\t 'is_active':track.is_active,\n\t\t\t\t 'last_location_time':track_last_time})\n\n\t\t# return only users with at least one track with at least one message\n\t\tif len(tracks) > 0:\n\t\t\tresp['live_users'].append({'id':luser.id, 'username':luser.username,\n\t\t\t\t\t 'first_name':luser.first_name,\n\t\t\t\t\t 'last_name':luser.last_name,\n\t\t\t\t\t 'email':luser.email, 'phone':luser.phone,\n\t\t\t\t\t 'tracks':tracks})\n\treturn HttpResponse(simplejson.dumps(resp), mimetype='application/json')", "def get_infos(self):\n infos = 
dict()\n infos['dataset'] = self._dataset_name()\n infos['task'] = self.task\n if self.task == 'sep_clean':\n data_license = [librispeech_license]\n else:\n data_license = [librispeech_license, wham_noise_license]\n infos['licenses'] = data_license\n return infos", "def get_data(self):\r\n pass", "def fetch_data(self):", "def account_info(request):\r\n user = request.user\r\n\r\n return _api_response(request, user.safe_data())", "def get_info(self,honeypotids):\n req = {\"type\":\"get_info\",\n \"from\":self.network.mc_id,\n \"to\": honeypotids}\n expect_dict = {\"type\":\"send_info\"}\n msg_list = self.send_receive(req,honeypotids,expect_dict)\n answer = {}\n for msg in msg_list:\n answer[msg[\"from\"]] = msg[\"info\"]\n return answer", "def local_stocks_prices():\n url1 = \"https://api.invertironline.com/token\"\n\n data = {\n \"username\": usuario,\n \"password\": password,\n \"grant_type\": \"password\" \n }\n response = requests.post(url1, data=data)\n if response.status_code == 200:\n content = response.text\n access_key = token_key(content)\n\n url2 = f'https://api.invertironline.com/api/v2/Cotizaciones/Acciones/Merval/argentina'\n datos = requests.get(url2, headers={\n 'Authorization': 'Bearer '+access_key\n })\n datos = json.loads(datos.text)\n datos = datos['titulos']\n datos = clean_assets(datos)\n return datos", "def accounts():", "def get_data(self):\n pass", "def get_data(self):\n pass", "def parse_url(url):\n results = NotifyBase.parse_url(url, verify_host=False)\n if not results:\n # We're done early as we couldn't load the results\n return results\n\n # Store our access code\n access_token = NotifyStreamlabs.unquote(results['host'])\n results['access_token'] = access_token\n\n # call\n if 'call' in results['qsd'] and results['qsd']['call']:\n results['call'] = NotifyStreamlabs.unquote(\n results['qsd']['call'].strip().upper())\n # donation - amount\n if 'amount' in results['qsd'] and results['qsd']['amount']:\n results['amount'] = NotifyStreamlabs.unquote(\n results['qsd']['amount'])\n # donation - currency\n if 'currency' in results['qsd'] and results['qsd']['currency']:\n results['currency'] = NotifyStreamlabs.unquote(\n results['qsd']['currency'].strip().upper())\n # donation - name\n if 'name' in results['qsd'] and results['qsd']['name']:\n results['name'] = NotifyStreamlabs.unquote(\n results['qsd']['name'].strip().upper())\n # donation - identifier\n if 'identifier' in results['qsd'] and results['qsd']['identifier']:\n results['identifier'] = NotifyStreamlabs.unquote(\n results['qsd']['identifier'].strip().upper())\n # alert - alert_type\n if 'alert_type' in results['qsd'] and results['qsd']['alert_type']:\n results['alert_type'] = NotifyStreamlabs.unquote(\n results['qsd']['alert_type'])\n # alert - image_href\n if 'image_href' in results['qsd'] and results['qsd']['image_href']:\n results['image_href'] = NotifyStreamlabs.unquote(\n results['qsd']['image_href'])\n # alert - sound_href\n if 'sound_href' in results['qsd'] and results['qsd']['sound_href']:\n results['sound_href'] = NotifyStreamlabs.unquote(\n results['qsd']['sound_href'].strip().upper())\n # alert - duration\n if 'duration' in results['qsd'] and results['qsd']['duration']:\n results['duration'] = NotifyStreamlabs.unquote(\n results['qsd']['duration'].strip().upper())\n # alert - special_text_color\n if 'special_text_color' in results['qsd'] \\\n and results['qsd']['special_text_color']:\n results['special_text_color'] = NotifyStreamlabs.unquote(\n 
results['qsd']['special_text_color'].strip().upper())\n\n return results", "def test_client_nationlity_retrieve(self):\n pass", "def runAnalytics():\n #gets OAuth from the API\n analytics = get_Analytics_service()\n #get the object return from the API\n #send that object to print out useful fields\n response = get_report(analytics)\n print_response(response)", "def get_partner_requests(request):\n try:\n partner_requests = PartnerRequest.objects.filter(to_user=request.user)\n except:\n partner_requests = []\n\n return partner_requests" ]
[ "0.62221545", "0.56236756", "0.56236756", "0.5579682", "0.5457257", "0.5414618", "0.5382873", "0.5349557", "0.53429776", "0.5338125", "0.5333075", "0.53239155", "0.5318248", "0.5313936", "0.53106076", "0.5300438", "0.528713", "0.52863556", "0.52863556", "0.528309", "0.5242493", "0.52211165", "0.52005583", "0.5187517", "0.5162123", "0.51608396", "0.51598436", "0.5112094", "0.5104729", "0.5103881", "0.50963", "0.5087667", "0.5084241", "0.50824", "0.5071942", "0.50688374", "0.5066455", "0.50618774", "0.50616634", "0.5058476", "0.5048972", "0.5025594", "0.5021749", "0.50172764", "0.49990255", "0.49986178", "0.49974847", "0.49894542", "0.49894542", "0.49720117", "0.49711353", "0.49646822", "0.49623203", "0.49623203", "0.49489766", "0.49459228", "0.49412006", "0.4941075", "0.49395558", "0.49252185", "0.49140123", "0.49140123", "0.49083754", "0.49073812", "0.48938", "0.4885585", "0.48850584", "0.48831856", "0.48783916", "0.4876319", "0.4874738", "0.48689792", "0.48678026", "0.48664492", "0.4862322", "0.4861348", "0.48602504", "0.48533988", "0.48440075", "0.48426527", "0.4838257", "0.48370284", "0.48308468", "0.4820755", "0.48178896", "0.4817492", "0.48173314", "0.481589", "0.48004037", "0.4788735", "0.47880468", "0.47834393", "0.47713283", "0.47652733", "0.47575593", "0.47575593", "0.4756295", "0.4750324", "0.47493172", "0.4746309" ]
0.5107046
28
Security assessment on a resource response format
def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, additional_data: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, assessment_name: Optional[pulumi.Input[str]] = None, metadata: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentMetadataPropertiesArgs']]] = None, partners_data: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentPartnerDataArgs']]] = None, resource_details: Optional[pulumi.Input[Union[pulumi.InputType['AzureResourceDetailsArgs'], pulumi.InputType['OnPremiseResourceDetailsArgs'], pulumi.InputType['OnPremiseSqlResourceDetailsArgs']]]] = None, resource_id: Optional[pulumi.Input[str]] = None, status: Optional[pulumi.Input[pulumi.InputType['AssessmentStatusArgs']]] = None, __props__=None): ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_response(self, response):\n pass", "def get_secured():\n return jsonify({'isSecured': config.requires_auth()})", "def getSecurity(self):\n return self.client.get(self.name +\"/_security\").getBodyData()", "def ExtractSecurityMarksFromResponse(response, args):\n del args\n list_asset_response = list(response)\n assert list_asset_response, (\"Asset or resource does not exist.\")\n assert len(list_asset_response) == 1, (\n \"ListAssetResponse must only return one asset since it is filtered \"\n \"by Asset Name.\")\n for asset_result in list_asset_response:\n return asset_result.asset.securityMarks", "def parse_resource(self, skip_deprecated=False):\n self.resource = parse_resource(self, skip_deprecated=skip_deprecated)\n if self.resource:\n self.authenticate()\n resource = getattr(self.v2, self.resource)\n if is_control_resource(self.resource):\n # control resources are special endpoints that you can only\n # do an HTTP GET to, and which return plain JSON metadata\n # examples are `/api/v2/ping/`, `/api/v2/config/`, etc...\n if self.help:\n self.subparsers[self.resource].print_help()\n raise SystemExit()\n self.method = 'get'\n response = getattr(resource, self.method)()\n else:\n response = self.parse_action(resource)\n\n _filter = self.get_config('filter')\n\n # human format for metrics, settings is special\n if (\n self.resource in ('metrics', 'settings') and\n self.get_config('format') == 'human'\n ):\n response.json = {\n 'count': len(response.json),\n 'results': [\n {'key': k, 'value': v}\n for k, v in response.json.items()\n ]\n }\n _filter = 'key, value'\n\n if (\n self.get_config('format') == 'human' and\n _filter == '.' and\n self.resource in UNIQUENESS_RULES\n ):\n _filter = ', '.join(UNIQUENESS_RULES[self.resource])\n\n formatted = format_response(\n response,\n fmt=self.get_config('format'),\n filter=_filter,\n changed=self.original_action in (\n 'modify', 'create', 'associate', 'disassociate'\n )\n )\n if formatted:\n print(utils.to_str(formatted), file=self.stdout)\n if hasattr(response, 'rc'):\n raise SystemExit(response.rc)\n else:\n self.parser.print_help()", "def check_vulnerability_in_result(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for vulnerability in vulnerabilities:\n assert \"cvss\" in vulnerability\n assert \"is_private\" in vulnerability\n assert \"vendor_cve_ids\" in vulnerability", "def test_unauthenticated_resource_allowed(self):\n raise NotImplementedError # FIXME", "def validate(self, data):\n # if data['is_private'] and data['contestants']:\n # raise serializers.ValidationError(\"Can not be private and compete for an award.\")\n return data", "def private_resource():\n return create_response(\n status_value=True,\n code=200,\n message=\"You have accessed the private resource.\"\n )", "def auth_failure():\n return \"Request denied due to failed authorization\", 201, {'Content-Type': 'text/html'}", "def testsecurity(self,id=0):\n return 'failed test security'", "def verify(self, response):", "def protected():\n return jsonify(message=f'protected endpoint (allowed user {flask_praetorian.current_user().username})')", "def getResourceDef(url, user, pWd, resourceName):\n \n print(\"getting resource for catalog:-\" + url + \" resource=\" + resourceName +\n ' user=' + user)\n apiURL = url + '/access/1/catalog/resources/' + resourceName\n # print(\"\\turl=\" + apiURL)\n header = {\"Accept\": \"application/json\"} \n tResp = 
requests.get(apiURL, params={}, headers=header, auth=HTTPBasicAuth(user,pWd))\n print(\"\\tresponse=\" + str(tResp.status_code))\n if tResp.status_code == 200:\n # valid - return the jsom\n return tResp.status_code, json.loads(tResp.text)\n else:\n # not valid\n return tResp.status_code, None", "def view_deny_page():\n response = make_response()\n response.data = ANGRY_ASCII\n response.content_type = \"text/plain\"\n return response\n # return \"YOU SHOULDN'T BE HERE\"", "def _process_resource(cls, resource):\n urn = resource['component_id']\n hrn, type = urn_to_hrn(resource['component_id'])\n\n resource['urn'] = urn\n resource['hrn'] = hrn\n\n resource['network_hrn'] = Xrn(resource['component_id']).authority[0] # network ? XXX\n\n # We also add 'facility' and 'testbed' fields\n resource['facility_name'] = cls.get_resource_facility_name(urn)\n resource['testbed_name'] = cls.get_resource_testbed_name(urn)\n\n if 'exclusive' not in resource:\n resource['exclusive'] = 'true'\n elif resource['exclusive'] is None:\n resource['exclusive'] = 'true'\n else:\n Log.warning(\"EXCLUSIVE = \",resource['exclusive'])\n\n #if 'location' in node:\n # if node['location']:\n # node['latitude'] = node['location']['latitude']\n # node['longitude'] = node['location']['longitude']\n # del node['location']\n #else:\n # if the location is not provided, aproximate it from the city\n t_urn = resource['urn'].split('+')\n city = t_urn[3].split('.')[1]\n if city == 'iii':\n city = 'Institute for Information Industry, Taïwan 106'\n resource['country'] = 'Taiwan'\n else:\n resource['country'] = 'France'\n location = cls.get_location(city)\n if location is not None:\n resource['latitude'] = str(location.latitude)\n resource['longitude'] = str(location.longitude)\n\n return resource", "def check_for_exposed(context):\n json_data = context.response.json()\n if \"exploitable_vulnerabilities_count\" in json_data:\n raise Exception(\"Field exploitable_vulnerabilities_count Exposed in\"\n \" Free user result\")\n if \"vendor_package_link\" in json_data:\n raise Exception(\"Field vendor_package_link has been exposed for free user\")", "def test_text_get_logged_in(self):\n\n resource = Resource(AnonymousTextHandler)\n request = HttpRequest()\n user = User.objects.get(pk=1)\n setattr(request, 'user' , user)\n request.method = 'GET'\n \n response = resource(request, key='text_key_3', emitter_format='json')\n self.assertEquals(200, response.status_code)", "def response_handling(self) -> global___Snippet.SimpleResponseHandling:", "def response_handling(self) -> global___Snippet.SimpleResponseHandling:", "def test_client_risk_assessment_retrieve(self):\n pass", "def mex_validation(resource):\n resource_name = [n for n in list(resource._fields) if getattr(resource,n) != '']\n for name in list(resource_name):\n url = getattr(resource,name)\n log.debug(\"resource: %s\" % url)\n try:\n o = urlparse.urlsplit(url)\n url_path = o.path\n log.debug('url_path :%s' % url_path)\n m = re.match('\\/(?P<service>[\\w-]+)\\/(image[s]?\\/|)(?P<id>[\\w-]+)', url_path)\n if m is not None:\n if m.group('service') == 'image_service' or m.group('service') == 'data_service': #check for data_service\n if 'pixels' not in url_path: #if false requires a redirect\n ident = m.group('id') #seaching a plan image_service or data_service url\n if check_access(ident) is True:\n continue #check next resource\n\n# # Try to route internally through bisque\n# resp = request_internally(url)\n# if resp.status_int < 400:\n# if resp.status_int == 302:\n# #reset the url to the 
redirected url\n# redirect_url = resp.headers.get('Location')\n# if redirect_url is not None: #did not find the redirect\n# log.debug('Redirect Url: %s' % redirect_url)\n# resource = resource._replace(**{name:redirect_url})\n# continue\n# else:\n# continue\n\n # Try to route externally\n resp = request_externally(url)\n if resp.status_code < 400:\n if resp.status_code == 302:\n #reset the url to the redirected url\n redirect_url = resp.headers.get('Location')\n if redirect_url is not None: #did not find the redirect\n log.debug('Redirect Url: %s' % redirect_url)\n resource = resource._replace(**{name:redirect_url})\n continue\n else:\n continue\n\n raise InvalidResourceError(resource_url=url, error_code=403, error_message='Resource: %s Not Found' % url)\n\n except StandardError:\n log.exception (\"While retrieving URL %s\" %str(resource))\n raise InvalidResourceError(resource_url=url, error_code=403, error_message='Resource: %s Not Found' % url)\n\n return resource", "def _get_nitro_response(self, service, response) :\n\t\ttry :\n\t\t\tresult = service.payload_formatter.string_to_resource(audit_response, response, self.__class__.__name__.replace('_stats',''))\n\t\t\tif(result.errorcode != 0) :\n\t\t\t\tif (result.errorcode == 444) :\n\t\t\t\t\tservice.clear_session(self)\n\t\t\t\tif result.severity :\n\t\t\t\t\tif (result.severity == \"ERROR\") :\n\t\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\t\telse :\n\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\treturn result.audit\n\t\texcept Exception as e :\n\t\t\traise e", "def resource_details(self) -> pulumi.Output[Any]:\n return pulumi.get(self, \"resource_details\")", "def resource_details(self) -> pulumi.Output[Any]:\n return pulumi.get(self, \"resource_details\")", "def get_resource_state():\n output = [f'{\"S. 
No.\":6}\\t{\"Resource\":50}\\t{\"Health State\":12}\\t{\"Reason\":100}\\n']\n\n for index, resource in enumerate(HEALTH_AGGREGATOR.resource_state):\n output.append(\n f'{index + 1:<6}\\t{resource:<50}\\t'\n f'{\"Healthy\" if HEALTH_AGGREGATOR.resource_state[resource][\"is_healthy\"] else \"Unhealthy\":<12}\\t'\n f'{HEALTH_AGGREGATOR.resource_state[resource][\"reason\"]:<100}\\n'\n )\n\n return Response('\\n'.join(output), 200, mimetype='text/plain')", "def check_vulnerability(self):\n\t\tpass", "def test_security_on_get(self):\n # test the listing url\n product = Product.objects.all()[0]\n url = '/product/xml/'\n response = self.client.get(url)\n self.failUnlessEqual(response.status_code, 401)\n # test the product detail url\n url = '/product/xml/%s/' % product.item_number\n Response = self.client.get(url)\n self.failUnlessEqual(response.status_code, 401)", "def __getattr__(self, attr):\n actual_resource = getattr(self.swagger_client, attr)\n if attr in [\"Authorization\", \"Effects\", \"Identify\", \"Info\",\n \"PanelLayout\", \"State\"]:\n return WrappedResource(actual_resource, attr)\n else:\n return actual_resource", "def main_response(self, data):", "def main_response(self, data):", "def test_detail_is_hacker_permission(self):\n self.user_1.username = 'pythonhacker'\n self.user_1.save()\n\n token = Token.objects.create(user=self.user_1)\n headers = {\n 'HTTP_AUTHORIZATION': 'Token ' + str(token)\n }\n response = self.client.get(\n '/api/products/{}/'.format(self.product_1.id), **headers)\n\n expected = {'detail': 'You do not have permission to perform this action.'}\n self.assertEqual(response.status_code, 403)\n self.assertEqual(response.json(), expected)", "def get_response_serializers(self):\n responses = OrderedDict({\n '400': 'Invalid arguments',\n '401': 'Not authenticated',\n '403': \"You don't have access to do this operation on this company\",\n 'error': ErrorSerializer,\n })\n\n responses.update(super().get_response_serializers())\n\n return responses", "def response_unauthorised():\n\n response = {\n 'status': 'failed',\n 'error': 'Not Authorised'\n }\n\n return response_json(response, status=401)", "def get_student_response(request):\n unique_id = request.session.get('unique_id')\n\n try:\n if request.method != 'GET':\n raise NotImplementedError('Only GET is allowed on this endpoint.')\n\n params = ast.literal_eval(Parameters.objects.filter(key=unique_id)[0].value)\n\n\n bank_id = unquote(params['custom_bank_id'])\n taken_id = unquote(params['taken_id'])\n question_id = unquote(params['question_id'])\n student_req = AssessmentRequests(unique_id,'taaccct_student')\n\n\n response_url = (student_req.url + bank_id + \"/assessmentstaken/\" + taken_id +\n \"/questions/\" + question_id + '/responses/')\n student_response = student_req.get(response_url)\n if student_response.status_code == 200:\n data = student_response.json()\n else:\n raise LookupError\n\n return HttpResponse(json.dumps(data), content_type='application/json')\n except LookupError:\n raise Http404('Bank, assessment taken, question, or response not found.')\n except Exception as ex:\n import logging\n logging.info('get_response exception: ' + str(ex.args[0]))\n raise Http404", "def SecurityPolicyFromFile(input_file, messages, file_format):\n\n if file_format == 'yaml':\n parsed_security_policy = yaml.load(input_file)\n else:\n try:\n parsed_security_policy = json.load(input_file)\n except ValueError as e:\n raise exceptions.BadFileException('Error parsing JSON: {0}'.format(\n six.text_type(e)))\n\n 
security_policy = messages.SecurityPolicy()\n if 'description' in parsed_security_policy:\n security_policy.description = parsed_security_policy['description']\n if 'fingerprint' in parsed_security_policy:\n security_policy.fingerprint = base64.urlsafe_b64decode(\n parsed_security_policy['fingerprint'].encode('ascii'))\n if 'type' in parsed_security_policy:\n security_policy.type = (\n messages.SecurityPolicy.TypeValueValuesEnum(\n parsed_security_policy['type']))\n if 'cloudArmorConfig' in parsed_security_policy:\n security_policy.cloudArmorConfig = messages.SecurityPolicyCloudArmorConfig(\n enableMl=parsed_security_policy['cloudArmorConfig']['enableMl'])\n if 'adaptiveProtectionConfig' in parsed_security_policy:\n security_policy.adaptiveProtectionConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfig(\n layer7DdosDefenseConfig=messages\n .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(\n enable=parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['enable'])))\n if 'ruleVisibility' in parsed_security_policy['adaptiveProtectionConfig'][\n 'layer7DdosDefenseConfig']:\n security_policy.adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility = (\n messages.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig\n .RuleVisibilityValueValuesEnum(\n parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['ruleVisibility']))\n if 'advancedOptionsConfig' in parsed_security_policy:\n security_policy.advancedOptionsConfig = (\n messages.SecurityPolicyAdvancedOptionsConfig())\n if 'jsonParsing' in parsed_security_policy['advancedOptionsConfig']:\n security_policy.advancedOptionsConfig.jsonParsing = (\n messages.SecurityPolicyAdvancedOptionsConfig\n .JsonParsingValueValuesEnum(\n parsed_security_policy['advancedOptionsConfig']['jsonParsing']))\n if 'logLevel' in parsed_security_policy['advancedOptionsConfig']:\n security_policy.advancedOptionsConfig.logLevel = (\n messages.SecurityPolicyAdvancedOptionsConfig.LogLevelValueValuesEnum(\n parsed_security_policy['advancedOptionsConfig']['logLevel']))\n if 'ddosProtectionConfig' in parsed_security_policy:\n security_policy.ddosProtectionConfig = (\n messages.SecurityPolicyDdosProtectionConfig(\n ddosProtection=messages.SecurityPolicyDdosProtectionConfig\n .DdosProtectionValueValuesEnum(\n parsed_security_policy['ddosProtectionConfig']\n ['ddosProtection'])))\n\n rules = []\n for rule in parsed_security_policy['rules']:\n security_policy_rule = messages.SecurityPolicyRule()\n security_policy_rule.action = rule['action']\n if 'description' in rule:\n security_policy_rule.description = rule['description']\n match = messages.SecurityPolicyRuleMatcher()\n if 'srcIpRanges' in rule['match']:\n match.srcIpRanges = rule['match']['srcIpRanges']\n if 'versionedExpr' in rule['match']:\n match.versionedExpr = ConvertToEnum(rule['match']['versionedExpr'],\n messages)\n if 'expr' in rule['match']:\n match.expr = messages.Expr(expression=rule['match']['expr']['expression'])\n if 'config' in rule['match']:\n if 'srcIpRanges' in rule['match']['config']:\n match.config = messages.SecurityPolicyRuleMatcherConfig(\n srcIpRanges=rule['match']['config']['srcIpRanges'])\n security_policy_rule.match = match\n security_policy_rule.priority = int(rule['priority'])\n if 'preview' in rule:\n security_policy_rule.preview = rule['preview']\n rules.append(security_policy_rule)\n if 'redirectTarget' in rule:\n security_policy_rule.redirectTarget = rule['redirectTarget']\n if 'ruleNumber' in rule:\n 
security_policy_rule.ruleNumber = int(rule['ruleNumber'])\n if 'redirectOptions' in rule:\n redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rule['redirectOptions']:\n redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rule['redirectOptions']['type']))\n if 'target' in rule['redirectOptions']:\n redirect_options.target = rule['redirectOptions']['target']\n security_policy_rule.redirectOptions = redirect_options\n if 'headerAction' in rule:\n header_action = messages.SecurityPolicyRuleHttpHeaderAction()\n headers_to_add = []\n for header_to_add in rule['headerAction']['requestHeadersToAdds']:\n headers_to_add.append(\n messages.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption(\n headerName=header_to_add['headerName'],\n headerValue=header_to_add['headerValue']))\n header_action.requestHeadersToAdds = headers_to_add\n security_policy_rule.headerAction = header_action\n if 'rateLimitOptions' in rule:\n rate_limit_options = rule['rateLimitOptions']\n security_policy_rule.rateLimitOptions = (\n messages.SecurityPolicyRuleRateLimitOptions(\n rateLimitThreshold=messages\n .SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['rateLimitThreshold']['count'],\n intervalSec=rate_limit_options['rateLimitThreshold']\n ['intervalSec']),\n conformAction=rate_limit_options['conformAction'],\n exceedAction=rate_limit_options['exceedAction'],\n banThreshold=messages.SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['banThreshold']['count'],\n intervalSec=rate_limit_options['banThreshold']\n ['intervalSec']),\n banDurationSec=rate_limit_options['banDurationSec']))\n if 'enforceOnKey' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKey = (\n messages.SecurityPolicyRuleRateLimitOptions\n .EnforceOnKeyValueValuesEnum(rate_limit_options['enforceOnKey']))\n if 'enforceOnKeyName' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKeyName = (\n rate_limit_options['enforceOnKeyName'])\n\n security_policy.rules = rules\n\n return security_policy", "def meta():\n\n if current_user.is_anonymous:\n return {\"status\": 401, \"error\": \"Not Authenticated\"}, 401\n\n return {\n \"id\": current_user.id,\n \"name\": current_user.name,\n \"email\": current_user.email,\n \"access\": current_user.access,\n }, 200", "def _process_resource(cls, resource):\n urn = resource['component_id']\n hrn, type = urn_to_hrn(resource['component_id'])\n\n resource['urn'] = urn\n resource['hrn'] = hrn\n\n resource['network_hrn'] = Xrn(resource['component_id']).authority[0] # network ? 
XXX\n\n # We also add 'facility' and 'testbed' fields\n resource['facility_name'] = cls.get_resource_facility_name(urn)\n resource['testbed_name'] = cls.get_resource_testbed_name(urn)\n\n return resource", "def security(self) -> pulumi.Output['outputs.ServiceSecurity']:\n return pulumi.get(self, \"security\")", "def check_for_private_vul(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for v in vulnerabilities:\n if v[\"is_private\"]:\n return\n raise Exception(\"No private vulnerability found\")", "def SecurityPolicyFromFile(input_file, messages, file_format):\n\n if file_format == 'yaml':\n parsed_security_policy = yaml.load(input_file)\n else:\n try:\n parsed_security_policy = json.load(input_file)\n except ValueError as e:\n raise exceptions.BadFileException('Error parsing JSON: {0}'.format(\n six.text_type(e)))\n\n security_policy = messages.SecurityPolicy()\n if 'description' in parsed_security_policy:\n security_policy.description = parsed_security_policy['description']\n if 'fingerprint' in parsed_security_policy:\n security_policy.fingerprint = base64.urlsafe_b64decode(\n parsed_security_policy['fingerprint'].encode('ascii'))\n if 'type' in parsed_security_policy:\n security_policy.type = (\n messages.SecurityPolicy.TypeValueValuesEnum(\n parsed_security_policy['type']))\n if 'cloudArmorConfig' in parsed_security_policy:\n security_policy.cloudArmorConfig = messages.SecurityPolicyCloudArmorConfig(\n enableMl=parsed_security_policy['cloudArmorConfig']['enableMl'])\n if 'adaptiveProtectionConfig' in parsed_security_policy:\n security_policy.adaptiveProtectionConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfig(\n layer7DdosDefenseConfig=messages\n .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(\n enable=parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['enable']),))\n if 'autoDeployConfig' in parsed_security_policy['adaptiveProtectionConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfigAutoDeployConfig())\n if 'loadThreshold' in parsed_security_policy['adaptiveProtectionConfig'][\n 'autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.loadThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['loadThreshold'])\n if 'confidenceThreshold' in parsed_security_policy[\n 'adaptiveProtectionConfig']['autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.confidenceThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['confidenceThreshold'])\n if 'impactedBaselineThreshold' in parsed_security_policy[\n 'adaptiveProtectionConfig']['autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.impactedBaselineThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['impactedBaselineThreshold'])\n if 'expirationSec' in parsed_security_policy['adaptiveProtectionConfig'][\n 'autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.expirationSec = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['expirationSec'])\n if 'ruleVisibility' in parsed_security_policy['adaptiveProtectionConfig'][\n 'layer7DdosDefenseConfig']:\n security_policy.adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility = (\n 
messages.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig\n .RuleVisibilityValueValuesEnum(\n parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['ruleVisibility']))\n if 'advancedOptionsConfig' in parsed_security_policy:\n advanced_options_config = parsed_security_policy['advancedOptionsConfig']\n security_policy.advancedOptionsConfig = (\n messages.SecurityPolicyAdvancedOptionsConfig())\n if 'jsonParsing' in advanced_options_config:\n security_policy.advancedOptionsConfig.jsonParsing = (\n messages.SecurityPolicyAdvancedOptionsConfig\n .JsonParsingValueValuesEnum(\n advanced_options_config['jsonParsing']))\n if 'jsonCustomConfig' in advanced_options_config:\n security_policy.advancedOptionsConfig.jsonCustomConfig = (\n messages.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig(\n contentTypes=advanced_options_config\n ['jsonCustomConfig'].get('contentTypes', [])))\n if 'logLevel' in advanced_options_config:\n security_policy.advancedOptionsConfig.logLevel = (\n messages.SecurityPolicyAdvancedOptionsConfig.LogLevelValueValuesEnum(\n advanced_options_config['logLevel']))\n if 'userIpRequestHeaders' in advanced_options_config:\n security_policy.advancedOptionsConfig.userIpRequestHeaders = (\n advanced_options_config['userIpRequestHeaders'])\n if 'ddosProtectionConfig' in parsed_security_policy:\n security_policy.ddosProtectionConfig = (\n messages.SecurityPolicyDdosProtectionConfig(\n ddosProtection=messages.SecurityPolicyDdosProtectionConfig\n .DdosProtectionValueValuesEnum(\n parsed_security_policy['ddosProtectionConfig']\n ['ddosProtection'])))\n if 'recaptchaOptionsConfig' in parsed_security_policy:\n security_policy.recaptchaOptionsConfig = (\n messages.SecurityPolicyRecaptchaOptionsConfig())\n if 'redirectSiteKey' in parsed_security_policy['recaptchaOptionsConfig']:\n security_policy.recaptchaOptionsConfig.redirectSiteKey = (\n parsed_security_policy['recaptchaOptionsConfig']['redirectSiteKey'])\n\n if 'userDefinedFields' in parsed_security_policy:\n user_defined_fields = []\n for udf in parsed_security_policy['userDefinedFields']:\n user_defined_field = messages.SecurityPolicyUserDefinedField()\n user_defined_field.name = udf['name']\n user_defined_field.base = (\n messages.SecurityPolicyUserDefinedField.BaseValueValuesEnum(\n udf['base']\n )\n )\n user_defined_field.offset = udf['offset']\n user_defined_field.size = udf['size']\n if 'mask' in udf:\n user_defined_field.mask = udf['mask']\n user_defined_fields.append(user_defined_field)\n security_policy.userDefinedFields = user_defined_fields\n\n rules = []\n for rule in parsed_security_policy['rules']:\n security_policy_rule = messages.SecurityPolicyRule()\n security_policy_rule.action = rule['action']\n if 'description' in rule:\n security_policy_rule.description = rule['description']\n if 'match' in rule:\n match = messages.SecurityPolicyRuleMatcher()\n if 'versionedExpr' in rule['match']:\n match.versionedExpr = ConvertToEnum(\n rule['match']['versionedExpr'], messages\n )\n if 'expr' in rule['match']:\n match.expr = messages.Expr(\n expression=rule['match']['expr']['expression']\n )\n if 'exprOptions' in rule['match']:\n expr_options = messages.SecurityPolicyRuleMatcherExprOptions()\n if 'recaptchaOptions' in rule['match']['exprOptions']:\n expr_options.recaptchaOptions = (\n messages.SecurityPolicyRuleMatcherExprOptionsRecaptchaOptions(\n actionTokenSiteKeys=rule['match']['exprOptions'][\n 'recaptchaOptions'\n ].get('actionTokenSiteKeys', []),\n 
sessionTokenSiteKeys=rule['match']['exprOptions'][\n 'recaptchaOptions'\n ].get('sessionTokenSiteKeys', []),\n )\n )\n match.exprOptions = expr_options\n if 'config' in rule['match']:\n if 'srcIpRanges' in rule['match']['config']:\n match.config = messages.SecurityPolicyRuleMatcherConfig(\n srcIpRanges=rule['match']['config']['srcIpRanges']\n )\n security_policy_rule.match = match\n if 'networkMatch' in rule:\n network_match = messages.SecurityPolicyRuleNetworkMatcher()\n if 'userDefinedFields' in rule['networkMatch']:\n user_defined_fields = []\n for udf in rule['networkMatch']['userDefinedFields']:\n user_defined_field_match = (\n messages.SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch()\n )\n user_defined_field_match.name = udf['name']\n user_defined_field_match.values = udf['values']\n user_defined_fields.append(user_defined_field_match)\n network_match.userDefinedFields = user_defined_fields\n if 'srcIpRanges' in rule['networkMatch']:\n network_match.srcIpRanges = rule['networkMatch']['srcIpRanges']\n if 'destIpRanges' in rule['networkMatch']:\n network_match.destIpRanges = rule['networkMatch']['destIpRanges']\n if 'ipProtocols' in rule['networkMatch']:\n network_match.ipProtocols = rule['networkMatch']['ipProtocols']\n if 'srcPorts' in rule['networkMatch']:\n network_match.srcPorts = rule['networkMatch']['srcPorts']\n if 'destPorts' in rule['networkMatch']:\n network_match.destPorts = rule['networkMatch']['destPorts']\n if 'srcRegionCodes' in rule['networkMatch']:\n network_match.srcRegionCodes = rule['networkMatch']['srcRegionCodes']\n if 'srcAsns' in rule['networkMatch']:\n network_match.srcAsns = rule['networkMatch']['srcAsns']\n security_policy_rule.networkMatch = network_match\n security_policy_rule.priority = int(rule['priority'])\n if 'preview' in rule:\n security_policy_rule.preview = rule['preview']\n rules.append(security_policy_rule)\n if 'redirectTarget' in rule:\n security_policy_rule.redirectTarget = rule['redirectTarget']\n if 'ruleNumber' in rule:\n security_policy_rule.ruleNumber = int(rule['ruleNumber'])\n if 'redirectOptions' in rule:\n redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rule['redirectOptions']:\n redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rule['redirectOptions']['type']))\n if 'target' in rule['redirectOptions']:\n redirect_options.target = rule['redirectOptions']['target']\n security_policy_rule.redirectOptions = redirect_options\n if 'headerAction' in rule:\n header_action = messages.SecurityPolicyRuleHttpHeaderAction()\n headers_in_rule = rule['headerAction'].get('requestHeadersToAdds', [])\n headers_to_add = []\n for header_to_add in headers_in_rule:\n headers_to_add.append(\n messages.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption(\n headerName=header_to_add['headerName'],\n headerValue=header_to_add['headerValue']))\n if headers_to_add:\n header_action.requestHeadersToAdds = headers_to_add\n security_policy_rule.headerAction = header_action\n if 'rateLimitOptions' in rule:\n rate_limit_options = rule['rateLimitOptions']\n security_policy_rule.rateLimitOptions = (\n messages.SecurityPolicyRuleRateLimitOptions(\n rateLimitThreshold=messages\n .SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['rateLimitThreshold']['count'],\n intervalSec=rate_limit_options['rateLimitThreshold']\n ['intervalSec']),\n conformAction=rate_limit_options['conformAction'],\n exceedAction=rate_limit_options['exceedAction']))\n if 'exceedActionRpcStatus' in 
rate_limit_options:\n exceed_action_rpc_status = (\n messages.SecurityPolicyRuleRateLimitOptionsRpcStatus()\n )\n if 'code' in rate_limit_options['exceedActionRpcStatus']:\n exceed_action_rpc_status.code = rate_limit_options[\n 'exceedActionRpcStatus']['code']\n if 'message' in rate_limit_options['exceedActionRpcStatus']:\n exceed_action_rpc_status.message = rate_limit_options[\n 'exceedActionRpcStatus']['message']\n security_policy_rule.rateLimitOptions.exceedActionRpcStatus = (\n exceed_action_rpc_status\n )\n if 'exceedRedirectOptions' in rate_limit_options:\n exceed_redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rate_limit_options['exceedRedirectOptions']:\n exceed_redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rate_limit_options['exceedRedirectOptions']['type']))\n if 'target' in rate_limit_options['exceedRedirectOptions']:\n exceed_redirect_options.target = rate_limit_options[\n 'exceedRedirectOptions']['target']\n security_policy_rule.rateLimitOptions.exceedRedirectOptions = (\n exceed_redirect_options)\n if 'banThreshold' in rate_limit_options:\n security_policy_rule.rateLimitOptions.banThreshold = (\n messages.SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['banThreshold']['count'],\n intervalSec=rate_limit_options['banThreshold']['intervalSec']))\n if 'banDurationSec' in rate_limit_options:\n security_policy_rule.rateLimitOptions.banDurationSec = (\n rate_limit_options['banDurationSec'])\n if 'enforceOnKey' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKey = (\n messages.SecurityPolicyRuleRateLimitOptions\n .EnforceOnKeyValueValuesEnum(rate_limit_options['enforceOnKey']))\n if 'enforceOnKeyName' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKeyName = (\n rate_limit_options['enforceOnKeyName'])\n if 'preconfiguredWafConfig' in rule:\n preconfig_waf_config = messages.SecurityPolicyRulePreconfiguredWafConfig()\n for exclusion in rule['preconfiguredWafConfig'].get('exclusions', []):\n exclusion_to_add = (\n messages.SecurityPolicyRulePreconfiguredWafConfigExclusion())\n if 'targetRuleSet' in exclusion:\n exclusion_to_add.targetRuleSet = exclusion['targetRuleSet']\n for target_rule_id in exclusion.get('targetRuleIds', []):\n exclusion_to_add.targetRuleIds.append(target_rule_id)\n for request_header in exclusion.get('requestHeadersToExclude', []):\n exclusion_to_add.requestHeadersToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_header,\n messages))\n for request_cookie in exclusion.get('requestCookiesToExclude', []):\n exclusion_to_add.requestCookiesToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_cookie,\n messages))\n for request_query_param in exclusion.get('requestQueryParamsToExclude',\n []):\n exclusion_to_add.requestQueryParamsToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_query_param,\n messages))\n for request_uri in exclusion.get('requestUrisToExclude', []):\n exclusion_to_add.requestUrisToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_uri, messages))\n preconfig_waf_config.exclusions.append(exclusion_to_add)\n security_policy_rule.preconfiguredWafConfig = preconfig_waf_config\n\n security_policy.rules = rules\n\n return security_policy", "def supports(self, resource, resourceType = None):\n pass;", "def test_cannot_get_other_attendant_sales(self):\n response = self.client.get(\n '/self.base_url/sales/1',\n headers=dict(Authorization=\"Bearer 
\" + self.attendant_token),\n content_type = 'application/json'\n )\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"You can only view your sales\")\n self.assertEqual(response.status_code,401)", "def verify(self, response):\n\n from requests import Response\n wrapped_response = Response()\n wrapped_response.headers = response.headers\n wrapped_response.status_code = response._status_code\n wrapped_response._content = response.get_data()\n\n return super(FlaskResponse, self).verify(wrapped_response)", "def get_authenticated_denied(self):", "def servicenow_sspm_jsonv2_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.jsonv2\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"HIGH\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.3] Instance should enforce basic authentication for JSONv2 requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for JSONv2 requests. 
Use the 'glide.basicauth.required.jsonv2' property to designate if incoming JSONv2 requests should require basic authorization. Without appropriate authorization configured on the data source JSON requests, an unauthorized user can access sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the Basic auth: JSONv2 requests (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/basic-auth-jsonv2-requests.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.3] Instance should enforce basic authentication for JSONv2 requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} enforces basic authentication for JSONv2 requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the Basic auth: JSONv2 requests (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/basic-auth-jsonv2-requests.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def healthcare():", "def validate(self, response):\n return response[\"status_code\"] == 1", "def lro_handling(self) -> global___Snippet.LroResponseHandling:", "def transform_misses(record):\n \n response = {}\n response[\"datasetId\"] = dict(record).get(\"stableId\") \n response[\"internalId\"] = dict(record).get(\"datasetId\")\n response[\"exists\"] = False\n # response[\"datasetId\"] = '' \n response[\"variantCount\"] = 0\n response[\"callCount\"] = 0\n response[\"sampleCount\"] = 0\n response[\"frequency\"] = 0 \n response[\"numVariants\"] = 0 \n response[\"info\"] = {\"access_type\": dict(record).get(\"accessType\")}\n\n return response", "def risk_assess(s):", "def consolidated_risks(self):\n privilege_escalation_results = {}\n resource_exposure_results = []\n data_exfiltration_results = []\n\n # Get it from each inline policy\n if self.inline_policies:\n for inline_policy in self.inline_policies:\n # Privilege Escalation\n if inline_policy.policy_document.allows_privilege_escalation:\n for entry in inline_policy.policy_document.allows_privilege_escalation:\n if entry[\"type\"] not in privilege_escalation_results.keys():\n privilege_escalation_results[entry[\"type\"]] = entry[\"actions\"]\n # Resource Exposure\n if inline_policy.policy_document.permissions_management_without_constraints:\n for action in inline_policy.policy_document.permissions_management_without_constraints:\n if action not in resource_exposure_results:\n resource_exposure_results.append(action)\n # Data Exfiltration\n if inline_policy.policy_document.allows_data_exfiltration_actions:\n for action in inline_policy.policy_document.allows_data_exfiltration_actions:\n if action not in data_exfiltration_results:\n data_exfiltration_results.append(action)\n\n if self.attached_managed_policies:\n for managed_policy in self.attached_managed_policies:\n # Privilege Escalation\n if managed_policy.policy_document.allows_privilege_escalation:\n for entry in managed_policy.policy_document.allows_privilege_escalation:\n if entry[\"type\"] not in privilege_escalation_results.keys():\n privilege_escalation_results[entry[\"type\"]] = entry[\"actions\"]\n # Resource Exposure\n if managed_policy.policy_document.permissions_management_without_constraints:\n for action in managed_policy.policy_document.permissions_management_without_constraints:\n if action not in resource_exposure_results:\n resource_exposure_results.append(action)\n # Data Exfiltration\n if managed_policy.policy_document.allows_data_exfiltration_actions:\n for action in managed_policy.policy_document.allows_data_exfiltration_actions:\n if action not in data_exfiltration_results:\n data_exfiltration_results.append(action)\n\n # turn it into a list because we want to be able to count the number of results\n these_privilege_escalation_results = []\n\n for key in privilege_escalation_results:\n result = {\n \"type\": key,\n \"actions\": privilege_escalation_results[key]\n }\n these_privilege_escalation_results.append(result)\n\n resource_exposure_results.sort()\n data_exfiltration_results.sort()\n\n results = {\n \"PrivilegeEscalation\": 
these_privilege_escalation_results,\n \"ResourceExposure\": resource_exposure_results,\n \"DataExfiltration\": data_exfiltration_results,\n }\n return results", "def testGetAccessAllowed(self):\n for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):\n response = self.runGet(user, sequencer=self.hiseq2000.vendor_id)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"sodar_uuid\"], str(self.hiseq2000.sodar_uuid))", "def _extract_resource(resource: Optional[dict],\n allowed_vals: tuple[tuple[str, ...]],\n exc: Type[exception.CinderException],\n resource_name: str,\n props: tuple[str] = ('status',)) -> Optional[str]:\n\n resource_id = None\n if resource:\n for prop, allowed_states in zip(props, allowed_vals):\n if resource[prop] not in allowed_states:\n msg = _(\"Originating %(res)s %(prop)s must be one of \"\n \"'%(vals)s' values\")\n msg = msg % {'res': resource_name,\n 'prop': prop,\n 'vals': ', '.join(allowed_states)}\n # TODO(harlowja): what happens if the status changes after\n # this initial resource status check occurs??? Seems like\n # someone could delete the resource after this check passes\n # but before the volume is officially created?\n raise exc(reason=msg)\n resource_id = resource['id']\n return resource_id", "def test__parse_allow(input_data):\n output = parse_allow(input_data)\n vampytest.assert_instance(output, Permission)\n return output", "def look_for_other_attributes(context):\n json_data = context.response.json()\n assert \"recommended_versions\" in json_data, \"No recommended version found\"\n assert \"registration_link\" in json_data, \"No snyk registration link found\"\n assert \"component_analyses\" in json_data, \"No component analyses data found\"\n assert \"message\" in json_data, \"No message found\"\n assert \"severity\" in json_data, \"No severity found\"\n assert \"known_security_vulnerability_count\" in json_data\n assert \"security_advisory_count\" in json_data", "def test_security_on_post(self):\n url = '/product/xml/'\n response = self.client.post(url,{'description':'my new description'})\n self.failUnlessEqual(response.status_code, 401)", "def assert_response_resource_not_accessible(self, response):\n self.assertEqual(response.status_code, 403)\n self.assertEqual(\n response.json(),\n {\"detail\": \"You do not have permission to perform this action.\"},\n )", "def protection_error_details(self) -> 'outputs.UserFacingErrorResponse':\n return pulumi.get(self, \"protection_error_details\")", "def testGetAccessAllowed(self):\n for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):\n response = self.runGet(user, sequencer=self.hiseq2000.sodar_uuid)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"sodar_uuid\"], str(self.hiseq2000.sodar_uuid))", "def check_for_no_privates(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for v in vulnerabilities:\n assert \"cvss\" in v\n assert \"is_private\" in v\n assert \"vendor_cve_ids\" in v\n if v[\"is_private\"]:\n raise Exception(\"Private vulnerability found\")", "def is_acceptable(self):", "def test_transform_misses(self):\n response = {\"referenceBases\": '', \"alternateBases\": '', \"variantType\": \"\",\n \"frequency\": 0, \"callCount\": 0, \"sampleCount\": 0, \"variantCount\": 0,\n \"start\": 0, \"end\": 0, \"info\": 
{\"accessType\": \"PUBLIC\"}}\n record = Record(\"PUBLIC\")\n result = transform_misses(record)\n self.assertEqual(result, response)", "def protected():\n message = \"\"\n if flask_praetorian.current_user().roles == \"admin\":\n message = f\"welcome {flask_praetorian.current_user().username}, this is protected endpoint\"\n else:\n message = f'Endpoint not allowed for user {flask_praetorian.current_user().username}'\n return {\"message\": message}", "def process_resource_api(self, resources, resource, api, context):\n pass", "def servicenow_sspm_xml_request_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.xml\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.25] Instance should enforce basic authentication for XML requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for XML requests. 
Use the 'glide.basicauth.required.xml' property to designate if incoming XML requests should require basic authentication. Without appropriate authorization configured on the incoming XML requests, an unauthorized user can get access to sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XML request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xml-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.25] Instance should enforce basic authentication for XML requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does enforce basic authentication for XML requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XML request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xml-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def process_request(self, req, resp, resource, params):", "def denied_response(self, req):\n if req.remote_user:\n return HTTPForbidden(request=req)\n else:\n return HTTPUnauthorized(request=req)", "def test_nonstandard_resource(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['job']['resources']['scalar'].append({'name': 'chocolate', 'value': 1.0 })\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertEqual(len(results['warnings']), 1)\n self.assertEqual(results['warnings'][0]['name'], 'NONSTANDARD_RESOURCE')", "def test_kyc_get_validation_legal(self):\n pass", "def obj_res(data, fail_on=['type', 'obj', 'res']):\n errors = []\n if not data.get('type', None) and 'type' in fail_on:\n errors += ['You must provide a role type to use this command.']\n\n # Find the grantee, and remove them from resource_list\n obj = None\n obj_type = None\n for fd in ACTOR_FIELDS:\n if data.get(fd, False):\n if not obj:\n obj = data[fd]\n obj_type = fd\n else:\n errors += ['You can not give a role to a user '\n 'and team at the same time.']\n break\n if not obj and 'obj' in fail_on:\n errors += ['You must specify either user or '\n 'team to use this command.']\n\n # Out of the resource list, pick out available valid resource field\n res = None\n res_type = None\n for fd in RESOURCE_FIELDS:\n if data.get(fd, False):\n if not res:\n res = data[fd]\n res_type = fd\n if res_type == 'target_team':\n res_type = 'team'\n else:\n errors += ['You can only give a role to one '\n 'type of resource at a time.']\n break\n if not res and 'res' in fail_on:\n errors += ['You must specify a target resource '\n 'to use this command.']\n\n if errors:\n raise exc.UsageError(\"\\n\".join(errors))\n return obj, obj_type, res, res_type", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def test_object_permissions(self):\n class ExampleSerializer(serializers.Serializer):\n choice_field = serializers.ChoiceField(['red', 'green', 'blue'])\n integer_field = serializers.IntegerField(max_value=10)\n char_field = serializers.CharField(required=False)\n\n class ExampleView(views.APIView):\n \"\"\"Example view.\"\"\"\n def post(self, request):\n pass\n\n def put(self, request):\n pass\n\n def get_serializer(self):\n return ExampleSerializer()\n\n def get_object(self):\n if self.request.method == 'PUT':\n raise exceptions.PermissionDenied()\n\n view = ExampleView.as_view()\n response = view(request=request)\n assert response.status_code == status.HTTP_200_OK\n assert list(response.data['actions'].keys()) == ['POST']", "def test_attendant_cannot_view_all_sales(self):\n response = self.client.get(\n 
'/self.base_url/sales',\n headers=dict(Authorization=\"Bearer \" + self.attendant_token),\n content_type = 'application/json'\n )\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"You dont have rights to list all sales, contact the system admin\")\n self.assertEqual(response.status_code,401)", "def test_create_namespaced_resource_access_review(self):\n pass", "def resource_forbidden(exc, request):\r\n request.response_status = \"403 Forbidden\"\r\n return {'message': str(exc)}", "def test_author_list_equality_with_valid_authentication(self) -> None:\n\n # Set the Authorization header to the appropriate\n # format as the rest_framework expects using utils.\n self.client.credentials(HTTP_AUTHORIZATION=u.auth_header(\n self.super_author.get_key()\n ))\n\n response = self.client.get(self.url)\n data = u.get_json(response)\n\n self.assertEqual(data, self.serialized_data, msg=data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_access_resource(self):\n test_resource = ResourceTypeName.get()\n role_name = 'test_role'\n resp = self.app.post(f'/v1/resource/{test_resource}', data=json.dumps({'actions': ['tr:action1']}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 201)\n with self.subTest(\"Permission is denied\"):\n resp = self.app.get(f'/v1/resource/{test_resource}', headers=user_header)\n self.assertEqual(resp.status_code, 403)\n\n role_request_body = {\n \"role_id\": role_name,\n \"policy\": {\n 'Statement': [{\n 'Sid': role_name,\n 'Action': [\n \"fus:DeleteResources\",\n \"fus:GetResources\"],\n 'Effect': 'Allow',\n 'Resource': [f\"arn:hca:fus:*:*:resource/{test_resource}\"]\n }]\n }\n }\n resp = self.app.post(f'/v1/role', data=json.dumps(role_request_body), headers=admin_headers)\n self.assertEqual(resp.status_code, 201)\n resp = self.app.put(f\"/v1/user/{service_accounts['user']['client_email']}/roles?action=add\",\n data=json.dumps({'roles': [role_name]}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n\n with self.subTest(\"Permission is granted\"):\n resp = self.app.get(f'/v1/resource/{test_resource}', headers=user_header)\n self.assertEqual(resp.status_code, 200)", "def generate_security_data(self):\n timestamp = int(time.time())\n security_dict = {\n 'content_type': str(self.target_object._meta),\n 'object_pk': str(self.target_object._get_pk_val()),\n 'timestamp': str(timestamp),\n 'security_hash': self.initial_security_hash(timestamp),\n }\n return security_dict", "def test_get_resource_string(self):\n # pylint: disable=protected-access\n student_view_html = self.xblock.student_view().content\n test_result = AdaptiveNumericInput.get_resource_string('view.html')\n test_result = test_result.format(\n self=self,\n attempts_message=self.xblock.get_attempts_message(),\n display_name=self.xblock.display_name,\n feedback_label='',\n feedback_message='',\n hint_message='',\n hintdisplay_class=self.xblock.get_css_hint_button_display(),\n hide_submit_class=self.xblock.get_css_hide_submit(),\n indicator_class=self.xblock.get_css_indicator(),\n indicator_visibility_class=self.xblock.get_css_indicator_hidden(),\n progress_message=self.xblock.get_progress_message(),\n prompt=self.xblock.prompt,\n saved_message='',\n student_answer=self.xblock.student_answer,\n submitted_message='',\n )\n self.assertEquals(student_view_html, test_result)", "def test_authorization(self):\n res = self.get(url=\"/products/1/pricehistory\")\n self.assertEqual(res.status_code, 401)\n self.assertException(res, 
exc.UnauthorizedAccess)\n res = self.get(url=\"/products/1/pricehistory\", role=\"user\")\n self.assertEqual(res.status_code, 401)\n self.assertException(res, exc.UnauthorizedAccess)", "def __str__(self):\n return \"Improperly formatted request: \" + self.source + \", resulting in exception: \" + self.bad", "def test_kyc_get_legal(self):\n pass", "def security_style(self):\n return self._security_style", "def serialize_response(self, response):\n raise NotImplementedError()", "def public_resource():\n return create_response(\n status_value=True,\n code=200,\n message=\"You have access the public resource\"\n )", "def test(self, resource):\n return resource.meta.fields[self.name].present(resource)", "def authenticate():\n resp = {\"status\": 401, \"message\": \"Could not verify your access level for that URL\"}\n return Response(dumps(resp), status=404, mimetype='application/json')", "def security(self):\n return self._security", "def servicenow_sspm_rss_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.rss\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": 
iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.19] Instance should enforce basic authentication for RSS requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for RSS requests. Use the glide.basicauth.required.rss property to designate if incoming RSS requests should require basic authentication. Without appropriate authorization configured on the incoming RSS requests, an unauthorized user can get access to sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the RSS request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/rss-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.19] Instance should enforce basic authentication for RSS requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} enforces basic authentication for RSS requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the RSS request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/rss-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def authorization():\n pass", "def test_authorization_is_enforced(self):\n new_client = APIClient()\n response = new_client.get('/posts/', kwargs={'pk': 3}, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def get_security(self):\n return self.cloudant_database.get_security_document()", "def validate(self):\n # Validate all mandatory keys are present\n if not self.mandatory_keys.issubset(set(self.resource)):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] miss a \"\n \"mandatory key. Please check the model.\" % (\n self.__class__.MODEL_TYPE,\n self.id))\n\n # Validate the resource does not contains extra keys\n if not set(self.resource).issubset(self.keys):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] contains \"\n \"extra keys. Please check the model.\" % (\n self.__class__.MODEL_TYPE,\n self.id))\n\n # Validate the resource value type\n for key, value in self.resource.items():\n if not isinstance(value, self.__class__.MODEL[key][0]):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] has an invalid \"\n \"key (%s) data type (expected: %s)\" % (\n self.__class__.MODEL_TYPE,\n self.id,\n key,\n self.__class__.MODEL[key][0]))\n # For str type validate the content as according the regex\n if self.__class__.MODEL[key][0] is str:\n if not re.match(self.__class__.MODEL[key][1], value):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] has an invalid \"\n \"key (%s) data content (expected match : %s)\" % (\n self.__class__.MODEL_TYPE,\n self.id,\n key,\n self.__class__.MODEL[key][1]))\n # For list type validate the content as according the regex\n if self.__class__.MODEL[key][0] is list:\n if not all([re.match(self.__class__.MODEL[key][1], v)\n for v in value]):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] has an invalid \"\n \"key (%s) data content (expected match : %s)\" % (\n self.__class__.MODEL_TYPE,\n self.id,\n key,\n self.__class__.MODEL[key][1]))", "def protectAPI(call, istty, *args, **kwargs):\n\ttry:\n\t\treturn call(*args, **kwargs)\n\texcept HTTPException, e:\n\t\tif istty:\n\t\t\tprint 'Error while calling API function %s: Returned code %d' % (call.__name__, e.retcode)\n\t\t\tif e.response:\n\t\t\t\tif raw_input('View entire web response? 
(y/n) > ') == 'y':\n\t\t\t\t\tprint e.response\n\t\traise", "def test_get_authorization_status_vendor_v3(self):\n pass", "def process (self, data):\n\n if data['method'] == 'GET':\n code, page = self.get(data['resource'])\n elif data['method'] == 'PUT':\n code, page = self.put(data['resource'], data['body'])\n elif data['method'] == 'POST':\n code, page = self.post(data['resource'], data['body'])\n else:\n code, page = \"405 Method not allowed\",\\\n PAGE_NOT_ALLOWED.format(method=data['method'])\n return (code, page)", "def servicenow_sspm_xsd_request_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.xsd\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.26] Instance should enforce basic authentication for XSD requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for XSD requests. 
Use the 'glide.basicauth.required.xsd' property to designate if incoming XSD (XML Schema Definition) requests should require basic authentication. Without appropriate authorization configured on the incoming XSD requests, an unauthorized user can get access to sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XD request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xsd-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.26] Instance should enforce basic authentication for XSD requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does enforce basic authentication for XSD requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XD request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xsd-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def user_should_get_an_ok_response():\n assert web_app.validate_reponse()", "def test_validate_get_single_resource(client):\n response = client.get('/user/1')\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def getYamlInstructions():\n with open('role_file_template.yaml', 'r') as yamlfile:\n output = yamlfile.read()\n if request.headers['Accept'] == 'application/json':\n return output, 200\n else:\n return render_template(\"output.html\", output=output)" ]
[ "0.5803334", "0.57779896", "0.5722596", "0.55923194", "0.5541996", "0.5463912", "0.54613954", "0.54241943", "0.5412314", "0.53474534", "0.53294784", "0.5312035", "0.52927554", "0.5291069", "0.5280915", "0.52559435", "0.5234564", "0.52308244", "0.5222391", "0.5222391", "0.52076674", "0.51712364", "0.5148108", "0.51227427", "0.51227427", "0.51165247", "0.51163125", "0.51089525", "0.5108425", "0.5100958", "0.5100958", "0.50995386", "0.5094954", "0.5094727", "0.5089133", "0.50768286", "0.5072614", "0.5071628", "0.50673914", "0.5066748", "0.50632167", "0.50575846", "0.50523317", "0.50495887", "0.5027393", "0.50208855", "0.50180954", "0.5017579", "0.5012663", "0.4996177", "0.49928442", "0.49917793", "0.49875835", "0.49856582", "0.4980959", "0.4972951", "0.4954714", "0.49451515", "0.49412447", "0.49174273", "0.49145836", "0.49063683", "0.49026877", "0.4900432", "0.48992735", "0.489814", "0.48980278", "0.48943815", "0.48884395", "0.48861337", "0.488269", "0.48825935", "0.4881476", "0.48730123", "0.48661432", "0.4863448", "0.4860011", "0.48596862", "0.4859593", "0.48562685", "0.4851677", "0.48512444", "0.48500192", "0.48487288", "0.48482674", "0.48429248", "0.4842031", "0.48389882", "0.48359397", "0.4833578", "0.4829877", "0.48281583", "0.4827509", "0.48245403", "0.4818478", "0.4812211", "0.48099622", "0.4804576", "0.48039773", "0.48012084", "0.47986868" ]
0.0
-1
Security assessment on a resource response format
def __init__(__self__, resource_name: str, args: AssessmentArgs, opts: Optional[pulumi.ResourceOptions] = None): ...
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def validate_response(self, response):\n pass", "def get_secured():\n return jsonify({'isSecured': config.requires_auth()})", "def getSecurity(self):\n return self.client.get(self.name +\"/_security\").getBodyData()", "def ExtractSecurityMarksFromResponse(response, args):\n del args\n list_asset_response = list(response)\n assert list_asset_response, (\"Asset or resource does not exist.\")\n assert len(list_asset_response) == 1, (\n \"ListAssetResponse must only return one asset since it is filtered \"\n \"by Asset Name.\")\n for asset_result in list_asset_response:\n return asset_result.asset.securityMarks", "def parse_resource(self, skip_deprecated=False):\n self.resource = parse_resource(self, skip_deprecated=skip_deprecated)\n if self.resource:\n self.authenticate()\n resource = getattr(self.v2, self.resource)\n if is_control_resource(self.resource):\n # control resources are special endpoints that you can only\n # do an HTTP GET to, and which return plain JSON metadata\n # examples are `/api/v2/ping/`, `/api/v2/config/`, etc...\n if self.help:\n self.subparsers[self.resource].print_help()\n raise SystemExit()\n self.method = 'get'\n response = getattr(resource, self.method)()\n else:\n response = self.parse_action(resource)\n\n _filter = self.get_config('filter')\n\n # human format for metrics, settings is special\n if (\n self.resource in ('metrics', 'settings') and\n self.get_config('format') == 'human'\n ):\n response.json = {\n 'count': len(response.json),\n 'results': [\n {'key': k, 'value': v}\n for k, v in response.json.items()\n ]\n }\n _filter = 'key, value'\n\n if (\n self.get_config('format') == 'human' and\n _filter == '.' and\n self.resource in UNIQUENESS_RULES\n ):\n _filter = ', '.join(UNIQUENESS_RULES[self.resource])\n\n formatted = format_response(\n response,\n fmt=self.get_config('format'),\n filter=_filter,\n changed=self.original_action in (\n 'modify', 'create', 'associate', 'disassociate'\n )\n )\n if formatted:\n print(utils.to_str(formatted), file=self.stdout)\n if hasattr(response, 'rc'):\n raise SystemExit(response.rc)\n else:\n self.parser.print_help()", "def check_vulnerability_in_result(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for vulnerability in vulnerabilities:\n assert \"cvss\" in vulnerability\n assert \"is_private\" in vulnerability\n assert \"vendor_cve_ids\" in vulnerability", "def test_unauthenticated_resource_allowed(self):\n raise NotImplementedError # FIXME", "def validate(self, data):\n # if data['is_private'] and data['contestants']:\n # raise serializers.ValidationError(\"Can not be private and compete for an award.\")\n return data", "def private_resource():\n return create_response(\n status_value=True,\n code=200,\n message=\"You have accessed the private resource.\"\n )", "def auth_failure():\n return \"Request denied due to failed authorization\", 201, {'Content-Type': 'text/html'}", "def testsecurity(self,id=0):\n return 'failed test security'", "def verify(self, response):", "def protected():\n return jsonify(message=f'protected endpoint (allowed user {flask_praetorian.current_user().username})')", "def getResourceDef(url, user, pWd, resourceName):\n \n print(\"getting resource for catalog:-\" + url + \" resource=\" + resourceName +\n ' user=' + user)\n apiURL = url + '/access/1/catalog/resources/' + resourceName\n # print(\"\\turl=\" + apiURL)\n header = {\"Accept\": \"application/json\"} \n tResp = 
requests.get(apiURL, params={}, headers=header, auth=HTTPBasicAuth(user,pWd))\n print(\"\\tresponse=\" + str(tResp.status_code))\n if tResp.status_code == 200:\n # valid - return the jsom\n return tResp.status_code, json.loads(tResp.text)\n else:\n # not valid\n return tResp.status_code, None", "def view_deny_page():\n response = make_response()\n response.data = ANGRY_ASCII\n response.content_type = \"text/plain\"\n return response\n # return \"YOU SHOULDN'T BE HERE\"", "def _process_resource(cls, resource):\n urn = resource['component_id']\n hrn, type = urn_to_hrn(resource['component_id'])\n\n resource['urn'] = urn\n resource['hrn'] = hrn\n\n resource['network_hrn'] = Xrn(resource['component_id']).authority[0] # network ? XXX\n\n # We also add 'facility' and 'testbed' fields\n resource['facility_name'] = cls.get_resource_facility_name(urn)\n resource['testbed_name'] = cls.get_resource_testbed_name(urn)\n\n if 'exclusive' not in resource:\n resource['exclusive'] = 'true'\n elif resource['exclusive'] is None:\n resource['exclusive'] = 'true'\n else:\n Log.warning(\"EXCLUSIVE = \",resource['exclusive'])\n\n #if 'location' in node:\n # if node['location']:\n # node['latitude'] = node['location']['latitude']\n # node['longitude'] = node['location']['longitude']\n # del node['location']\n #else:\n # if the location is not provided, aproximate it from the city\n t_urn = resource['urn'].split('+')\n city = t_urn[3].split('.')[1]\n if city == 'iii':\n city = 'Institute for Information Industry, Taïwan 106'\n resource['country'] = 'Taiwan'\n else:\n resource['country'] = 'France'\n location = cls.get_location(city)\n if location is not None:\n resource['latitude'] = str(location.latitude)\n resource['longitude'] = str(location.longitude)\n\n return resource", "def check_for_exposed(context):\n json_data = context.response.json()\n if \"exploitable_vulnerabilities_count\" in json_data:\n raise Exception(\"Field exploitable_vulnerabilities_count Exposed in\"\n \" Free user result\")\n if \"vendor_package_link\" in json_data:\n raise Exception(\"Field vendor_package_link has been exposed for free user\")", "def test_text_get_logged_in(self):\n\n resource = Resource(AnonymousTextHandler)\n request = HttpRequest()\n user = User.objects.get(pk=1)\n setattr(request, 'user' , user)\n request.method = 'GET'\n \n response = resource(request, key='text_key_3', emitter_format='json')\n self.assertEquals(200, response.status_code)", "def response_handling(self) -> global___Snippet.SimpleResponseHandling:", "def response_handling(self) -> global___Snippet.SimpleResponseHandling:", "def test_client_risk_assessment_retrieve(self):\n pass", "def mex_validation(resource):\n resource_name = [n for n in list(resource._fields) if getattr(resource,n) != '']\n for name in list(resource_name):\n url = getattr(resource,name)\n log.debug(\"resource: %s\" % url)\n try:\n o = urlparse.urlsplit(url)\n url_path = o.path\n log.debug('url_path :%s' % url_path)\n m = re.match('\\/(?P<service>[\\w-]+)\\/(image[s]?\\/|)(?P<id>[\\w-]+)', url_path)\n if m is not None:\n if m.group('service') == 'image_service' or m.group('service') == 'data_service': #check for data_service\n if 'pixels' not in url_path: #if false requires a redirect\n ident = m.group('id') #seaching a plan image_service or data_service url\n if check_access(ident) is True:\n continue #check next resource\n\n# # Try to route internally through bisque\n# resp = request_internally(url)\n# if resp.status_int < 400:\n# if resp.status_int == 302:\n# #reset the url to the 
redirected url\n# redirect_url = resp.headers.get('Location')\n# if redirect_url is not None: #did not find the redirect\n# log.debug('Redirect Url: %s' % redirect_url)\n# resource = resource._replace(**{name:redirect_url})\n# continue\n# else:\n# continue\n\n # Try to route externally\n resp = request_externally(url)\n if resp.status_code < 400:\n if resp.status_code == 302:\n #reset the url to the redirected url\n redirect_url = resp.headers.get('Location')\n if redirect_url is not None: #did not find the redirect\n log.debug('Redirect Url: %s' % redirect_url)\n resource = resource._replace(**{name:redirect_url})\n continue\n else:\n continue\n\n raise InvalidResourceError(resource_url=url, error_code=403, error_message='Resource: %s Not Found' % url)\n\n except StandardError:\n log.exception (\"While retrieving URL %s\" %str(resource))\n raise InvalidResourceError(resource_url=url, error_code=403, error_message='Resource: %s Not Found' % url)\n\n return resource", "def _get_nitro_response(self, service, response) :\n\t\ttry :\n\t\t\tresult = service.payload_formatter.string_to_resource(audit_response, response, self.__class__.__name__.replace('_stats',''))\n\t\t\tif(result.errorcode != 0) :\n\t\t\t\tif (result.errorcode == 444) :\n\t\t\t\t\tservice.clear_session(self)\n\t\t\t\tif result.severity :\n\t\t\t\t\tif (result.severity == \"ERROR\") :\n\t\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\t\telse :\n\t\t\t\t\traise nitro_exception(result.errorcode, str(result.message), str(result.severity))\n\t\t\treturn result.audit\n\t\texcept Exception as e :\n\t\t\traise e", "def resource_details(self) -> pulumi.Output[Any]:\n return pulumi.get(self, \"resource_details\")", "def resource_details(self) -> pulumi.Output[Any]:\n return pulumi.get(self, \"resource_details\")", "def get_resource_state():\n output = [f'{\"S. 
No.\":6}\\t{\"Resource\":50}\\t{\"Health State\":12}\\t{\"Reason\":100}\\n']\n\n for index, resource in enumerate(HEALTH_AGGREGATOR.resource_state):\n output.append(\n f'{index + 1:<6}\\t{resource:<50}\\t'\n f'{\"Healthy\" if HEALTH_AGGREGATOR.resource_state[resource][\"is_healthy\"] else \"Unhealthy\":<12}\\t'\n f'{HEALTH_AGGREGATOR.resource_state[resource][\"reason\"]:<100}\\n'\n )\n\n return Response('\\n'.join(output), 200, mimetype='text/plain')", "def check_vulnerability(self):\n\t\tpass", "def test_security_on_get(self):\n # test the listing url\n product = Product.objects.all()[0]\n url = '/product/xml/'\n response = self.client.get(url)\n self.failUnlessEqual(response.status_code, 401)\n # test the product detail url\n url = '/product/xml/%s/' % product.item_number\n Response = self.client.get(url)\n self.failUnlessEqual(response.status_code, 401)", "def __getattr__(self, attr):\n actual_resource = getattr(self.swagger_client, attr)\n if attr in [\"Authorization\", \"Effects\", \"Identify\", \"Info\",\n \"PanelLayout\", \"State\"]:\n return WrappedResource(actual_resource, attr)\n else:\n return actual_resource", "def main_response(self, data):", "def main_response(self, data):", "def test_detail_is_hacker_permission(self):\n self.user_1.username = 'pythonhacker'\n self.user_1.save()\n\n token = Token.objects.create(user=self.user_1)\n headers = {\n 'HTTP_AUTHORIZATION': 'Token ' + str(token)\n }\n response = self.client.get(\n '/api/products/{}/'.format(self.product_1.id), **headers)\n\n expected = {'detail': 'You do not have permission to perform this action.'}\n self.assertEqual(response.status_code, 403)\n self.assertEqual(response.json(), expected)", "def get_response_serializers(self):\n responses = OrderedDict({\n '400': 'Invalid arguments',\n '401': 'Not authenticated',\n '403': \"You don't have access to do this operation on this company\",\n 'error': ErrorSerializer,\n })\n\n responses.update(super().get_response_serializers())\n\n return responses", "def response_unauthorised():\n\n response = {\n 'status': 'failed',\n 'error': 'Not Authorised'\n }\n\n return response_json(response, status=401)", "def get_student_response(request):\n unique_id = request.session.get('unique_id')\n\n try:\n if request.method != 'GET':\n raise NotImplementedError('Only GET is allowed on this endpoint.')\n\n params = ast.literal_eval(Parameters.objects.filter(key=unique_id)[0].value)\n\n\n bank_id = unquote(params['custom_bank_id'])\n taken_id = unquote(params['taken_id'])\n question_id = unquote(params['question_id'])\n student_req = AssessmentRequests(unique_id,'taaccct_student')\n\n\n response_url = (student_req.url + bank_id + \"/assessmentstaken/\" + taken_id +\n \"/questions/\" + question_id + '/responses/')\n student_response = student_req.get(response_url)\n if student_response.status_code == 200:\n data = student_response.json()\n else:\n raise LookupError\n\n return HttpResponse(json.dumps(data), content_type='application/json')\n except LookupError:\n raise Http404('Bank, assessment taken, question, or response not found.')\n except Exception as ex:\n import logging\n logging.info('get_response exception: ' + str(ex.args[0]))\n raise Http404", "def SecurityPolicyFromFile(input_file, messages, file_format):\n\n if file_format == 'yaml':\n parsed_security_policy = yaml.load(input_file)\n else:\n try:\n parsed_security_policy = json.load(input_file)\n except ValueError as e:\n raise exceptions.BadFileException('Error parsing JSON: {0}'.format(\n six.text_type(e)))\n\n 
security_policy = messages.SecurityPolicy()\n if 'description' in parsed_security_policy:\n security_policy.description = parsed_security_policy['description']\n if 'fingerprint' in parsed_security_policy:\n security_policy.fingerprint = base64.urlsafe_b64decode(\n parsed_security_policy['fingerprint'].encode('ascii'))\n if 'type' in parsed_security_policy:\n security_policy.type = (\n messages.SecurityPolicy.TypeValueValuesEnum(\n parsed_security_policy['type']))\n if 'cloudArmorConfig' in parsed_security_policy:\n security_policy.cloudArmorConfig = messages.SecurityPolicyCloudArmorConfig(\n enableMl=parsed_security_policy['cloudArmorConfig']['enableMl'])\n if 'adaptiveProtectionConfig' in parsed_security_policy:\n security_policy.adaptiveProtectionConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfig(\n layer7DdosDefenseConfig=messages\n .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(\n enable=parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['enable'])))\n if 'ruleVisibility' in parsed_security_policy['adaptiveProtectionConfig'][\n 'layer7DdosDefenseConfig']:\n security_policy.adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility = (\n messages.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig\n .RuleVisibilityValueValuesEnum(\n parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['ruleVisibility']))\n if 'advancedOptionsConfig' in parsed_security_policy:\n security_policy.advancedOptionsConfig = (\n messages.SecurityPolicyAdvancedOptionsConfig())\n if 'jsonParsing' in parsed_security_policy['advancedOptionsConfig']:\n security_policy.advancedOptionsConfig.jsonParsing = (\n messages.SecurityPolicyAdvancedOptionsConfig\n .JsonParsingValueValuesEnum(\n parsed_security_policy['advancedOptionsConfig']['jsonParsing']))\n if 'logLevel' in parsed_security_policy['advancedOptionsConfig']:\n security_policy.advancedOptionsConfig.logLevel = (\n messages.SecurityPolicyAdvancedOptionsConfig.LogLevelValueValuesEnum(\n parsed_security_policy['advancedOptionsConfig']['logLevel']))\n if 'ddosProtectionConfig' in parsed_security_policy:\n security_policy.ddosProtectionConfig = (\n messages.SecurityPolicyDdosProtectionConfig(\n ddosProtection=messages.SecurityPolicyDdosProtectionConfig\n .DdosProtectionValueValuesEnum(\n parsed_security_policy['ddosProtectionConfig']\n ['ddosProtection'])))\n\n rules = []\n for rule in parsed_security_policy['rules']:\n security_policy_rule = messages.SecurityPolicyRule()\n security_policy_rule.action = rule['action']\n if 'description' in rule:\n security_policy_rule.description = rule['description']\n match = messages.SecurityPolicyRuleMatcher()\n if 'srcIpRanges' in rule['match']:\n match.srcIpRanges = rule['match']['srcIpRanges']\n if 'versionedExpr' in rule['match']:\n match.versionedExpr = ConvertToEnum(rule['match']['versionedExpr'],\n messages)\n if 'expr' in rule['match']:\n match.expr = messages.Expr(expression=rule['match']['expr']['expression'])\n if 'config' in rule['match']:\n if 'srcIpRanges' in rule['match']['config']:\n match.config = messages.SecurityPolicyRuleMatcherConfig(\n srcIpRanges=rule['match']['config']['srcIpRanges'])\n security_policy_rule.match = match\n security_policy_rule.priority = int(rule['priority'])\n if 'preview' in rule:\n security_policy_rule.preview = rule['preview']\n rules.append(security_policy_rule)\n if 'redirectTarget' in rule:\n security_policy_rule.redirectTarget = rule['redirectTarget']\n if 'ruleNumber' in rule:\n 
security_policy_rule.ruleNumber = int(rule['ruleNumber'])\n if 'redirectOptions' in rule:\n redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rule['redirectOptions']:\n redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rule['redirectOptions']['type']))\n if 'target' in rule['redirectOptions']:\n redirect_options.target = rule['redirectOptions']['target']\n security_policy_rule.redirectOptions = redirect_options\n if 'headerAction' in rule:\n header_action = messages.SecurityPolicyRuleHttpHeaderAction()\n headers_to_add = []\n for header_to_add in rule['headerAction']['requestHeadersToAdds']:\n headers_to_add.append(\n messages.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption(\n headerName=header_to_add['headerName'],\n headerValue=header_to_add['headerValue']))\n header_action.requestHeadersToAdds = headers_to_add\n security_policy_rule.headerAction = header_action\n if 'rateLimitOptions' in rule:\n rate_limit_options = rule['rateLimitOptions']\n security_policy_rule.rateLimitOptions = (\n messages.SecurityPolicyRuleRateLimitOptions(\n rateLimitThreshold=messages\n .SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['rateLimitThreshold']['count'],\n intervalSec=rate_limit_options['rateLimitThreshold']\n ['intervalSec']),\n conformAction=rate_limit_options['conformAction'],\n exceedAction=rate_limit_options['exceedAction'],\n banThreshold=messages.SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['banThreshold']['count'],\n intervalSec=rate_limit_options['banThreshold']\n ['intervalSec']),\n banDurationSec=rate_limit_options['banDurationSec']))\n if 'enforceOnKey' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKey = (\n messages.SecurityPolicyRuleRateLimitOptions\n .EnforceOnKeyValueValuesEnum(rate_limit_options['enforceOnKey']))\n if 'enforceOnKeyName' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKeyName = (\n rate_limit_options['enforceOnKeyName'])\n\n security_policy.rules = rules\n\n return security_policy", "def meta():\n\n if current_user.is_anonymous:\n return {\"status\": 401, \"error\": \"Not Authenticated\"}, 401\n\n return {\n \"id\": current_user.id,\n \"name\": current_user.name,\n \"email\": current_user.email,\n \"access\": current_user.access,\n }, 200", "def _process_resource(cls, resource):\n urn = resource['component_id']\n hrn, type = urn_to_hrn(resource['component_id'])\n\n resource['urn'] = urn\n resource['hrn'] = hrn\n\n resource['network_hrn'] = Xrn(resource['component_id']).authority[0] # network ? 
XXX\n\n # We also add 'facility' and 'testbed' fields\n resource['facility_name'] = cls.get_resource_facility_name(urn)\n resource['testbed_name'] = cls.get_resource_testbed_name(urn)\n\n return resource", "def security(self) -> pulumi.Output['outputs.ServiceSecurity']:\n return pulumi.get(self, \"security\")", "def check_for_private_vul(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for v in vulnerabilities:\n if v[\"is_private\"]:\n return\n raise Exception(\"No private vulnerability found\")", "def SecurityPolicyFromFile(input_file, messages, file_format):\n\n if file_format == 'yaml':\n parsed_security_policy = yaml.load(input_file)\n else:\n try:\n parsed_security_policy = json.load(input_file)\n except ValueError as e:\n raise exceptions.BadFileException('Error parsing JSON: {0}'.format(\n six.text_type(e)))\n\n security_policy = messages.SecurityPolicy()\n if 'description' in parsed_security_policy:\n security_policy.description = parsed_security_policy['description']\n if 'fingerprint' in parsed_security_policy:\n security_policy.fingerprint = base64.urlsafe_b64decode(\n parsed_security_policy['fingerprint'].encode('ascii'))\n if 'type' in parsed_security_policy:\n security_policy.type = (\n messages.SecurityPolicy.TypeValueValuesEnum(\n parsed_security_policy['type']))\n if 'cloudArmorConfig' in parsed_security_policy:\n security_policy.cloudArmorConfig = messages.SecurityPolicyCloudArmorConfig(\n enableMl=parsed_security_policy['cloudArmorConfig']['enableMl'])\n if 'adaptiveProtectionConfig' in parsed_security_policy:\n security_policy.adaptiveProtectionConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfig(\n layer7DdosDefenseConfig=messages\n .SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(\n enable=parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['enable']),))\n if 'autoDeployConfig' in parsed_security_policy['adaptiveProtectionConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig = (\n messages.SecurityPolicyAdaptiveProtectionConfigAutoDeployConfig())\n if 'loadThreshold' in parsed_security_policy['adaptiveProtectionConfig'][\n 'autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.loadThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['loadThreshold'])\n if 'confidenceThreshold' in parsed_security_policy[\n 'adaptiveProtectionConfig']['autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.confidenceThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['confidenceThreshold'])\n if 'impactedBaselineThreshold' in parsed_security_policy[\n 'adaptiveProtectionConfig']['autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.impactedBaselineThreshold = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['impactedBaselineThreshold'])\n if 'expirationSec' in parsed_security_policy['adaptiveProtectionConfig'][\n 'autoDeployConfig']:\n security_policy.adaptiveProtectionConfig.autoDeployConfig.expirationSec = (\n parsed_security_policy['adaptiveProtectionConfig']\n ['autoDeployConfig']['expirationSec'])\n if 'ruleVisibility' in parsed_security_policy['adaptiveProtectionConfig'][\n 'layer7DdosDefenseConfig']:\n security_policy.adaptiveProtectionConfig.layer7DdosDefenseConfig.ruleVisibility = (\n 
messages.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig\n .RuleVisibilityValueValuesEnum(\n parsed_security_policy['adaptiveProtectionConfig']\n ['layer7DdosDefenseConfig']['ruleVisibility']))\n if 'advancedOptionsConfig' in parsed_security_policy:\n advanced_options_config = parsed_security_policy['advancedOptionsConfig']\n security_policy.advancedOptionsConfig = (\n messages.SecurityPolicyAdvancedOptionsConfig())\n if 'jsonParsing' in advanced_options_config:\n security_policy.advancedOptionsConfig.jsonParsing = (\n messages.SecurityPolicyAdvancedOptionsConfig\n .JsonParsingValueValuesEnum(\n advanced_options_config['jsonParsing']))\n if 'jsonCustomConfig' in advanced_options_config:\n security_policy.advancedOptionsConfig.jsonCustomConfig = (\n messages.SecurityPolicyAdvancedOptionsConfigJsonCustomConfig(\n contentTypes=advanced_options_config\n ['jsonCustomConfig'].get('contentTypes', [])))\n if 'logLevel' in advanced_options_config:\n security_policy.advancedOptionsConfig.logLevel = (\n messages.SecurityPolicyAdvancedOptionsConfig.LogLevelValueValuesEnum(\n advanced_options_config['logLevel']))\n if 'userIpRequestHeaders' in advanced_options_config:\n security_policy.advancedOptionsConfig.userIpRequestHeaders = (\n advanced_options_config['userIpRequestHeaders'])\n if 'ddosProtectionConfig' in parsed_security_policy:\n security_policy.ddosProtectionConfig = (\n messages.SecurityPolicyDdosProtectionConfig(\n ddosProtection=messages.SecurityPolicyDdosProtectionConfig\n .DdosProtectionValueValuesEnum(\n parsed_security_policy['ddosProtectionConfig']\n ['ddosProtection'])))\n if 'recaptchaOptionsConfig' in parsed_security_policy:\n security_policy.recaptchaOptionsConfig = (\n messages.SecurityPolicyRecaptchaOptionsConfig())\n if 'redirectSiteKey' in parsed_security_policy['recaptchaOptionsConfig']:\n security_policy.recaptchaOptionsConfig.redirectSiteKey = (\n parsed_security_policy['recaptchaOptionsConfig']['redirectSiteKey'])\n\n if 'userDefinedFields' in parsed_security_policy:\n user_defined_fields = []\n for udf in parsed_security_policy['userDefinedFields']:\n user_defined_field = messages.SecurityPolicyUserDefinedField()\n user_defined_field.name = udf['name']\n user_defined_field.base = (\n messages.SecurityPolicyUserDefinedField.BaseValueValuesEnum(\n udf['base']\n )\n )\n user_defined_field.offset = udf['offset']\n user_defined_field.size = udf['size']\n if 'mask' in udf:\n user_defined_field.mask = udf['mask']\n user_defined_fields.append(user_defined_field)\n security_policy.userDefinedFields = user_defined_fields\n\n rules = []\n for rule in parsed_security_policy['rules']:\n security_policy_rule = messages.SecurityPolicyRule()\n security_policy_rule.action = rule['action']\n if 'description' in rule:\n security_policy_rule.description = rule['description']\n if 'match' in rule:\n match = messages.SecurityPolicyRuleMatcher()\n if 'versionedExpr' in rule['match']:\n match.versionedExpr = ConvertToEnum(\n rule['match']['versionedExpr'], messages\n )\n if 'expr' in rule['match']:\n match.expr = messages.Expr(\n expression=rule['match']['expr']['expression']\n )\n if 'exprOptions' in rule['match']:\n expr_options = messages.SecurityPolicyRuleMatcherExprOptions()\n if 'recaptchaOptions' in rule['match']['exprOptions']:\n expr_options.recaptchaOptions = (\n messages.SecurityPolicyRuleMatcherExprOptionsRecaptchaOptions(\n actionTokenSiteKeys=rule['match']['exprOptions'][\n 'recaptchaOptions'\n ].get('actionTokenSiteKeys', []),\n 
sessionTokenSiteKeys=rule['match']['exprOptions'][\n 'recaptchaOptions'\n ].get('sessionTokenSiteKeys', []),\n )\n )\n match.exprOptions = expr_options\n if 'config' in rule['match']:\n if 'srcIpRanges' in rule['match']['config']:\n match.config = messages.SecurityPolicyRuleMatcherConfig(\n srcIpRanges=rule['match']['config']['srcIpRanges']\n )\n security_policy_rule.match = match\n if 'networkMatch' in rule:\n network_match = messages.SecurityPolicyRuleNetworkMatcher()\n if 'userDefinedFields' in rule['networkMatch']:\n user_defined_fields = []\n for udf in rule['networkMatch']['userDefinedFields']:\n user_defined_field_match = (\n messages.SecurityPolicyRuleNetworkMatcherUserDefinedFieldMatch()\n )\n user_defined_field_match.name = udf['name']\n user_defined_field_match.values = udf['values']\n user_defined_fields.append(user_defined_field_match)\n network_match.userDefinedFields = user_defined_fields\n if 'srcIpRanges' in rule['networkMatch']:\n network_match.srcIpRanges = rule['networkMatch']['srcIpRanges']\n if 'destIpRanges' in rule['networkMatch']:\n network_match.destIpRanges = rule['networkMatch']['destIpRanges']\n if 'ipProtocols' in rule['networkMatch']:\n network_match.ipProtocols = rule['networkMatch']['ipProtocols']\n if 'srcPorts' in rule['networkMatch']:\n network_match.srcPorts = rule['networkMatch']['srcPorts']\n if 'destPorts' in rule['networkMatch']:\n network_match.destPorts = rule['networkMatch']['destPorts']\n if 'srcRegionCodes' in rule['networkMatch']:\n network_match.srcRegionCodes = rule['networkMatch']['srcRegionCodes']\n if 'srcAsns' in rule['networkMatch']:\n network_match.srcAsns = rule['networkMatch']['srcAsns']\n security_policy_rule.networkMatch = network_match\n security_policy_rule.priority = int(rule['priority'])\n if 'preview' in rule:\n security_policy_rule.preview = rule['preview']\n rules.append(security_policy_rule)\n if 'redirectTarget' in rule:\n security_policy_rule.redirectTarget = rule['redirectTarget']\n if 'ruleNumber' in rule:\n security_policy_rule.ruleNumber = int(rule['ruleNumber'])\n if 'redirectOptions' in rule:\n redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rule['redirectOptions']:\n redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rule['redirectOptions']['type']))\n if 'target' in rule['redirectOptions']:\n redirect_options.target = rule['redirectOptions']['target']\n security_policy_rule.redirectOptions = redirect_options\n if 'headerAction' in rule:\n header_action = messages.SecurityPolicyRuleHttpHeaderAction()\n headers_in_rule = rule['headerAction'].get('requestHeadersToAdds', [])\n headers_to_add = []\n for header_to_add in headers_in_rule:\n headers_to_add.append(\n messages.SecurityPolicyRuleHttpHeaderActionHttpHeaderOption(\n headerName=header_to_add['headerName'],\n headerValue=header_to_add['headerValue']))\n if headers_to_add:\n header_action.requestHeadersToAdds = headers_to_add\n security_policy_rule.headerAction = header_action\n if 'rateLimitOptions' in rule:\n rate_limit_options = rule['rateLimitOptions']\n security_policy_rule.rateLimitOptions = (\n messages.SecurityPolicyRuleRateLimitOptions(\n rateLimitThreshold=messages\n .SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['rateLimitThreshold']['count'],\n intervalSec=rate_limit_options['rateLimitThreshold']\n ['intervalSec']),\n conformAction=rate_limit_options['conformAction'],\n exceedAction=rate_limit_options['exceedAction']))\n if 'exceedActionRpcStatus' in 
rate_limit_options:\n exceed_action_rpc_status = (\n messages.SecurityPolicyRuleRateLimitOptionsRpcStatus()\n )\n if 'code' in rate_limit_options['exceedActionRpcStatus']:\n exceed_action_rpc_status.code = rate_limit_options[\n 'exceedActionRpcStatus']['code']\n if 'message' in rate_limit_options['exceedActionRpcStatus']:\n exceed_action_rpc_status.message = rate_limit_options[\n 'exceedActionRpcStatus']['message']\n security_policy_rule.rateLimitOptions.exceedActionRpcStatus = (\n exceed_action_rpc_status\n )\n if 'exceedRedirectOptions' in rate_limit_options:\n exceed_redirect_options = messages.SecurityPolicyRuleRedirectOptions()\n if 'type' in rate_limit_options['exceedRedirectOptions']:\n exceed_redirect_options.type = (\n messages.SecurityPolicyRuleRedirectOptions.TypeValueValuesEnum(\n rate_limit_options['exceedRedirectOptions']['type']))\n if 'target' in rate_limit_options['exceedRedirectOptions']:\n exceed_redirect_options.target = rate_limit_options[\n 'exceedRedirectOptions']['target']\n security_policy_rule.rateLimitOptions.exceedRedirectOptions = (\n exceed_redirect_options)\n if 'banThreshold' in rate_limit_options:\n security_policy_rule.rateLimitOptions.banThreshold = (\n messages.SecurityPolicyRuleRateLimitOptionsThreshold(\n count=rate_limit_options['banThreshold']['count'],\n intervalSec=rate_limit_options['banThreshold']['intervalSec']))\n if 'banDurationSec' in rate_limit_options:\n security_policy_rule.rateLimitOptions.banDurationSec = (\n rate_limit_options['banDurationSec'])\n if 'enforceOnKey' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKey = (\n messages.SecurityPolicyRuleRateLimitOptions\n .EnforceOnKeyValueValuesEnum(rate_limit_options['enforceOnKey']))\n if 'enforceOnKeyName' in rate_limit_options:\n security_policy_rule.rateLimitOptions.enforceOnKeyName = (\n rate_limit_options['enforceOnKeyName'])\n if 'preconfiguredWafConfig' in rule:\n preconfig_waf_config = messages.SecurityPolicyRulePreconfiguredWafConfig()\n for exclusion in rule['preconfiguredWafConfig'].get('exclusions', []):\n exclusion_to_add = (\n messages.SecurityPolicyRulePreconfiguredWafConfigExclusion())\n if 'targetRuleSet' in exclusion:\n exclusion_to_add.targetRuleSet = exclusion['targetRuleSet']\n for target_rule_id in exclusion.get('targetRuleIds', []):\n exclusion_to_add.targetRuleIds.append(target_rule_id)\n for request_header in exclusion.get('requestHeadersToExclude', []):\n exclusion_to_add.requestHeadersToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_header,\n messages))\n for request_cookie in exclusion.get('requestCookiesToExclude', []):\n exclusion_to_add.requestCookiesToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_cookie,\n messages))\n for request_query_param in exclusion.get('requestQueryParamsToExclude',\n []):\n exclusion_to_add.requestQueryParamsToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_query_param,\n messages))\n for request_uri in exclusion.get('requestUrisToExclude', []):\n exclusion_to_add.requestUrisToExclude.append(\n ConvertPreconfigWafExclusionRequestField(request_uri, messages))\n preconfig_waf_config.exclusions.append(exclusion_to_add)\n security_policy_rule.preconfiguredWafConfig = preconfig_waf_config\n\n security_policy.rules = rules\n\n return security_policy", "def supports(self, resource, resourceType = None):\n pass;", "def test_cannot_get_other_attendant_sales(self):\n response = self.client.get(\n '/self.base_url/sales/1',\n headers=dict(Authorization=\"Bearer 
\" + self.attendant_token),\n content_type = 'application/json'\n )\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"You can only view your sales\")\n self.assertEqual(response.status_code,401)", "def verify(self, response):\n\n from requests import Response\n wrapped_response = Response()\n wrapped_response.headers = response.headers\n wrapped_response.status_code = response._status_code\n wrapped_response._content = response.get_data()\n\n return super(FlaskResponse, self).verify(wrapped_response)", "def get_authenticated_denied(self):", "def servicenow_sspm_jsonv2_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.jsonv2\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"HIGH\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.3] Instance should enforce basic authentication for JSONv2 requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for JSONv2 requests. 
Use the 'glide.basicauth.required.jsonv2' property to designate if incoming JSONv2 requests should require basic authorization. Without appropriate authorization configured on the data source JSON requests, an unauthorized user can access sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the Basic auth: JSONv2 requests (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/basic-auth-jsonv2-requests.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.3] Instance should enforce basic authentication for JSONv2 requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} enforces basic authentication for JSONv2 requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the Basic auth: JSONv2 requests (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/basic-auth-jsonv2-requests.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def healthcare():", "def validate(self, response):\n return response[\"status_code\"] == 1", "def lro_handling(self) -> global___Snippet.LroResponseHandling:", "def transform_misses(record):\n \n response = {}\n response[\"datasetId\"] = dict(record).get(\"stableId\") \n response[\"internalId\"] = dict(record).get(\"datasetId\")\n response[\"exists\"] = False\n # response[\"datasetId\"] = '' \n response[\"variantCount\"] = 0\n response[\"callCount\"] = 0\n response[\"sampleCount\"] = 0\n response[\"frequency\"] = 0 \n response[\"numVariants\"] = 0 \n response[\"info\"] = {\"access_type\": dict(record).get(\"accessType\")}\n\n return response", "def risk_assess(s):", "def consolidated_risks(self):\n privilege_escalation_results = {}\n resource_exposure_results = []\n data_exfiltration_results = []\n\n # Get it from each inline policy\n if self.inline_policies:\n for inline_policy in self.inline_policies:\n # Privilege Escalation\n if inline_policy.policy_document.allows_privilege_escalation:\n for entry in inline_policy.policy_document.allows_privilege_escalation:\n if entry[\"type\"] not in privilege_escalation_results.keys():\n privilege_escalation_results[entry[\"type\"]] = entry[\"actions\"]\n # Resource Exposure\n if inline_policy.policy_document.permissions_management_without_constraints:\n for action in inline_policy.policy_document.permissions_management_without_constraints:\n if action not in resource_exposure_results:\n resource_exposure_results.append(action)\n # Data Exfiltration\n if inline_policy.policy_document.allows_data_exfiltration_actions:\n for action in inline_policy.policy_document.allows_data_exfiltration_actions:\n if action not in data_exfiltration_results:\n data_exfiltration_results.append(action)\n\n if self.attached_managed_policies:\n for managed_policy in self.attached_managed_policies:\n # Privilege Escalation\n if managed_policy.policy_document.allows_privilege_escalation:\n for entry in managed_policy.policy_document.allows_privilege_escalation:\n if entry[\"type\"] not in privilege_escalation_results.keys():\n privilege_escalation_results[entry[\"type\"]] = entry[\"actions\"]\n # Resource Exposure\n if managed_policy.policy_document.permissions_management_without_constraints:\n for action in managed_policy.policy_document.permissions_management_without_constraints:\n if action not in resource_exposure_results:\n resource_exposure_results.append(action)\n # Data Exfiltration\n if managed_policy.policy_document.allows_data_exfiltration_actions:\n for action in managed_policy.policy_document.allows_data_exfiltration_actions:\n if action not in data_exfiltration_results:\n data_exfiltration_results.append(action)\n\n # turn it into a list because we want to be able to count the number of results\n these_privilege_escalation_results = []\n\n for key in privilege_escalation_results:\n result = {\n \"type\": key,\n \"actions\": privilege_escalation_results[key]\n }\n these_privilege_escalation_results.append(result)\n\n resource_exposure_results.sort()\n data_exfiltration_results.sort()\n\n results = {\n \"PrivilegeEscalation\": 
these_privilege_escalation_results,\n \"ResourceExposure\": resource_exposure_results,\n \"DataExfiltration\": data_exfiltration_results,\n }\n return results", "def testGetAccessAllowed(self):\n for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):\n response = self.runGet(user, sequencer=self.hiseq2000.vendor_id)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"sodar_uuid\"], str(self.hiseq2000.sodar_uuid))", "def _extract_resource(resource: Optional[dict],\n allowed_vals: tuple[tuple[str, ...]],\n exc: Type[exception.CinderException],\n resource_name: str,\n props: tuple[str] = ('status',)) -> Optional[str]:\n\n resource_id = None\n if resource:\n for prop, allowed_states in zip(props, allowed_vals):\n if resource[prop] not in allowed_states:\n msg = _(\"Originating %(res)s %(prop)s must be one of \"\n \"'%(vals)s' values\")\n msg = msg % {'res': resource_name,\n 'prop': prop,\n 'vals': ', '.join(allowed_states)}\n # TODO(harlowja): what happens if the status changes after\n # this initial resource status check occurs??? Seems like\n # someone could delete the resource after this check passes\n # but before the volume is officially created?\n raise exc(reason=msg)\n resource_id = resource['id']\n return resource_id", "def test__parse_allow(input_data):\n output = parse_allow(input_data)\n vampytest.assert_instance(output, Permission)\n return output", "def look_for_other_attributes(context):\n json_data = context.response.json()\n assert \"recommended_versions\" in json_data, \"No recommended version found\"\n assert \"registration_link\" in json_data, \"No snyk registration link found\"\n assert \"component_analyses\" in json_data, \"No component analyses data found\"\n assert \"message\" in json_data, \"No message found\"\n assert \"severity\" in json_data, \"No severity found\"\n assert \"known_security_vulnerability_count\" in json_data\n assert \"security_advisory_count\" in json_data", "def test_security_on_post(self):\n url = '/product/xml/'\n response = self.client.post(url,{'description':'my new description'})\n self.failUnlessEqual(response.status_code, 401)", "def assert_response_resource_not_accessible(self, response):\n self.assertEqual(response.status_code, 403)\n self.assertEqual(\n response.json(),\n {\"detail\": \"You do not have permission to perform this action.\"},\n )", "def protection_error_details(self) -> 'outputs.UserFacingErrorResponse':\n return pulumi.get(self, \"protection_error_details\")", "def testGetAccessAllowed(self):\n for user in (self.guest, self.contributor, self.delegate, self.owner, self.root):\n response = self.runGet(user, sequencer=self.hiseq2000.sodar_uuid)\n self.response_200(response)\n data = json.loads(response.content.decode(\"utf-8\"))\n self.assertEqual(data[\"sodar_uuid\"], str(self.hiseq2000.sodar_uuid))", "def check_for_no_privates(context):\n json_data = context.response.json()\n\n if \"component_analyses\" in json_data:\n vulnerabilities = json_data['component_analyses']['vulnerability']\n for v in vulnerabilities:\n assert \"cvss\" in v\n assert \"is_private\" in v\n assert \"vendor_cve_ids\" in v\n if v[\"is_private\"]:\n raise Exception(\"Private vulnerability found\")", "def is_acceptable(self):", "def test_transform_misses(self):\n response = {\"referenceBases\": '', \"alternateBases\": '', \"variantType\": \"\",\n \"frequency\": 0, \"callCount\": 0, \"sampleCount\": 0, \"variantCount\": 0,\n \"start\": 0, \"end\": 0, \"info\": 
{\"accessType\": \"PUBLIC\"}}\n record = Record(\"PUBLIC\")\n result = transform_misses(record)\n self.assertEqual(result, response)", "def protected():\n message = \"\"\n if flask_praetorian.current_user().roles == \"admin\":\n message = f\"welcome {flask_praetorian.current_user().username}, this is protected endpoint\"\n else:\n message = f'Endpoint not allowed for user {flask_praetorian.current_user().username}'\n return {\"message\": message}", "def process_resource_api(self, resources, resource, api, context):\n pass", "def servicenow_sspm_xml_request_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.xml\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.25] Instance should enforce basic authentication for XML requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for XML requests. 
Use the 'glide.basicauth.required.xml' property to designate if incoming XML requests should require basic authentication. Without appropriate authorization configured on the incoming XML requests, an unauthorized user can get access to sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XML request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xml-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.25] Instance should enforce basic authentication for XML requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does enforce basic authentication for XML requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XML request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xml-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def process_request(self, req, resp, resource, params):", "def denied_response(self, req):\n if req.remote_user:\n return HTTPForbidden(request=req)\n else:\n return HTTPUnauthorized(request=req)", "def test_nonstandard_resource(self):\n manifest = copy.deepcopy(job_test_utils.COMPLETE_MANIFEST)\n manifest['job']['resources']['scalar'].append({'name': 'chocolate', 'value': 1.0 })\n config = copy.deepcopy(self.configuration)\n json_data = {\n 'manifest': manifest,\n 'configuration': config\n }\n\n url = '/%s/job-types/validation/' % self.api\n response = self.client.generic('POST', url, json.dumps(json_data), 'application/json')\n self.assertEqual(response.status_code, status.HTTP_200_OK, response.content)\n\n results = json.loads(response.content)\n self.assertTrue(results['is_valid'])\n self.assertEqual(len(results['warnings']), 1)\n self.assertEqual(results['warnings'][0]['name'], 'NONSTANDARD_RESOURCE')", "def test_kyc_get_validation_legal(self):\n pass", "def obj_res(data, fail_on=['type', 'obj', 'res']):\n errors = []\n if not data.get('type', None) and 'type' in fail_on:\n errors += ['You must provide a role type to use this command.']\n\n # Find the grantee, and remove them from resource_list\n obj = None\n obj_type = None\n for fd in ACTOR_FIELDS:\n if data.get(fd, False):\n if not obj:\n obj = data[fd]\n obj_type = fd\n else:\n errors += ['You can not give a role to a user '\n 'and team at the same time.']\n break\n if not obj and 'obj' in fail_on:\n errors += ['You must specify either user or '\n 'team to use this command.']\n\n # Out of the resource list, pick out available valid resource field\n res = None\n res_type = None\n for fd in RESOURCE_FIELDS:\n if data.get(fd, False):\n if not res:\n res = data[fd]\n res_type = fd\n if res_type == 'target_team':\n res_type = 'team'\n else:\n errors += ['You can only give a role to one '\n 'type of resource at a time.']\n break\n if not res and 'res' in fail_on:\n errors += ['You must specify a target resource '\n 'to use this command.']\n\n if errors:\n raise exc.UsageError(\"\\n\".join(errors))\n return obj, obj_type, res, res_type", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def test_object_permissions(self):\n class ExampleSerializer(serializers.Serializer):\n choice_field = serializers.ChoiceField(['red', 'green', 'blue'])\n integer_field = serializers.IntegerField(max_value=10)\n char_field = serializers.CharField(required=False)\n\n class ExampleView(views.APIView):\n \"\"\"Example view.\"\"\"\n def post(self, request):\n pass\n\n def put(self, request):\n pass\n\n def get_serializer(self):\n return ExampleSerializer()\n\n def get_object(self):\n if self.request.method == 'PUT':\n raise exceptions.PermissionDenied()\n\n view = ExampleView.as_view()\n response = view(request=request)\n assert response.status_code == status.HTTP_200_OK\n assert list(response.data['actions'].keys()) == ['POST']", "def test_attendant_cannot_view_all_sales(self):\n response = self.client.get(\n 
'/self.base_url/sales',\n headers=dict(Authorization=\"Bearer \" + self.attendant_token),\n content_type = 'application/json'\n )\n response_data = json.loads(response.data)\n self.assertEqual(response_data['message'],\"You dont have rights to list all sales, contact the system admin\")\n self.assertEqual(response.status_code,401)", "def test_create_namespaced_resource_access_review(self):\n pass", "def resource_forbidden(exc, request):\r\n request.response_status = \"403 Forbidden\"\r\n return {'message': str(exc)}", "def test_author_list_equality_with_valid_authentication(self) -> None:\n\n # Set the Authorization header to the appropriate\n # format as the rest_framework expects using utils.\n self.client.credentials(HTTP_AUTHORIZATION=u.auth_header(\n self.super_author.get_key()\n ))\n\n response = self.client.get(self.url)\n data = u.get_json(response)\n\n self.assertEqual(data, self.serialized_data, msg=data)\n self.assertEqual(response.status_code, status.HTTP_200_OK)", "def test_access_resource(self):\n test_resource = ResourceTypeName.get()\n role_name = 'test_role'\n resp = self.app.post(f'/v1/resource/{test_resource}', data=json.dumps({'actions': ['tr:action1']}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 201)\n with self.subTest(\"Permission is denied\"):\n resp = self.app.get(f'/v1/resource/{test_resource}', headers=user_header)\n self.assertEqual(resp.status_code, 403)\n\n role_request_body = {\n \"role_id\": role_name,\n \"policy\": {\n 'Statement': [{\n 'Sid': role_name,\n 'Action': [\n \"fus:DeleteResources\",\n \"fus:GetResources\"],\n 'Effect': 'Allow',\n 'Resource': [f\"arn:hca:fus:*:*:resource/{test_resource}\"]\n }]\n }\n }\n resp = self.app.post(f'/v1/role', data=json.dumps(role_request_body), headers=admin_headers)\n self.assertEqual(resp.status_code, 201)\n resp = self.app.put(f\"/v1/user/{service_accounts['user']['client_email']}/roles?action=add\",\n data=json.dumps({'roles': [role_name]}),\n headers=admin_headers)\n self.assertEqual(resp.status_code, 200)\n\n with self.subTest(\"Permission is granted\"):\n resp = self.app.get(f'/v1/resource/{test_resource}', headers=user_header)\n self.assertEqual(resp.status_code, 200)", "def generate_security_data(self):\n timestamp = int(time.time())\n security_dict = {\n 'content_type': str(self.target_object._meta),\n 'object_pk': str(self.target_object._get_pk_val()),\n 'timestamp': str(timestamp),\n 'security_hash': self.initial_security_hash(timestamp),\n }\n return security_dict", "def test_get_resource_string(self):\n # pylint: disable=protected-access\n student_view_html = self.xblock.student_view().content\n test_result = AdaptiveNumericInput.get_resource_string('view.html')\n test_result = test_result.format(\n self=self,\n attempts_message=self.xblock.get_attempts_message(),\n display_name=self.xblock.display_name,\n feedback_label='',\n feedback_message='',\n hint_message='',\n hintdisplay_class=self.xblock.get_css_hint_button_display(),\n hide_submit_class=self.xblock.get_css_hide_submit(),\n indicator_class=self.xblock.get_css_indicator(),\n indicator_visibility_class=self.xblock.get_css_indicator_hidden(),\n progress_message=self.xblock.get_progress_message(),\n prompt=self.xblock.prompt,\n saved_message='',\n student_answer=self.xblock.student_answer,\n submitted_message='',\n )\n self.assertEquals(student_view_html, test_result)", "def test_authorization(self):\n res = self.get(url=\"/products/1/pricehistory\")\n self.assertEqual(res.status_code, 401)\n self.assertException(res, 
exc.UnauthorizedAccess)\n res = self.get(url=\"/products/1/pricehistory\", role=\"user\")\n self.assertEqual(res.status_code, 401)\n self.assertException(res, exc.UnauthorizedAccess)", "def __str__(self):\n return \"Improperly formatted request: \" + self.source + \", resulting in exception: \" + self.bad", "def test_kyc_get_legal(self):\n pass", "def security_style(self):\n return self._security_style", "def serialize_response(self, response):\n raise NotImplementedError()", "def public_resource():\n return create_response(\n status_value=True,\n code=200,\n message=\"You have access the public resource\"\n )", "def test(self, resource):\n return resource.meta.fields[self.name].present(resource)", "def authenticate():\n resp = {\"status\": 401, \"message\": \"Could not verify your access level for that URL\"}\n return Response(dumps(resp), status=404, mimetype='application/json')", "def security(self):\n return self._security", "def servicenow_sspm_rss_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.rss\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": 
iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.19] Instance should enforce basic authentication for RSS requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for RSS requests. Use the glide.basicauth.required.rss property to designate if incoming RSS requests should require basic authentication. Without appropriate authorization configured on the incoming RSS requests, an unauthorized user can get access to sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the RSS request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/rss-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.19] Instance should enforce basic authentication for RSS requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} enforces basic authentication for RSS requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the RSS request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/rss-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def authorization():\n pass", "def test_authorization_is_enforced(self):\n new_client = APIClient()\n response = new_client.get('/posts/', kwargs={'pk': 3}, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)", "def get_security(self):\n return self.cloudant_database.get_security_document()", "def validate(self):\n # Validate all mandatory keys are present\n if not self.mandatory_keys.issubset(set(self.resource)):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] miss a \"\n \"mandatory key. Please check the model.\" % (\n self.__class__.MODEL_TYPE,\n self.id))\n\n # Validate the resource does not contains extra keys\n if not set(self.resource).issubset(self.keys):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] contains \"\n \"extra keys. Please check the model.\" % (\n self.__class__.MODEL_TYPE,\n self.id))\n\n # Validate the resource value type\n for key, value in self.resource.items():\n if not isinstance(value, self.__class__.MODEL[key][0]):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] has an invalid \"\n \"key (%s) data type (expected: %s)\" % (\n self.__class__.MODEL_TYPE,\n self.id,\n key,\n self.__class__.MODEL[key][0]))\n # For str type validate the content as according the regex\n if self.__class__.MODEL[key][0] is str:\n if not re.match(self.__class__.MODEL[key][1], value):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] has an invalid \"\n \"key (%s) data content (expected match : %s)\" % (\n self.__class__.MODEL_TYPE,\n self.id,\n key,\n self.__class__.MODEL[key][1]))\n # For list type validate the content as according the regex\n if self.__class__.MODEL[key][0] is list:\n if not all([re.match(self.__class__.MODEL[key][1], v)\n for v in value]):\n raise ResourceInvalidException(\n \"Resource [type: %s, ID: %s] has an invalid \"\n \"key (%s) data content (expected match : %s)\" % (\n self.__class__.MODEL_TYPE,\n self.id,\n key,\n self.__class__.MODEL[key][1]))", "def protectAPI(call, istty, *args, **kwargs):\n\ttry:\n\t\treturn call(*args, **kwargs)\n\texcept HTTPException, e:\n\t\tif istty:\n\t\t\tprint 'Error while calling API function %s: Returned code %d' % (call.__name__, e.retcode)\n\t\t\tif e.response:\n\t\t\t\tif raw_input('View entire web response? 
(y/n) > ') == 'y':\n\t\t\t\t\tprint e.response\n\t\traise", "def test_get_authorization_status_vendor_v3(self):\n pass", "def process (self, data):\n\n if data['method'] == 'GET':\n code, page = self.get(data['resource'])\n elif data['method'] == 'PUT':\n code, page = self.put(data['resource'], data['body'])\n elif data['method'] == 'POST':\n code, page = self.post(data['resource'], data['body'])\n else:\n code, page = \"405 Method not allowed\",\\\n PAGE_NOT_ALLOWED.format(method=data['method'])\n return (code, page)", "def servicenow_sspm_xsd_request_enforce_basic_auth_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str):\n iso8601Time = datetime.datetime.now(datetime.timezone.utc).isoformat()\n\n # Name of the property to evaluate against\n evalTarget = \"glide.basicauth.required.xsd\"\n # Get cached props\n sysPropCache = get_servicenow_sys_properties(cache)\n\n # There should not ever be a duplicate system property, use next() and a list comprehension to check if the\n # property we're evaluating is in the list of properties we get from the cache. If it is NOT then set the\n # value as `False` and we can fill in fake values. Not having a property for security hardening is the same\n # as a failed finding with a lot less fan fair\n propFinder = next((sysprop for sysprop in sysPropCache if sysprop[\"name\"] == evalTarget), False)\n # If we cannot find the property set \"NOT_CONFIGURED\" which will fail whatever the value should be\n if propFinder == False:\n propertyValue = \"NOT_CONFIGURED\"\n propDescription = \"\"\n propId = \"\"\n propCreatedOn = \"\"\n propCreatedBy = \"\"\n propUpdatedOn = \"\"\n propUpdatedBy = \"\"\n propScope = \"\"\n assetB64 = None\n else:\n propertyValue = str(propFinder[\"value\"])\n propDescription = str(propFinder[\"description\"]).replace(\"\\n \", \"\")\n propId = str(propFinder[\"sys_id\"])\n propCreatedOn = str(propFinder[\"sys_created_on\"])\n propCreatedBy = str(propFinder[\"sys_created_by\"])\n propUpdatedOn = str(propFinder[\"sys_updated_on\"])\n propUpdatedBy = str(propFinder[\"sys_updated_by\"])\n propScope = str(propFinder[\"sys_scope\"][\"value\"])\n # B64 encode all of the details for the Asset\n assetJson = json.dumps(propFinder,default=str).encode(\"utf-8\")\n assetB64 = base64.b64encode(assetJson) \n # NOTE: This is where the check evaluation happens - in SNOW these may be Bools or Numbers but will come back as Strings\n # always evaluate a failing condition first which should be the OPPOSITE of the SNOW reccomendation as sometimes the values\n # are not a simple Boolean expression\n if propertyValue != \"true\":\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"MEDIUM\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.26] Instance should enforce basic authentication for XSD requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does not enforce basic authentication for XSD requests. 
Use the 'glide.basicauth.required.xsd' property to designate if incoming XSD (XML Schema Definition) requests should require basic authentication. Without appropriate authorization configured on the incoming XSD requests, an unauthorized user can get access to sensitive content/data on the target instance. Refer to the remediation instructions if this configuration is not intended.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XD request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xsd-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"FAILED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"NEW\"},\n \"RecordState\": \"ACTIVE\"\n }\n yield finding\n else:\n finding = {\n \"SchemaVersion\": \"2018-10-08\",\n \"Id\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"ProductArn\": f\"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default\",\n \"GeneratorId\": f\"servicenow/{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}/check\",\n \"AwsAccountId\": awsAccountId,\n \"Types\": [\"Software and Configuration Checks\"],\n \"FirstObservedAt\": iso8601Time,\n \"CreatedAt\": iso8601Time,\n \"UpdatedAt\": iso8601Time,\n \"Severity\": {\"Label\": \"INFORMATIONAL\"},\n \"Confidence\": 99,\n \"Title\": \"[SSPM.Servicenow.AccessControl.26] Instance should enforce basic authentication for XSD requests\",\n \"Description\": f\"Servicenow instance {SNOW_INSTANCE_NAME} does enforce basic authentication for XSD requests.\",\n \"Remediation\": {\n \"Recommendation\": {\n \"Text\": \"For more information refer to the XD request authorization (instance security hardening) section of the Servicenow Product Documentation.\",\n \"Url\": \"https://docs.servicenow.com/bundle/utah-platform-security/page/administer/security/reference/xsd-request-authorization.html\",\n }\n },\n \"ProductFields\": {\n \"ProductName\": \"ElectricEye\",\n \"Provider\": \"ServiceNow\",\n \"ProviderType\": \"SaaS\",\n \"ProviderAccountId\": SNOW_INSTANCE_NAME,\n \"AssetRegion\": SNOW_INSTANCE_REGION,\n \"AssetDetails\": assetB64,\n \"AssetClass\": \"Management & Governance\",\n \"AssetService\": \"System Properties\",\n \"AssetComponent\": \"System Property\"\n },\n \"Resources\": [\n {\n \"Type\": \"ServicenowInstance\",\n \"Id\": f\"{SNOW_INSTANCE_NAME}/sys_properties/{evalTarget}\",\n \"Partition\": awsPartition,\n \"Region\": awsRegion,\n \"Details\": {\n \"Other\": {\n \"ServicenowInstance\": SNOW_INSTANCE_NAME,\n \"SysId\": propId,\n \"PropertyName\": evalTarget,\n \"PropertyValue\": propertyValue,\n \"Description\": propDescription,\n \"CreatedBy\": propCreatedBy,\n \"CreatedOn\": propCreatedOn,\n \"UpdatedBy\": propUpdatedBy,\n \"UpdatedOn\": propUpdatedOn,\n \"Scope\": propScope\n }\n }\n }\n ],\n \"Compliance\": {\n \"Status\": \"PASSED\",\n \"RelatedRequirements\": [\n \"NIST CSF V1.1 PR.PT-3\",\n \"NIST SP 800-53 Rev. 4 AC-3\",\n \"NIST SP 800-53 Rev. 
4 CM-7\",\n \"AICPA TSC CC6.1\",\n \"ISO 27001:2013 A.6.2.2\", \n \"ISO 27001:2013 A.9.1.2\",\n \"ISO 27001:2013 A.9.4.1\",\n \"ISO 27001:2013 A.9.4.4\",\n \"ISO 27001:2013 A.9.4.5\",\n \"ISO 27001:2013 A.13.1.1\",\n \"ISO 27001:2013 A.14.1.2\",\n \"ISO 27001:2013 A.14.1.3\",\n \"ISO 27001:2013 A.18.1.3\"\n ]\n },\n \"Workflow\": {\"Status\": \"RESOLVED\"},\n \"RecordState\": \"ARCHIVED\"\n }\n yield finding", "def user_should_get_an_ok_response():\n assert web_app.validate_reponse()", "def test_validate_get_single_resource(client):\n response = client.get('/user/1')\n assert response.status_code == 400\n assert response.json['message'] == INVALID_ACTION_MESSAGE", "def getYamlInstructions():\n with open('role_file_template.yaml', 'r') as yamlfile:\n output = yamlfile.read()\n if request.headers['Accept'] == 'application/json':\n return output, 200\n else:\n return render_template(\"output.html\", output=output)" ]
[ "0.5803334", "0.57779896", "0.5722596", "0.55923194", "0.5541996", "0.5463912", "0.54613954", "0.54241943", "0.5412314", "0.53474534", "0.53294784", "0.5312035", "0.52927554", "0.5291069", "0.5280915", "0.52559435", "0.5234564", "0.52308244", "0.5222391", "0.5222391", "0.52076674", "0.51712364", "0.5148108", "0.51227427", "0.51227427", "0.51165247", "0.51163125", "0.51089525", "0.5108425", "0.5100958", "0.5100958", "0.50995386", "0.5094954", "0.5094727", "0.5089133", "0.50768286", "0.5072614", "0.5071628", "0.50673914", "0.5066748", "0.50632167", "0.50575846", "0.50523317", "0.50495887", "0.5027393", "0.50208855", "0.50180954", "0.5017579", "0.5012663", "0.4996177", "0.49928442", "0.49917793", "0.49875835", "0.49856582", "0.4980959", "0.4972951", "0.4954714", "0.49451515", "0.49412447", "0.49174273", "0.49145836", "0.49063683", "0.49026877", "0.4900432", "0.48992735", "0.489814", "0.48980278", "0.48943815", "0.48884395", "0.48861337", "0.488269", "0.48825935", "0.4881476", "0.48730123", "0.48661432", "0.4863448", "0.4860011", "0.48596862", "0.4859593", "0.48562685", "0.4851677", "0.48512444", "0.48500192", "0.48487288", "0.48482674", "0.48429248", "0.4842031", "0.48389882", "0.48359397", "0.4833578", "0.4829877", "0.48281583", "0.4827509", "0.48245403", "0.4818478", "0.4812211", "0.48099622", "0.4804576", "0.48039773", "0.48012084", "0.47986868" ]
0.0
-1
Get an existing Assessment resource's state with the given name, id, and optional extra properties used to qualify the lookup.
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None) -> 'Assessment':
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    __props__ = AssessmentArgs.__new__(AssessmentArgs)

    __props__.__dict__["additional_data"] = None
    __props__.__dict__["display_name"] = None
    __props__.__dict__["links"] = None
    __props__.__dict__["metadata"] = None
    __props__.__dict__["name"] = None
    __props__.__dict__["partners_data"] = None
    __props__.__dict__["resource_details"] = None
    __props__.__dict__["status"] = None
    __props__.__dict__["type"] = None
    return Assessment(resource_name, opts=opts, __props__=__props__)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Assessment':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n\n __props__[\"additional_data\"] = None\n __props__[\"display_name\"] = None\n __props__[\"links\"] = None\n __props__[\"metadata\"] = None\n __props__[\"name\"] = None\n __props__[\"partners_data\"] = None\n __props__[\"resource_details\"] = None\n __props__[\"status\"] = None\n __props__[\"type\"] = None\n return Assessment(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n categories: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n implementation_effort: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n remediation_description: Optional[pulumi.Input[str]] = None,\n severity: Optional[pulumi.Input[str]] = None,\n threats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n user_impact: Optional[pulumi.Input[str]] = None) -> 'AssessmentPolicy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AssessmentPolicyState.__new__(_AssessmentPolicyState)\n\n __props__.__dict__[\"categories\"] = categories\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"implementation_effort\"] = implementation_effort\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"remediation_description\"] = remediation_description\n __props__.__dict__[\"severity\"] = severity\n __props__.__dict__[\"threats\"] = threats\n __props__.__dict__[\"user_impact\"] = user_impact\n return AssessmentPolicy(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n auth_mode: Optional[pulumi.Input[str]] = None,\n default_s3_location: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n engine_security_group_id: Optional[pulumi.Input[str]] = None,\n idp_auth_url: Optional[pulumi.Input[str]] = None,\n idp_relay_state_parameter_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n service_role: Optional[pulumi.Input[str]] = None,\n subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n url: Optional[pulumi.Input[str]] = None,\n user_role: Optional[pulumi.Input[str]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None,\n workspace_security_group_id: Optional[pulumi.Input[str]] = None) -> 'Studio':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _StudioState.__new__(_StudioState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"auth_mode\"] = auth_mode\n __props__.__dict__[\"default_s3_location\"] = default_s3_location\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"engine_security_group_id\"] = engine_security_group_id\n __props__.__dict__[\"idp_auth_url\"] = idp_auth_url\n __props__.__dict__[\"idp_relay_state_parameter_name\"] = idp_relay_state_parameter_name\n 
__props__.__dict__[\"name\"] = name\n __props__.__dict__[\"service_role\"] = service_role\n __props__.__dict__[\"subnet_ids\"] = subnet_ids\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"url\"] = url\n __props__.__dict__[\"user_role\"] = user_role\n __props__.__dict__[\"vpc_id\"] = vpc_id\n __props__.__dict__[\"workspace_security_group_id\"] = workspace_security_group_id\n return Studio(resource_name, opts=opts, __props__=__props__)", "def a_state(id):\n state = storage.get(State, id)\n if state is not None:\n return jsonify(state.to_dict())\n abort(404)", "def get_state_by_name(exploration_id, state_name, strict=True):\n exploration = get_exploration_by_id(exploration_id)\n assert state_name\n\n # TODO(sll): This is too slow; improve it.\n state = None\n for candidate_state in exploration.states:\n if candidate_state.name == state_name:\n state = candidate_state\n break\n\n if strict and not state:\n raise Exception('State %s not found' % state_name)\n return state", "def get_state_by_id(state_id):\n my_state = storage.get('State', state_id)\n if my_state is None:\n abort(404)\n return jsonify(my_state.to_dict())", "def get_state_by_id(state_id):\n for key, value in storage.all(\"State\").items():\n if state_id == value.id:\n return jsonify(value.to_dict())\n abort(404)", "def state_by_id(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n return jsonify(state.to_dict())", "def get_state_by_id(state_id):\n state = storage.get(State, state_id)\n if not state:\n abort(404)\n return jsonify(state.to_dict()), 200", "def state_by_id(state_id):\n states_values = storage.all(\"State\").values()\n for obj in states_values:\n if obj.id == state_id:\n return jsonify(obj.to_dict())\n abort(404)", "def get_assessment(assessment_id: Optional[str] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetAssessmentResult:\n __args__ = dict()\n __args__['assessmentId'] = assessment_id\n opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)\n __ret__ = pulumi.runtime.invoke('aws-native:auditmanager:getAssessment', __args__, opts=opts, typ=GetAssessmentResult).value\n\n return AwaitableGetAssessmentResult(\n arn=pulumi.get(__ret__, 'arn'),\n assessment_id=pulumi.get(__ret__, 'assessment_id'),\n assessment_reports_destination=pulumi.get(__ret__, 'assessment_reports_destination'),\n creation_time=pulumi.get(__ret__, 'creation_time'),\n delegations=pulumi.get(__ret__, 'delegations'),\n roles=pulumi.get(__ret__, 'roles'),\n scope=pulumi.get(__ret__, 'scope'),\n status=pulumi.get(__ret__, 'status'),\n tags=pulumi.get(__ret__, 'tags'))", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n comparison: Optional[pulumi.Input[str]] = None,\n created_at: Optional[pulumi.Input[int]] = None,\n critical: Optional[pulumi.Input[pulumi.InputType['InfraAlertConditionCriticalArgs']]] = None,\n description: Optional[pulumi.Input[str]] = None,\n enabled: Optional[pulumi.Input[bool]] = None,\n entity_guid: Optional[pulumi.Input[str]] = None,\n event: Optional[pulumi.Input[str]] = None,\n integration_provider: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n policy_id: Optional[pulumi.Input[int]] = None,\n process_where: Optional[pulumi.Input[str]] = None,\n runbook_url: Optional[pulumi.Input[str]] = None,\n select: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n updated_at: 
Optional[pulumi.Input[int]] = None,\n violation_close_timer: Optional[pulumi.Input[int]] = None,\n warning: Optional[pulumi.Input[pulumi.InputType['InfraAlertConditionWarningArgs']]] = None,\n where: Optional[pulumi.Input[str]] = None) -> 'InfraAlertCondition':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InfraAlertConditionState.__new__(_InfraAlertConditionState)\n\n __props__.__dict__[\"comparison\"] = comparison\n __props__.__dict__[\"created_at\"] = created_at\n __props__.__dict__[\"critical\"] = critical\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"enabled\"] = enabled\n __props__.__dict__[\"entity_guid\"] = entity_guid\n __props__.__dict__[\"event\"] = event\n __props__.__dict__[\"integration_provider\"] = integration_provider\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"policy_id\"] = policy_id\n __props__.__dict__[\"process_where\"] = process_where\n __props__.__dict__[\"runbook_url\"] = runbook_url\n __props__.__dict__[\"select\"] = select\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"updated_at\"] = updated_at\n __props__.__dict__[\"violation_close_timer\"] = violation_close_timer\n __props__.__dict__[\"warning\"] = warning\n __props__.__dict__[\"where\"] = where\n return InfraAlertCondition(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n force: Optional[pulumi.Input[bool]] = None,\n instance_id: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input[str]] = None) -> 'InstanceState':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceStateState.__new__(_InstanceStateState)\n\n __props__.__dict__[\"force\"] = force\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"state\"] = state\n return InstanceState(resource_name, opts=opts, __props__=__props__)", "def get_state(state_id):\n try:\n ''' Check that state_id exists '''\n query = State.select().where(State.id == state_id)\n if not query.exists():\n raise LookupError('state_id')\n\n state = State.get(State.id == state_id)\n return state.to_dict(), 200\n except LookupError as e:\n abort(404)\n except Exception as e:\n abort(500)", "def get_state_by_id(state_id):\r\n response = Response(json.dumps(json_error(ResponsesREST.INVALID_INPUT.value)),\r\n status=ResponsesREST.INVALID_INPUT.value, mimetype=\"application/json\")\r\n if validator_id.is_valid({\"id\": state_id}):\r\n state_get = State()\r\n state_get.id_state = state_id\r\n result = state_get.get_state()\r\n if result in (ResponsesREST.NOT_FOUND.value, ResponsesREST.SERVER_ERROR.value):\r\n response = Response(json.dumps(json_error(result)),\r\n status=result, mimetype=\"application/json\")\r\n else:\r\n response = Response(json.dumps(result.json_state()),\r\n status=ResponsesREST.SUCCESSFUL.value,\r\n mimetype=\"application/json\")\r\n return response", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n activation_key: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n ip_address: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n private_link_endpoint: Optional[pulumi.Input[str]] = None,\n security_group_arns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n subnet_arns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: 
Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n vpc_endpoint_id: Optional[pulumi.Input[str]] = None) -> 'Agent':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AgentState.__new__(_AgentState)\n\n __props__.__dict__[\"activation_key\"] = activation_key\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"ip_address\"] = ip_address\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"private_link_endpoint\"] = private_link_endpoint\n __props__.__dict__[\"security_group_arns\"] = security_group_arns\n __props__.__dict__[\"subnet_arns\"] = subnet_arns\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"vpc_endpoint_id\"] = vpc_endpoint_id\n return Agent(resource_name, opts=opts, __props__=__props__)", "def a_states_id(state_id):\n i = storage.get(\"State\", state_id)\n if i:\n return jsonify(i.to_dict())\n else:\n return (jsonify({\"error\": \"Not found\"}), 404)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'FhirStore':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = FhirStoreArgs.__new__(FhirStoreArgs)\n\n __props__.__dict__[\"complex_data_type_reference_parsing\"] = None\n __props__.__dict__[\"dataset_id\"] = None\n __props__.__dict__[\"default_search_handling_strict\"] = None\n __props__.__dict__[\"disable_referential_integrity\"] = None\n __props__.__dict__[\"disable_resource_versioning\"] = None\n __props__.__dict__[\"enable_update_create\"] = None\n __props__.__dict__[\"fhir_store_id\"] = None\n __props__.__dict__[\"labels\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"notification_config\"] = None\n __props__.__dict__[\"notification_configs\"] = None\n __props__.__dict__[\"project\"] = None\n __props__.__dict__[\"stream_configs\"] = None\n __props__.__dict__[\"validation_config\"] = None\n __props__.__dict__[\"version\"] = None\n return FhirStore(resource_name, opts=opts, __props__=__props__)", "def get(self, request, state_id, format=None):\n try:\n state = State.objects.get(id=state_id)\n except ObjectDoesNotExist:\n raise NotFound(detail=\"State not found\")\n\n return Response(StateSerializer(state).data)", "def get_one_state(state_id):\n state = storage.get('State', state_id)\n if state is None:\n abort(404)\n if request.method == 'DELETE':\n storage.delete(state)\n storage.save()\n return jsonify({}), 200\n elif request.method == 'PUT':\n try:\n res_dict = request.get_json()\n res_dict['id'] = state.id\n res_dict['created_at'] = state.created_at\n state.__init__(**res_dict)\n state.save()\n return jsonify(state.to_dict()), 200\n except:\n abort(400, description='Not a JSON')\n return jsonify(state.to_dict())", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Workflow':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = WorkflowArgs.__new__(WorkflowArgs)\n\n __props__.__dict__[\"acr\"] = None\n __props__.__dict__[\"aks_resource_id\"] = None\n __props__.__dict__[\"app_name\"] = None\n __props__.__dict__[\"auth_status\"] = None\n __props__.__dict__[\"branch_name\"] = None\n __props__.__dict__[\"builder_version\"] = None\n __props__.__dict__[\"deployment_properties\"] = None\n 
__props__.__dict__[\"docker_build_context\"] = None\n __props__.__dict__[\"dockerfile\"] = None\n __props__.__dict__[\"dockerfile_generation_mode\"] = None\n __props__.__dict__[\"dockerfile_output_directory\"] = None\n __props__.__dict__[\"generation_language\"] = None\n __props__.__dict__[\"image_name\"] = None\n __props__.__dict__[\"image_tag\"] = None\n __props__.__dict__[\"language_version\"] = None\n __props__.__dict__[\"last_workflow_run\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"manifest_generation_mode\"] = None\n __props__.__dict__[\"manifest_output_directory\"] = None\n __props__.__dict__[\"manifest_type\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"namespace\"] = None\n __props__.__dict__[\"oidc_credentials\"] = None\n __props__.__dict__[\"port\"] = None\n __props__.__dict__[\"pr_status\"] = None\n __props__.__dict__[\"pr_url\"] = None\n __props__.__dict__[\"pull_number\"] = None\n __props__.__dict__[\"repository_name\"] = None\n __props__.__dict__[\"repository_owner\"] = None\n __props__.__dict__[\"system_data\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"type\"] = None\n return Workflow(resource_name, opts=opts, __props__=__props__)", "def get_state_by_id(exploration_id, state_id, strict=True):\n # TODO(sll): Generalize this to handle multiple state_ids at a time.\n state_memcache_key = _get_state_memcache_key(exploration_id, state_id)\n memcached_state = memcache_services.get_multi(\n [state_memcache_key]).get(state_memcache_key)\n\n if memcached_state is not None:\n return memcached_state\n else:\n state_model = exp_models.StateModel.get(\n exploration_id, state_id, strict=strict)\n if state_model:\n state = exp_domain.State.from_dict(state_id, state_model.value)\n memcache_services.set_multi({state_memcache_key: state})\n return state\n else:\n return None", "def get_state(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n return jsonify(state.to_dict())", "def get_state(state_id):\n try:\n state = jsonify(storage.get(State, state_id).to_dict())\n return state\n except:\n abort(404)", "def state_id(state_id):\n state = storage.get(State, state_id)\n if state is None:\n abort(404)\n else:\n return jsonify(state.to_dict())", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n as_path_match_mode: Optional[pulumi.Input[str]] = None,\n cen_id: Optional[pulumi.Input[str]] = None,\n cen_region_id: Optional[pulumi.Input[str]] = None,\n cidr_match_mode: Optional[pulumi.Input[str]] = None,\n community_match_mode: Optional[pulumi.Input[str]] = None,\n community_operate_mode: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n destination_child_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n destination_cidr_blocks: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n destination_instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n destination_instance_ids_reverse_match: Optional[pulumi.Input[bool]] = None,\n destination_route_table_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n map_result: Optional[pulumi.Input[str]] = None,\n match_asns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n match_community_sets: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n next_priority: Optional[pulumi.Input[int]] = None,\n operate_community_sets: 
Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n preference: Optional[pulumi.Input[int]] = None,\n prepend_as_paths: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n route_map_id: Optional[pulumi.Input[str]] = None,\n route_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n source_child_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n source_instance_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n source_instance_ids_reverse_match: Optional[pulumi.Input[bool]] = None,\n source_region_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n source_route_table_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n status: Optional[pulumi.Input[str]] = None,\n transit_router_route_table_id: Optional[pulumi.Input[str]] = None,\n transmit_direction: Optional[pulumi.Input[str]] = None) -> 'RouteMap':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RouteMapState.__new__(_RouteMapState)\n\n __props__.__dict__[\"as_path_match_mode\"] = as_path_match_mode\n __props__.__dict__[\"cen_id\"] = cen_id\n __props__.__dict__[\"cen_region_id\"] = cen_region_id\n __props__.__dict__[\"cidr_match_mode\"] = cidr_match_mode\n __props__.__dict__[\"community_match_mode\"] = community_match_mode\n __props__.__dict__[\"community_operate_mode\"] = community_operate_mode\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"destination_child_instance_types\"] = destination_child_instance_types\n __props__.__dict__[\"destination_cidr_blocks\"] = destination_cidr_blocks\n __props__.__dict__[\"destination_instance_ids\"] = destination_instance_ids\n __props__.__dict__[\"destination_instance_ids_reverse_match\"] = destination_instance_ids_reverse_match\n __props__.__dict__[\"destination_route_table_ids\"] = destination_route_table_ids\n __props__.__dict__[\"map_result\"] = map_result\n __props__.__dict__[\"match_asns\"] = match_asns\n __props__.__dict__[\"match_community_sets\"] = match_community_sets\n __props__.__dict__[\"next_priority\"] = next_priority\n __props__.__dict__[\"operate_community_sets\"] = operate_community_sets\n __props__.__dict__[\"preference\"] = preference\n __props__.__dict__[\"prepend_as_paths\"] = prepend_as_paths\n __props__.__dict__[\"priority\"] = priority\n __props__.__dict__[\"route_map_id\"] = route_map_id\n __props__.__dict__[\"route_types\"] = route_types\n __props__.__dict__[\"source_child_instance_types\"] = source_child_instance_types\n __props__.__dict__[\"source_instance_ids\"] = source_instance_ids\n __props__.__dict__[\"source_instance_ids_reverse_match\"] = source_instance_ids_reverse_match\n __props__.__dict__[\"source_region_ids\"] = source_region_ids\n __props__.__dict__[\"source_route_table_ids\"] = source_route_table_ids\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"transit_router_route_table_id\"] = transit_router_route_table_id\n __props__.__dict__[\"transmit_direction\"] = transmit_direction\n return RouteMap(resource_name, opts=opts, __props__=__props__)", "def get_state(state_id):\n state = storage.get(\"State\", state_id)\n if state:\n return jsonify(state.to_dict())\n abort(404)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n minimal_action: Optional[pulumi.Input[str]] = None,\n most_disruptive_allowed_action: Optional[pulumi.Input[str]] = None,\n name: 
Optional[pulumi.Input[str]] = None,\n preserved_state: Optional[pulumi.Input[pulumi.InputType['RegionPerInstanceConfigPreservedStateArgs']]] = None,\n project: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n region_instance_group_manager: Optional[pulumi.Input[str]] = None,\n remove_instance_state_on_destroy: Optional[pulumi.Input[bool]] = None) -> 'RegionPerInstanceConfig':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RegionPerInstanceConfigState.__new__(_RegionPerInstanceConfigState)\n\n __props__.__dict__[\"minimal_action\"] = minimal_action\n __props__.__dict__[\"most_disruptive_allowed_action\"] = most_disruptive_allowed_action\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"preserved_state\"] = preserved_state\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"region_instance_group_manager\"] = region_instance_group_manager\n __props__.__dict__[\"remove_instance_state_on_destroy\"] = remove_instance_state_on_destroy\n return RegionPerInstanceConfig(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n accessibility_error_redirect_url: Optional[pulumi.Input[str]] = None,\n accessibility_login_redirect_url: Optional[pulumi.Input[str]] = None,\n accessibility_self_service: Optional[pulumi.Input[bool]] = None,\n admin_note: Optional[pulumi.Input[str]] = None,\n app_links_json: Optional[pulumi.Input[str]] = None,\n app_settings_json: Optional[pulumi.Input[str]] = None,\n authentication_policy: Optional[pulumi.Input[str]] = None,\n auto_key_rotation: Optional[pulumi.Input[bool]] = None,\n auto_submit_toolbar: Optional[pulumi.Input[bool]] = None,\n client_basic_secret: Optional[pulumi.Input[str]] = None,\n client_id: Optional[pulumi.Input[str]] = None,\n client_secret: Optional[pulumi.Input[str]] = None,\n client_uri: Optional[pulumi.Input[str]] = None,\n consent_method: Optional[pulumi.Input[str]] = None,\n enduser_note: Optional[pulumi.Input[str]] = None,\n grant_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n groups_claim: Optional[pulumi.Input[pulumi.InputType['OAuthGroupsClaimArgs']]] = None,\n hide_ios: Optional[pulumi.Input[bool]] = None,\n hide_web: Optional[pulumi.Input[bool]] = None,\n implicit_assignment: Optional[pulumi.Input[bool]] = None,\n issuer_mode: Optional[pulumi.Input[str]] = None,\n jwks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OAuthJwkArgs']]]]] = None,\n jwks_uri: Optional[pulumi.Input[str]] = None,\n label: Optional[pulumi.Input[str]] = None,\n login_mode: Optional[pulumi.Input[str]] = None,\n login_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n login_uri: Optional[pulumi.Input[str]] = None,\n logo: Optional[pulumi.Input[str]] = None,\n logo_uri: Optional[pulumi.Input[str]] = None,\n logo_url: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n omit_secret: Optional[pulumi.Input[bool]] = None,\n pkce_required: Optional[pulumi.Input[bool]] = None,\n policy_uri: Optional[pulumi.Input[str]] = None,\n post_logout_redirect_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n profile: Optional[pulumi.Input[str]] = None,\n redirect_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n refresh_token_leeway: Optional[pulumi.Input[int]] = None,\n refresh_token_rotation: Optional[pulumi.Input[str]] = 
None,\n response_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n sign_on_mode: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None,\n token_endpoint_auth_method: Optional[pulumi.Input[str]] = None,\n tos_uri: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n user_name_template: Optional[pulumi.Input[str]] = None,\n user_name_template_push_status: Optional[pulumi.Input[str]] = None,\n user_name_template_suffix: Optional[pulumi.Input[str]] = None,\n user_name_template_type: Optional[pulumi.Input[str]] = None,\n wildcard_redirect: Optional[pulumi.Input[str]] = None) -> 'OAuth':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _OAuthState.__new__(_OAuthState)\n\n __props__.__dict__[\"accessibility_error_redirect_url\"] = accessibility_error_redirect_url\n __props__.__dict__[\"accessibility_login_redirect_url\"] = accessibility_login_redirect_url\n __props__.__dict__[\"accessibility_self_service\"] = accessibility_self_service\n __props__.__dict__[\"admin_note\"] = admin_note\n __props__.__dict__[\"app_links_json\"] = app_links_json\n __props__.__dict__[\"app_settings_json\"] = app_settings_json\n __props__.__dict__[\"authentication_policy\"] = authentication_policy\n __props__.__dict__[\"auto_key_rotation\"] = auto_key_rotation\n __props__.__dict__[\"auto_submit_toolbar\"] = auto_submit_toolbar\n __props__.__dict__[\"client_basic_secret\"] = client_basic_secret\n __props__.__dict__[\"client_id\"] = client_id\n __props__.__dict__[\"client_secret\"] = client_secret\n __props__.__dict__[\"client_uri\"] = client_uri\n __props__.__dict__[\"consent_method\"] = consent_method\n __props__.__dict__[\"enduser_note\"] = enduser_note\n __props__.__dict__[\"grant_types\"] = grant_types\n __props__.__dict__[\"groups_claim\"] = groups_claim\n __props__.__dict__[\"hide_ios\"] = hide_ios\n __props__.__dict__[\"hide_web\"] = hide_web\n __props__.__dict__[\"implicit_assignment\"] = implicit_assignment\n __props__.__dict__[\"issuer_mode\"] = issuer_mode\n __props__.__dict__[\"jwks\"] = jwks\n __props__.__dict__[\"jwks_uri\"] = jwks_uri\n __props__.__dict__[\"label\"] = label\n __props__.__dict__[\"login_mode\"] = login_mode\n __props__.__dict__[\"login_scopes\"] = login_scopes\n __props__.__dict__[\"login_uri\"] = login_uri\n __props__.__dict__[\"logo\"] = logo\n __props__.__dict__[\"logo_uri\"] = logo_uri\n __props__.__dict__[\"logo_url\"] = logo_url\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"omit_secret\"] = omit_secret\n __props__.__dict__[\"pkce_required\"] = pkce_required\n __props__.__dict__[\"policy_uri\"] = policy_uri\n __props__.__dict__[\"post_logout_redirect_uris\"] = post_logout_redirect_uris\n __props__.__dict__[\"profile\"] = profile\n __props__.__dict__[\"redirect_uris\"] = redirect_uris\n __props__.__dict__[\"refresh_token_leeway\"] = refresh_token_leeway\n __props__.__dict__[\"refresh_token_rotation\"] = refresh_token_rotation\n __props__.__dict__[\"response_types\"] = response_types\n __props__.__dict__[\"sign_on_mode\"] = sign_on_mode\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"token_endpoint_auth_method\"] = token_endpoint_auth_method\n __props__.__dict__[\"tos_uri\"] = tos_uri\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"user_name_template\"] = user_name_template\n __props__.__dict__[\"user_name_template_push_status\"] = user_name_template_push_status\n __props__.__dict__[\"user_name_template_suffix\"] = 
user_name_template_suffix\n __props__.__dict__[\"user_name_template_type\"] = user_name_template_type\n __props__.__dict__[\"wildcard_redirect\"] = wildcard_redirect\n return OAuth(resource_name, opts=opts, __props__=__props__)", "def statesById(state_id):\n obj = storage.get(State, state_id)\n if obj:\n return jsonify(obj.to_dict())\n return jsonify({\"error\": \"Not found\"}), 404", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n asset_statuses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ZoneAssetStatusArgs']]]]] = None,\n create_time: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n discovery_spec: Optional[pulumi.Input[pulumi.InputType['ZoneDiscoverySpecArgs']]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n lake: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project: Optional[pulumi.Input[str]] = None,\n resource_spec: Optional[pulumi.Input[pulumi.InputType['ZoneResourceSpecArgs']]] = None,\n state: Optional[pulumi.Input[str]] = None,\n type: Optional[pulumi.Input[str]] = None,\n uid: Optional[pulumi.Input[str]] = None,\n update_time: Optional[pulumi.Input[str]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"asset_statuses\"] = asset_statuses\n __props__.__dict__[\"create_time\"] = create_time\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"discovery_spec\"] = discovery_spec\n __props__.__dict__[\"display_name\"] = display_name\n __props__.__dict__[\"labels\"] = labels\n __props__.__dict__[\"lake\"] = lake\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project\"] = project\n __props__.__dict__[\"resource_spec\"] = resource_spec\n __props__.__dict__[\"state\"] = state\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"uid\"] = uid\n __props__.__dict__[\"update_time\"] = update_time\n return Zone(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'ResolverRule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = ResolverRuleArgs.__new__(ResolverRuleArgs)\n\n __props__.__dict__[\"arn\"] = None\n __props__.__dict__[\"domain_name\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"resolver_endpoint_id\"] = None\n __props__.__dict__[\"resolver_rule_id\"] = None\n __props__.__dict__[\"rule_type\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"target_ips\"] = None\n return ResolverRule(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n auto_devops_enabled: Optional[pulumi.Input[bool]] = None,\n avatar: Optional[pulumi.Input[str]] = None,\n avatar_hash: Optional[pulumi.Input[str]] = None,\n avatar_url: Optional[pulumi.Input[str]] = None,\n default_branch_protection: Optional[pulumi.Input[int]] = None,\n description: Optional[pulumi.Input[str]] = None,\n emails_disabled: Optional[pulumi.Input[bool]] = None,\n extra_shared_runners_minutes_limit: Optional[pulumi.Input[int]] = None,\n full_name: 
Optional[pulumi.Input[str]] = None,\n full_path: Optional[pulumi.Input[str]] = None,\n ip_restriction_ranges: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n lfs_enabled: Optional[pulumi.Input[bool]] = None,\n membership_lock: Optional[pulumi.Input[bool]] = None,\n mentions_disabled: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n parent_id: Optional[pulumi.Input[int]] = None,\n path: Optional[pulumi.Input[str]] = None,\n prevent_forking_outside_group: Optional[pulumi.Input[bool]] = None,\n project_creation_level: Optional[pulumi.Input[str]] = None,\n request_access_enabled: Optional[pulumi.Input[bool]] = None,\n require_two_factor_authentication: Optional[pulumi.Input[bool]] = None,\n runners_token: Optional[pulumi.Input[str]] = None,\n share_with_group_lock: Optional[pulumi.Input[bool]] = None,\n shared_runners_minutes_limit: Optional[pulumi.Input[int]] = None,\n subgroup_creation_level: Optional[pulumi.Input[str]] = None,\n two_factor_grace_period: Optional[pulumi.Input[int]] = None,\n visibility_level: Optional[pulumi.Input[str]] = None,\n web_url: Optional[pulumi.Input[str]] = None) -> 'Group':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _GroupState.__new__(_GroupState)\n\n __props__.__dict__[\"auto_devops_enabled\"] = auto_devops_enabled\n __props__.__dict__[\"avatar\"] = avatar\n __props__.__dict__[\"avatar_hash\"] = avatar_hash\n __props__.__dict__[\"avatar_url\"] = avatar_url\n __props__.__dict__[\"default_branch_protection\"] = default_branch_protection\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"emails_disabled\"] = emails_disabled\n __props__.__dict__[\"extra_shared_runners_minutes_limit\"] = extra_shared_runners_minutes_limit\n __props__.__dict__[\"full_name\"] = full_name\n __props__.__dict__[\"full_path\"] = full_path\n __props__.__dict__[\"ip_restriction_ranges\"] = ip_restriction_ranges\n __props__.__dict__[\"lfs_enabled\"] = lfs_enabled\n __props__.__dict__[\"membership_lock\"] = membership_lock\n __props__.__dict__[\"mentions_disabled\"] = mentions_disabled\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"parent_id\"] = parent_id\n __props__.__dict__[\"path\"] = path\n __props__.__dict__[\"prevent_forking_outside_group\"] = prevent_forking_outside_group\n __props__.__dict__[\"project_creation_level\"] = project_creation_level\n __props__.__dict__[\"request_access_enabled\"] = request_access_enabled\n __props__.__dict__[\"require_two_factor_authentication\"] = require_two_factor_authentication\n __props__.__dict__[\"runners_token\"] = runners_token\n __props__.__dict__[\"share_with_group_lock\"] = share_with_group_lock\n __props__.__dict__[\"shared_runners_minutes_limit\"] = shared_runners_minutes_limit\n __props__.__dict__[\"subgroup_creation_level\"] = subgroup_creation_level\n __props__.__dict__[\"two_factor_grace_period\"] = two_factor_grace_period\n __props__.__dict__[\"visibility_level\"] = visibility_level\n __props__.__dict__[\"web_url\"] = web_url\n return Group(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n minimum_engine_version: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, 
pulumi.Input[str]]]] = None,\n user_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Acl':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AclState.__new__(_AclState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"minimum_engine_version\"] = minimum_engine_version\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_names\"] = user_names\n return Acl(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n add_on: Optional[pulumi.Input[pulumi.InputType['InstanceAddOnArgs']]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n availability_zone: Optional[pulumi.Input[str]] = None,\n blueprint_id: Optional[pulumi.Input[str]] = None,\n bundle_id: Optional[pulumi.Input[str]] = None,\n cpu_count: Optional[pulumi.Input[int]] = None,\n created_at: Optional[pulumi.Input[str]] = None,\n ip_address_type: Optional[pulumi.Input[str]] = None,\n ipv6_address: Optional[pulumi.Input[str]] = None,\n ipv6_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n is_static_ip: Optional[pulumi.Input[bool]] = None,\n key_pair_name: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n private_ip_address: Optional[pulumi.Input[str]] = None,\n public_ip_address: Optional[pulumi.Input[str]] = None,\n ram_size: Optional[pulumi.Input[float]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_data: Optional[pulumi.Input[str]] = None,\n username: Optional[pulumi.Input[str]] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceState.__new__(_InstanceState)\n\n __props__.__dict__[\"add_on\"] = add_on\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"availability_zone\"] = availability_zone\n __props__.__dict__[\"blueprint_id\"] = blueprint_id\n __props__.__dict__[\"bundle_id\"] = bundle_id\n __props__.__dict__[\"cpu_count\"] = cpu_count\n __props__.__dict__[\"created_at\"] = created_at\n __props__.__dict__[\"ip_address_type\"] = ip_address_type\n __props__.__dict__[\"ipv6_address\"] = ipv6_address\n __props__.__dict__[\"ipv6_addresses\"] = ipv6_addresses\n __props__.__dict__[\"is_static_ip\"] = is_static_ip\n __props__.__dict__[\"key_pair_name\"] = key_pair_name\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"private_ip_address\"] = private_ip_address\n __props__.__dict__[\"public_ip_address\"] = public_ip_address\n __props__.__dict__[\"ram_size\"] = ram_size\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_data\"] = user_data\n __props__.__dict__[\"username\"] = username\n return Instance(resource_name, opts=opts, __props__=__props__)", "def lookup(job_id: str) -> JobState:\n job = JobState(job_id)\n job.update()\n return job", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Organization':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = OrganizationArgs.__new__(OrganizationArgs)\n\n __props__.__dict__[\"arn\"] = None\n __props__.__dict__[\"feature_set\"] = None\n 
__props__.__dict__[\"management_account_arn\"] = None\n __props__.__dict__[\"management_account_email\"] = None\n __props__.__dict__[\"management_account_id\"] = None\n __props__.__dict__[\"root_id\"] = None\n return Organization(resource_name, opts=opts, __props__=__props__)", "def get(isamAppliance, name, check_mode=False, force=False):\n ret_obj = search(isamAppliance, name=name, check_mode=check_mode, force=force)\n id = ret_obj['data']\n\n if id == {}:\n warnings = [\"STS Chain {0} had no match, skipping retrieval.\".format(name)]\n return isamAppliance.create_return_object(warnings=warnings)\n else:\n return _get(isamAppliance, id)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n domain_id: Optional[pulumi.Input[str]] = None,\n group_id: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n role_id: Optional[pulumi.Input[str]] = None,\n user_id: Optional[pulumi.Input[str]] = None) -> 'RoleAssignment':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RoleAssignmentState.__new__(_RoleAssignmentState)\n\n __props__.__dict__[\"domain_id\"] = domain_id\n __props__.__dict__[\"group_id\"] = group_id\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"role_id\"] = role_id\n __props__.__dict__[\"user_id\"] = user_id\n return RoleAssignment(resource_name, opts=opts, __props__=__props__)", "def get_with_inventory(self, context, id_):\n try:\n db_resource_mgr_data = self.db_api.get_resource_manager(\n context, id_)\n db_props_data = self.db_api.get_resource_mgr_properties(context,\n id_, key=eon_const.RESOURCE_MGR_STATE_KEY)\n\n driver_obj = driver.load_resource_mgr_driver(\n db_resource_mgr_data['type'])\n inventory = driver_obj.get_inventory(db_resource_mgr_data)\n resource_mgr_data = _make_response(db_resource_mgr_data,\n property_list=db_props_data,\n inventory=inventory)\n LOG.debug(\"[%s] Resource data %s\"\n % (id_, logging.mask_password(resource_mgr_data)))\n return resource_mgr_data\n\n except exception.NotFound as e:\n LOG.error(e)\n raise e\n except Exception as e:\n msg = \"Error retrieving the 'resource':%s. 
Reason: %s\" % (\n id_, e.message)\n LOG.exception(msg)\n raise exception.RetrieveException(e.message)", "def get(resource_name, id, opts=None, arn=None, block_device_mappings=None, capacity_reservation_specification=None, credit_specification=None, default_version=None, description=None, disable_api_termination=None, ebs_optimized=None, elastic_gpu_specifications=None, elastic_inference_accelerator=None, iam_instance_profile=None, image_id=None, instance_initiated_shutdown_behavior=None, instance_market_options=None, instance_type=None, kernel_id=None, key_name=None, latest_version=None, license_specifications=None, monitoring=None, name=None, name_prefix=None, network_interfaces=None, placement=None, ram_disk_id=None, security_group_names=None, tag_specifications=None, tags=None, user_data=None, vpc_security_group_ids=None):\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n __props__[\"arn\"] = arn\n __props__[\"block_device_mappings\"] = block_device_mappings\n __props__[\"capacity_reservation_specification\"] = capacity_reservation_specification\n __props__[\"credit_specification\"] = credit_specification\n __props__[\"default_version\"] = default_version\n __props__[\"description\"] = description\n __props__[\"disable_api_termination\"] = disable_api_termination\n __props__[\"ebs_optimized\"] = ebs_optimized\n __props__[\"elastic_gpu_specifications\"] = elastic_gpu_specifications\n __props__[\"elastic_inference_accelerator\"] = elastic_inference_accelerator\n __props__[\"iam_instance_profile\"] = iam_instance_profile\n __props__[\"image_id\"] = image_id\n __props__[\"instance_initiated_shutdown_behavior\"] = instance_initiated_shutdown_behavior\n __props__[\"instance_market_options\"] = instance_market_options\n __props__[\"instance_type\"] = instance_type\n __props__[\"kernel_id\"] = kernel_id\n __props__[\"key_name\"] = key_name\n __props__[\"latest_version\"] = latest_version\n __props__[\"license_specifications\"] = license_specifications\n __props__[\"monitoring\"] = monitoring\n __props__[\"name\"] = name\n __props__[\"name_prefix\"] = name_prefix\n __props__[\"network_interfaces\"] = network_interfaces\n __props__[\"placement\"] = placement\n __props__[\"ram_disk_id\"] = ram_disk_id\n __props__[\"security_group_names\"] = security_group_names\n __props__[\"tag_specifications\"] = tag_specifications\n __props__[\"tags\"] = tags\n __props__[\"user_data\"] = user_data\n __props__[\"vpc_security_group_ids\"] = vpc_security_group_ids\n return LaunchTemplate(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Canary':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = CanaryArgs.__new__(CanaryArgs)\n\n __props__.__dict__[\"artifact_config\"] = None\n __props__.__dict__[\"artifact_s3_location\"] = None\n __props__.__dict__[\"code\"] = None\n __props__.__dict__[\"delete_lambda_resources_on_canary_deletion\"] = None\n __props__.__dict__[\"execution_role_arn\"] = None\n __props__.__dict__[\"failure_retention_period\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"run_config\"] = None\n __props__.__dict__[\"runtime_version\"] = None\n __props__.__dict__[\"schedule\"] = None\n __props__.__dict__[\"start_canary_after_creation\"] = None\n __props__.__dict__[\"state\"] = None\n __props__.__dict__[\"success_retention_period\"] = None\n __props__.__dict__[\"tags\"] = None\n 
__props__.__dict__[\"visual_reference\"] = None\n __props__.__dict__[\"vpc_config\"] = None\n return Canary(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n enabled: Optional[pulumi.Input[bool]] = None,\n instance_id: Optional[pulumi.Input[int]] = None,\n message_type: Optional[pulumi.Input[str]] = None,\n queue_regex: Optional[pulumi.Input[str]] = None,\n recipients: Optional[pulumi.Input[Sequence[pulumi.Input[int]]]] = None,\n reminder_interval: Optional[pulumi.Input[int]] = None,\n time_threshold: Optional[pulumi.Input[int]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value_calculation: Optional[pulumi.Input[str]] = None,\n value_threshold: Optional[pulumi.Input[int]] = None,\n vhost_regex: Optional[pulumi.Input[str]] = None) -> 'Alarm':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AlarmState.__new__(_AlarmState)\n\n __props__.__dict__[\"enabled\"] = enabled\n __props__.__dict__[\"instance_id\"] = instance_id\n __props__.__dict__[\"message_type\"] = message_type\n __props__.__dict__[\"queue_regex\"] = queue_regex\n __props__.__dict__[\"recipients\"] = recipients\n __props__.__dict__[\"reminder_interval\"] = reminder_interval\n __props__.__dict__[\"time_threshold\"] = time_threshold\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value_calculation\"] = value_calculation\n __props__.__dict__[\"value_threshold\"] = value_threshold\n __props__.__dict__[\"vhost_regex\"] = vhost_regex\n return Alarm(resource_name, opts=opts, __props__=__props__)", "def get_with_inventory(self, context, id_):\n try:\n db_resource_data = self.db_api.get_resource(context, id_)\n res_properties = self.db_api.get_properties(context, id_)\n\n # for non resource managers return get\n if (db_resource_data['type'] !=\n eon_const.EON_RESOURCE_TYPE_ESX_CLUSTER):\n return _make_response(db_resource_data)\n\n res_mgr_obj = (\n self.db_api.get_resource_managers_by_resource_id(context,\n id_))\n driver_obj = driver.load_resource_driver(db_resource_data['type'])\n _inventory = driver_obj.get_res_inventory(res_mgr_obj,\n res_properties)\n _resource_data = _make_response(db_resource_data,\n inventory=_inventory)\n # (NOTE) Here setting the details of resource manager for the\n # resource\n _res_mgr_data = _make_response(res_mgr_obj, meta_data=False)\n _resource_data[eon_const.RSRC_MGR_INFO] = _res_mgr_data\n\n except exception.NotFound as e:\n LOG.exception(e)\n raise e\n except Exception as e:\n msg = _(\"Error retrieving the 'eon_resource':%s. 
Reason: %s\") % (\n id_, e)\n log_msg = (\"Error retrieving the 'eon_resource':%s.\"\n \" Reason: %s\") % (id_, e)\n LOG.exception(log_msg)\n raise exception.RetrieveException(msg)\n\n LOG.info(\"The Resource data %s \"\n % logging.mask_password(_resource_data))\n return _resource_data", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'SqlVulnerabilityAssessmentBaselineRule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = SqlVulnerabilityAssessmentBaselineRuleArgs.__new__(SqlVulnerabilityAssessmentBaselineRuleArgs)\n\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"properties\"] = None\n __props__.__dict__[\"type\"] = None\n return SqlVulnerabilityAssessmentBaselineRule(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n additional_locations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceAdditionalLocationArgs']]]]] = None,\n certificates: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ServiceCertificateArgs']]]]] = None,\n client_certificate_enabled: Optional[pulumi.Input[bool]] = None,\n delegation: Optional[pulumi.Input[pulumi.InputType['ServiceDelegationArgs']]] = None,\n developer_portal_url: Optional[pulumi.Input[str]] = None,\n gateway_disabled: Optional[pulumi.Input[bool]] = None,\n gateway_regional_url: Optional[pulumi.Input[str]] = None,\n gateway_url: Optional[pulumi.Input[str]] = None,\n hostname_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceHostnameConfigurationArgs']]] = None,\n identity: Optional[pulumi.Input[pulumi.InputType['ServiceIdentityArgs']]] = None,\n location: Optional[pulumi.Input[str]] = None,\n management_api_url: Optional[pulumi.Input[str]] = None,\n min_api_version: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n notification_sender_email: Optional[pulumi.Input[str]] = None,\n policy: Optional[pulumi.Input[pulumi.InputType['ServicePolicyArgs']]] = None,\n portal_url: Optional[pulumi.Input[str]] = None,\n private_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n protocols: Optional[pulumi.Input[pulumi.InputType['ServiceProtocolsArgs']]] = None,\n public_ip_address_id: Optional[pulumi.Input[str]] = None,\n public_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n public_network_access_enabled: Optional[pulumi.Input[bool]] = None,\n publisher_email: Optional[pulumi.Input[str]] = None,\n publisher_name: Optional[pulumi.Input[str]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n scm_url: Optional[pulumi.Input[str]] = None,\n security: Optional[pulumi.Input[pulumi.InputType['ServiceSecurityArgs']]] = None,\n sign_in: Optional[pulumi.Input[pulumi.InputType['ServiceSignInArgs']]] = None,\n sign_up: Optional[pulumi.Input[pulumi.InputType['ServiceSignUpArgs']]] = None,\n sku_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tenant_access: Optional[pulumi.Input[pulumi.InputType['ServiceTenantAccessArgs']]] = None,\n virtual_network_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceVirtualNetworkConfigurationArgs']]] = None,\n virtual_network_type: Optional[pulumi.Input[str]] = None,\n zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Service':\n opts = pulumi.ResourceOptions.merge(opts, 
pulumi.ResourceOptions(id=id))\n\n __props__ = _ServiceState.__new__(_ServiceState)\n\n __props__.__dict__[\"additional_locations\"] = additional_locations\n __props__.__dict__[\"certificates\"] = certificates\n __props__.__dict__[\"client_certificate_enabled\"] = client_certificate_enabled\n __props__.__dict__[\"delegation\"] = delegation\n __props__.__dict__[\"developer_portal_url\"] = developer_portal_url\n __props__.__dict__[\"gateway_disabled\"] = gateway_disabled\n __props__.__dict__[\"gateway_regional_url\"] = gateway_regional_url\n __props__.__dict__[\"gateway_url\"] = gateway_url\n __props__.__dict__[\"hostname_configuration\"] = hostname_configuration\n __props__.__dict__[\"identity\"] = identity\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"management_api_url\"] = management_api_url\n __props__.__dict__[\"min_api_version\"] = min_api_version\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"notification_sender_email\"] = notification_sender_email\n __props__.__dict__[\"policy\"] = policy\n __props__.__dict__[\"portal_url\"] = portal_url\n __props__.__dict__[\"private_ip_addresses\"] = private_ip_addresses\n __props__.__dict__[\"protocols\"] = protocols\n __props__.__dict__[\"public_ip_address_id\"] = public_ip_address_id\n __props__.__dict__[\"public_ip_addresses\"] = public_ip_addresses\n __props__.__dict__[\"public_network_access_enabled\"] = public_network_access_enabled\n __props__.__dict__[\"publisher_email\"] = publisher_email\n __props__.__dict__[\"publisher_name\"] = publisher_name\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"scm_url\"] = scm_url\n __props__.__dict__[\"security\"] = security\n __props__.__dict__[\"sign_in\"] = sign_in\n __props__.__dict__[\"sign_up\"] = sign_up\n __props__.__dict__[\"sku_name\"] = sku_name\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tenant_access\"] = tenant_access\n __props__.__dict__[\"virtual_network_configuration\"] = virtual_network_configuration\n __props__.__dict__[\"virtual_network_type\"] = virtual_network_type\n __props__.__dict__[\"zones\"] = zones\n return Service(resource_name, opts=opts, __props__=__props__)", "def get(self, sid: typing.Union[uuid.UUID, int]) -> bytes:\n if not self.tag.training:\n return bytes()\n if isinstance(sid, int):\n sid = self.tag.states[sid]\n if sid not in self.tag.states:\n raise Level.Invalid(f'Unknown state reference for {self}: {sid}')\n LOGGER.debug('%s: Getting state %s', self, sid)\n return STATES(self.registry, self.project.key, self.lineage.key, self.key, sid)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n description: Optional[pulumi.Input[str]] = None,\n disable_status_check: Optional[pulumi.Input[bool]] = None,\n email: Optional[pulumi.Input[str]] = None,\n masters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n project_id: Optional[pulumi.Input[str]] = None,\n region: Optional[pulumi.Input[str]] = None,\n ttl: Optional[pulumi.Input[int]] = None,\n type: Optional[pulumi.Input[str]] = None,\n value_specs: Optional[pulumi.Input[Mapping[str, Any]]] = None) -> 'Zone':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ZoneState.__new__(_ZoneState)\n\n __props__.__dict__[\"attributes\"] = attributes\n __props__.__dict__[\"description\"] = description\n 
__props__.__dict__[\"disable_status_check\"] = disable_status_check\n __props__.__dict__[\"email\"] = email\n __props__.__dict__[\"masters\"] = masters\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"project_id\"] = project_id\n __props__.__dict__[\"region\"] = region\n __props__.__dict__[\"ttl\"] = ttl\n __props__.__dict__[\"type\"] = type\n __props__.__dict__[\"value_specs\"] = value_specs\n return Zone(resource_name, opts=opts, __props__=__props__)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n additional_data: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n assessment_name: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentMetadataPropertiesArgs']]] = None,\n partners_data: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentPartnerDataArgs']]] = None,\n resource_details: Optional[pulumi.Input[Union[pulumi.InputType['AzureResourceDetailsArgs'], pulumi.InputType['OnPremiseResourceDetailsArgs'], pulumi.InputType['OnPremiseSqlResourceDetailsArgs']]]] = None,\n resource_id: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[pulumi.InputType['AssessmentStatusArgs']]] = None,\n __props__=None,\n __name__=None,\n __opts__=None):\n if __name__ is not None:\n warnings.warn(\"explicit use of __name__ is deprecated\", DeprecationWarning)\n resource_name = __name__\n if __opts__ is not None:\n warnings.warn(\"explicit use of __opts__ is deprecated, use 'opts' instead\", DeprecationWarning)\n opts = __opts__\n if opts is None:\n opts = pulumi.ResourceOptions()\n if not isinstance(opts, pulumi.ResourceOptions):\n raise TypeError('Expected resource options to be a ResourceOptions instance')\n if opts.version is None:\n opts.version = _utilities.get_version()\n if opts.id is None:\n if __props__ is not None:\n raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')\n __props__ = dict()\n\n __props__['additional_data'] = additional_data\n __props__['assessment_name'] = assessment_name\n __props__['metadata'] = metadata\n __props__['partners_data'] = partners_data\n if resource_details is None and not opts.urn:\n raise TypeError(\"Missing required property 'resource_details'\")\n __props__['resource_details'] = resource_details\n if resource_id is None and not opts.urn:\n raise TypeError(\"Missing required property 'resource_id'\")\n __props__['resource_id'] = resource_id\n if status is None and not opts.urn:\n raise TypeError(\"Missing required property 'status'\")\n __props__['status'] = status\n __props__['display_name'] = None\n __props__['links'] = None\n __props__['name'] = None\n __props__['type'] = None\n alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_=\"azure-nextgen:security:Assessment\"), pulumi.Alias(type_=\"azure-native:security/latest:Assessment\"), pulumi.Alias(type_=\"azure-nextgen:security/latest:Assessment\"), pulumi.Alias(type_=\"azure-native:security/v20190101preview:Assessment\"), pulumi.Alias(type_=\"azure-nextgen:security/v20190101preview:Assessment\"), pulumi.Alias(type_=\"azure-native:security/v20200101:Assessment\"), pulumi.Alias(type_=\"azure-nextgen:security/v20200101:Assessment\")])\n opts = pulumi.ResourceOptions.merge(opts, alias_opts)\n super(Assessment, __self__).__init__(\n 'azure-native:security:Assessment',\n resource_name,\n __props__,\n opts)", "def view_state_id(state_id):\n states_obj = storage.all(\"State\")\n if 
request.method == 'GET':\n for state in states_obj.values():\n if state.id == state_id:\n id_found = state.to_dict()\n return jsonify(id_found)\n abort(404)\n\n if request.method == 'DELETE':\n for state in states_obj.values():\n if state.id == state_id:\n storage.delete(state)\n storage.save()\n return make_response(jsonify({}), 200)\n abort(404)\n\n if request.method == 'PUT':\n key = \"State.\" + state_id\n states = storage.all(\"State\")\n instance = states.get(key)\n if instance is None:\n abort(404)\n else:\n if not request.json:\n abort(400, \"Not a JSON\")\n req_var = request.get_json()\n for key, value in req_var.items():\n setattr(instance, key, value)\n storage.save()\n return make_response(jsonify(instance.to_dict()), 200)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n access_string: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n authentication_mode: Optional[pulumi.Input[pulumi.InputType['UserAuthenticationModeArgs']]] = None,\n engine: Optional[pulumi.Input[str]] = None,\n no_password_required: Optional[pulumi.Input[bool]] = None,\n passwords: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_id: Optional[pulumi.Input[str]] = None,\n user_name: Optional[pulumi.Input[str]] = None) -> 'User':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _UserState.__new__(_UserState)\n\n __props__.__dict__[\"access_string\"] = access_string\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"authentication_mode\"] = authentication_mode\n __props__.__dict__[\"engine\"] = engine\n __props__.__dict__[\"no_password_required\"] = no_password_required\n __props__.__dict__[\"passwords\"] = passwords\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_id\"] = user_id\n __props__.__dict__[\"user_name\"] = user_name\n return User(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n inter_region_traffic_qos_policy_description: Optional[pulumi.Input[str]] = None,\n inter_region_traffic_qos_policy_name: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None,\n transit_router_attachment_id: Optional[pulumi.Input[str]] = None,\n transit_router_id: Optional[pulumi.Input[str]] = None) -> 'InterRegionTrafficQosPolicy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InterRegionTrafficQosPolicyState.__new__(_InterRegionTrafficQosPolicyState)\n\n __props__.__dict__[\"inter_region_traffic_qos_policy_description\"] = inter_region_traffic_qos_policy_description\n __props__.__dict__[\"inter_region_traffic_qos_policy_name\"] = inter_region_traffic_qos_policy_name\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"transit_router_attachment_id\"] = transit_router_attachment_id\n __props__.__dict__[\"transit_router_id\"] = transit_router_id\n return InterRegionTrafficQosPolicy(resource_name, opts=opts, __props__=__props__)", "def get_resource(self, name: str) -> Resource:\n return self.get_session.query(self.resource_model).filter_by(name=name).one_or_none()", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n 
application_insights_id: Optional[pulumi.Input[str]] = None,\n container_registry_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n discovery_url: Optional[pulumi.Input[str]] = None,\n encryption: Optional[pulumi.Input[pulumi.InputType['WorkspaceEncryptionArgs']]] = None,\n friendly_name: Optional[pulumi.Input[str]] = None,\n high_business_impact: Optional[pulumi.Input[bool]] = None,\n identity: Optional[pulumi.Input[pulumi.InputType['WorkspaceIdentityArgs']]] = None,\n image_build_compute_name: Optional[pulumi.Input[str]] = None,\n key_vault_id: Optional[pulumi.Input[str]] = None,\n location: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n primary_user_assigned_identity: Optional[pulumi.Input[str]] = None,\n public_access_behind_virtual_network_enabled: Optional[pulumi.Input[bool]] = None,\n public_network_access_enabled: Optional[pulumi.Input[bool]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n sku_name: Optional[pulumi.Input[str]] = None,\n storage_account_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n v1_legacy_mode_enabled: Optional[pulumi.Input[bool]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None) -> 'Workspace':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _WorkspaceState.__new__(_WorkspaceState)\n\n __props__.__dict__[\"application_insights_id\"] = application_insights_id\n __props__.__dict__[\"container_registry_id\"] = container_registry_id\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"discovery_url\"] = discovery_url\n __props__.__dict__[\"encryption\"] = encryption\n __props__.__dict__[\"friendly_name\"] = friendly_name\n __props__.__dict__[\"high_business_impact\"] = high_business_impact\n __props__.__dict__[\"identity\"] = identity\n __props__.__dict__[\"image_build_compute_name\"] = image_build_compute_name\n __props__.__dict__[\"key_vault_id\"] = key_vault_id\n __props__.__dict__[\"location\"] = location\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"primary_user_assigned_identity\"] = primary_user_assigned_identity\n __props__.__dict__[\"public_access_behind_virtual_network_enabled\"] = public_access_behind_virtual_network_enabled\n __props__.__dict__[\"public_network_access_enabled\"] = public_network_access_enabled\n __props__.__dict__[\"resource_group_name\"] = resource_group_name\n __props__.__dict__[\"sku_name\"] = sku_name\n __props__.__dict__[\"storage_account_id\"] = storage_account_id\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"v1_legacy_mode_enabled\"] = v1_legacy_mode_enabled\n __props__.__dict__[\"workspace_id\"] = workspace_id\n return Workspace(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n admin_role_values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n allowed_organizations: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n editor_role_values: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n email_assertion: Optional[pulumi.Input[str]] = None,\n groups_assertion: Optional[pulumi.Input[str]] = None,\n idp_metadata_url: Optional[pulumi.Input[str]] = None,\n idp_metadata_xml: Optional[pulumi.Input[str]] = None,\n login_assertion: Optional[pulumi.Input[str]] = None,\n login_validity_duration: Optional[pulumi.Input[int]] = None,\n 
name_assertion: Optional[pulumi.Input[str]] = None,\n org_assertion: Optional[pulumi.Input[str]] = None,\n role_assertion: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None,\n workspace_id: Optional[pulumi.Input[str]] = None) -> 'WorkspaceSamlConfiguration':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _WorkspaceSamlConfigurationState.__new__(_WorkspaceSamlConfigurationState)\n\n __props__.__dict__[\"admin_role_values\"] = admin_role_values\n __props__.__dict__[\"allowed_organizations\"] = allowed_organizations\n __props__.__dict__[\"editor_role_values\"] = editor_role_values\n __props__.__dict__[\"email_assertion\"] = email_assertion\n __props__.__dict__[\"groups_assertion\"] = groups_assertion\n __props__.__dict__[\"idp_metadata_url\"] = idp_metadata_url\n __props__.__dict__[\"idp_metadata_xml\"] = idp_metadata_xml\n __props__.__dict__[\"login_assertion\"] = login_assertion\n __props__.__dict__[\"login_validity_duration\"] = login_validity_duration\n __props__.__dict__[\"name_assertion\"] = name_assertion\n __props__.__dict__[\"org_assertion\"] = org_assertion\n __props__.__dict__[\"role_assertion\"] = role_assertion\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"workspace_id\"] = workspace_id\n return WorkspaceSamlConfiguration(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n direction: Optional[pulumi.Input[str]] = None,\n dry_run: Optional[pulumi.Input[bool]] = None,\n listener_id: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n rule_actions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleRuleActionArgs']]]]] = None,\n rule_conditions: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['RuleRuleConditionArgs']]]]] = None,\n rule_name: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None) -> 'Rule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _RuleState.__new__(_RuleState)\n\n __props__.__dict__[\"direction\"] = direction\n __props__.__dict__[\"dry_run\"] = dry_run\n __props__.__dict__[\"listener_id\"] = listener_id\n __props__.__dict__[\"priority\"] = priority\n __props__.__dict__[\"rule_actions\"] = rule_actions\n __props__.__dict__[\"rule_conditions\"] = rule_conditions\n __props__.__dict__[\"rule_name\"] = rule_name\n __props__.__dict__[\"status\"] = status\n return Rule(resource_name, opts=opts, __props__=__props__)", "def get(resource_name, id, opts=None, arn=None, cloudwatch_logging_options=None, code=None, create_timestamp=None, description=None, inputs=None, last_update_timestamp=None, name=None, outputs=None, reference_data_sources=None, status=None, tags=None, version=None):\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n\n __props__[\"arn\"] = arn\n __props__[\"cloudwatch_logging_options\"] = cloudwatch_logging_options\n __props__[\"code\"] = code\n __props__[\"create_timestamp\"] = create_timestamp\n __props__[\"description\"] = description\n __props__[\"inputs\"] = inputs\n __props__[\"last_update_timestamp\"] = last_update_timestamp\n __props__[\"name\"] = name\n __props__[\"outputs\"] = outputs\n __props__[\"reference_data_sources\"] = reference_data_sources\n __props__[\"status\"] = status\n __props__[\"tags\"] = tags\n __props__[\"version\"] = version\n return 
AnalyticsApplication(resource_name, opts=opts, __props__=__props__)", "def get_resource_by_name(self, name, **kwargs):\n instance = self.manager.get_by_name(name, **kwargs)\n return instance", "def given_state(id):\n key = 'State.{}'.format(id)\n state = storage.all(State).get(key)\n return render_template('9-states.html', states=state)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n api: Optional[pulumi.Input[pulumi.InputType['ApplicationApiArgs']]] = None,\n app_role_ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n app_roles: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationAppRoleArgs']]]]] = None,\n application_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n device_only_auth_enabled: Optional[pulumi.Input[bool]] = None,\n disabled_by_microsoft: Optional[pulumi.Input[str]] = None,\n display_name: Optional[pulumi.Input[str]] = None,\n fallback_public_client_enabled: Optional[pulumi.Input[bool]] = None,\n feature_tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationFeatureTagArgs']]]]] = None,\n group_membership_claims: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n identifier_uris: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n logo_image: Optional[pulumi.Input[str]] = None,\n logo_url: Optional[pulumi.Input[str]] = None,\n marketing_url: Optional[pulumi.Input[str]] = None,\n notes: Optional[pulumi.Input[str]] = None,\n oauth2_permission_scope_ids: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n oauth2_post_response_required: Optional[pulumi.Input[bool]] = None,\n object_id: Optional[pulumi.Input[str]] = None,\n optional_claims: Optional[pulumi.Input[pulumi.InputType['ApplicationOptionalClaimsArgs']]] = None,\n owners: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n prevent_duplicate_names: Optional[pulumi.Input[bool]] = None,\n privacy_statement_url: Optional[pulumi.Input[str]] = None,\n public_client: Optional[pulumi.Input[pulumi.InputType['ApplicationPublicClientArgs']]] = None,\n publisher_domain: Optional[pulumi.Input[str]] = None,\n required_resource_accesses: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ApplicationRequiredResourceAccessArgs']]]]] = None,\n service_management_reference: Optional[pulumi.Input[str]] = None,\n sign_in_audience: Optional[pulumi.Input[str]] = None,\n single_page_application: Optional[pulumi.Input[pulumi.InputType['ApplicationSinglePageApplicationArgs']]] = None,\n support_url: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n template_id: Optional[pulumi.Input[str]] = None,\n terms_of_service_url: Optional[pulumi.Input[str]] = None,\n web: Optional[pulumi.Input[pulumi.InputType['ApplicationWebArgs']]] = None) -> 'Application':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ApplicationState.__new__(_ApplicationState)\n\n __props__.__dict__[\"api\"] = api\n __props__.__dict__[\"app_role_ids\"] = app_role_ids\n __props__.__dict__[\"app_roles\"] = app_roles\n __props__.__dict__[\"application_id\"] = application_id\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"device_only_auth_enabled\"] = device_only_auth_enabled\n __props__.__dict__[\"disabled_by_microsoft\"] = disabled_by_microsoft\n __props__.__dict__[\"display_name\"] = display_name\n 
__props__.__dict__[\"fallback_public_client_enabled\"] = fallback_public_client_enabled\n __props__.__dict__[\"feature_tags\"] = feature_tags\n __props__.__dict__[\"group_membership_claims\"] = group_membership_claims\n __props__.__dict__[\"identifier_uris\"] = identifier_uris\n __props__.__dict__[\"logo_image\"] = logo_image\n __props__.__dict__[\"logo_url\"] = logo_url\n __props__.__dict__[\"marketing_url\"] = marketing_url\n __props__.__dict__[\"notes\"] = notes\n __props__.__dict__[\"oauth2_permission_scope_ids\"] = oauth2_permission_scope_ids\n __props__.__dict__[\"oauth2_post_response_required\"] = oauth2_post_response_required\n __props__.__dict__[\"object_id\"] = object_id\n __props__.__dict__[\"optional_claims\"] = optional_claims\n __props__.__dict__[\"owners\"] = owners\n __props__.__dict__[\"prevent_duplicate_names\"] = prevent_duplicate_names\n __props__.__dict__[\"privacy_statement_url\"] = privacy_statement_url\n __props__.__dict__[\"public_client\"] = public_client\n __props__.__dict__[\"publisher_domain\"] = publisher_domain\n __props__.__dict__[\"required_resource_accesses\"] = required_resource_accesses\n __props__.__dict__[\"service_management_reference\"] = service_management_reference\n __props__.__dict__[\"sign_in_audience\"] = sign_in_audience\n __props__.__dict__[\"single_page_application\"] = single_page_application\n __props__.__dict__[\"support_url\"] = support_url\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"template_id\"] = template_id\n __props__.__dict__[\"terms_of_service_url\"] = terms_of_service_url\n __props__.__dict__[\"web\"] = web\n return Application(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n access_configuration_id: Optional[pulumi.Input[str]] = None,\n access_configuration_name: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n directory_id: Optional[pulumi.Input[str]] = None,\n force_remove_permission_policies: Optional[pulumi.Input[bool]] = None,\n permission_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AccessConfigurationPermissionPolicyArgs']]]]] = None,\n relay_state: Optional[pulumi.Input[str]] = None,\n session_duration: Optional[pulumi.Input[int]] = None) -> 'AccessConfiguration':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AccessConfigurationState.__new__(_AccessConfigurationState)\n\n __props__.__dict__[\"access_configuration_id\"] = access_configuration_id\n __props__.__dict__[\"access_configuration_name\"] = access_configuration_name\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"directory_id\"] = directory_id\n __props__.__dict__[\"force_remove_permission_policies\"] = force_remove_permission_policies\n __props__.__dict__[\"permission_policies\"] = permission_policies\n __props__.__dict__[\"relay_state\"] = relay_state\n __props__.__dict__[\"session_duration\"] = session_duration\n return AccessConfiguration(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Environment':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = EnvironmentArgs.__new__(EnvironmentArgs)\n\n __props__.__dict__[\"arm_template_display_name\"] = None\n __props__.__dict__[\"created_by_user\"] = None\n __props__.__dict__[\"deployment_properties\"] = None\n 
__props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"provisioning_state\"] = None\n __props__.__dict__[\"resource_group_id\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"type\"] = None\n __props__.__dict__[\"unique_identifier\"] = None\n return Environment(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n auto_scaling_configuration_arn: Optional[pulumi.Input[str]] = None,\n encryption_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceEncryptionConfigurationArgs']]] = None,\n health_check_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceHealthCheckConfigurationArgs']]] = None,\n instance_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceInstanceConfigurationArgs']]] = None,\n network_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceNetworkConfigurationArgs']]] = None,\n observability_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceObservabilityConfigurationArgs']]] = None,\n service_id: Optional[pulumi.Input[str]] = None,\n service_name: Optional[pulumi.Input[str]] = None,\n service_url: Optional[pulumi.Input[str]] = None,\n source_configuration: Optional[pulumi.Input[pulumi.InputType['ServiceSourceConfigurationArgs']]] = None,\n status: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'Service':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ServiceState.__new__(_ServiceState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"auto_scaling_configuration_arn\"] = auto_scaling_configuration_arn\n __props__.__dict__[\"encryption_configuration\"] = encryption_configuration\n __props__.__dict__[\"health_check_configuration\"] = health_check_configuration\n __props__.__dict__[\"instance_configuration\"] = instance_configuration\n __props__.__dict__[\"network_configuration\"] = network_configuration\n __props__.__dict__[\"observability_configuration\"] = observability_configuration\n __props__.__dict__[\"service_id\"] = service_id\n __props__.__dict__[\"service_name\"] = service_name\n __props__.__dict__[\"service_url\"] = service_url\n __props__.__dict__[\"source_configuration\"] = source_configuration\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n return Service(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'RoleManagementPolicyAssignment':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = RoleManagementPolicyAssignmentArgs.__new__(RoleManagementPolicyAssignmentArgs)\n\n __props__.__dict__[\"effective_rules\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"policy_assignment_properties\"] = None\n __props__.__dict__[\"policy_id\"] = None\n __props__.__dict__[\"role_definition_id\"] = None\n __props__.__dict__[\"scope\"] = None\n __props__.__dict__[\"type\"] = None\n return RoleManagementPolicyAssignment(resource_name, opts=opts, __props__=__props__)", "def state_by_id(id):\n states = storage.all('State').values()\n for state in states:\n if state.id == id:\n return 
render_template('9-states.html', states=state)\n return render_template('9-states.html')", "def get_state(self, entity_id: str, attribute: str = \"state\") -> dict:\n if not self.connected:\n LOGGER.warning(\"Connection is not yet ready.\")\n state_obj = self._states.get(entity_id)\n if state_obj:\n if attribute == \"state\":\n return state_obj[\"state\"]\n if attribute:\n return state_obj[\"attributes\"].get(attribute)\n return state_obj\n return None", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n acl_name: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,\n cluster_endpoints: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterClusterEndpointArgs']]]]] = None,\n data_tiering: Optional[pulumi.Input[bool]] = None,\n description: Optional[pulumi.Input[str]] = None,\n engine_patch_version: Optional[pulumi.Input[str]] = None,\n engine_version: Optional[pulumi.Input[str]] = None,\n final_snapshot_name: Optional[pulumi.Input[str]] = None,\n kms_key_arn: Optional[pulumi.Input[str]] = None,\n maintenance_window: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n node_type: Optional[pulumi.Input[str]] = None,\n num_replicas_per_shard: Optional[pulumi.Input[int]] = None,\n num_shards: Optional[pulumi.Input[int]] = None,\n parameter_group_name: Optional[pulumi.Input[str]] = None,\n port: Optional[pulumi.Input[int]] = None,\n security_group_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n shards: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ClusterShardArgs']]]]] = None,\n snapshot_arns: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n snapshot_name: Optional[pulumi.Input[str]] = None,\n snapshot_retention_limit: Optional[pulumi.Input[int]] = None,\n snapshot_window: Optional[pulumi.Input[str]] = None,\n sns_topic_arn: Optional[pulumi.Input[str]] = None,\n subnet_group_name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tls_enabled: Optional[pulumi.Input[bool]] = None) -> 'Cluster':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ClusterState.__new__(_ClusterState)\n\n __props__.__dict__[\"acl_name\"] = acl_name\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"auto_minor_version_upgrade\"] = auto_minor_version_upgrade\n __props__.__dict__[\"cluster_endpoints\"] = cluster_endpoints\n __props__.__dict__[\"data_tiering\"] = data_tiering\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"engine_patch_version\"] = engine_patch_version\n __props__.__dict__[\"engine_version\"] = engine_version\n __props__.__dict__[\"final_snapshot_name\"] = final_snapshot_name\n __props__.__dict__[\"kms_key_arn\"] = kms_key_arn\n __props__.__dict__[\"maintenance_window\"] = maintenance_window\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"node_type\"] = node_type\n __props__.__dict__[\"num_replicas_per_shard\"] = num_replicas_per_shard\n __props__.__dict__[\"num_shards\"] = num_shards\n __props__.__dict__[\"parameter_group_name\"] = parameter_group_name\n __props__.__dict__[\"port\"] = port\n __props__.__dict__[\"security_group_ids\"] = 
security_group_ids\n __props__.__dict__[\"shards\"] = shards\n __props__.__dict__[\"snapshot_arns\"] = snapshot_arns\n __props__.__dict__[\"snapshot_name\"] = snapshot_name\n __props__.__dict__[\"snapshot_retention_limit\"] = snapshot_retention_limit\n __props__.__dict__[\"snapshot_window\"] = snapshot_window\n __props__.__dict__[\"sns_topic_arn\"] = sns_topic_arn\n __props__.__dict__[\"subnet_group_name\"] = subnet_group_name\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"tls_enabled\"] = tls_enabled\n return Cluster(resource_name, opts=opts, __props__=__props__)", "def _get(isamAppliance, id):\n return isamAppliance.invoke_get(\"Retrieve a specific STS chain\", \"{0}/{1}\".format(uri, id),\n requires_modules=requires_modules,\n requires_version=requires_version)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n license_count: Optional[pulumi.Input[int]] = None,\n license_count_hard_limit: Optional[pulumi.Input[bool]] = None,\n license_counting_type: Optional[pulumi.Input[str]] = None,\n license_rules: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n name: Optional[pulumi.Input[str]] = None,\n owner_account_id: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None) -> 'LicenseConfiguration':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _LicenseConfigurationState.__new__(_LicenseConfigurationState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"license_count\"] = license_count\n __props__.__dict__[\"license_count_hard_limit\"] = license_count_hard_limit\n __props__.__dict__[\"license_counting_type\"] = license_counting_type\n __props__.__dict__[\"license_rules\"] = license_rules\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"owner_account_id\"] = owner_account_id\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n return LicenseConfiguration(resource_name, opts=opts, __props__=__props__)", "def get_state_by_id(states: [State], state_id: str, id_type: str) -> State:\n if id_type == 'new':\n for state in states:\n if state.new_id == state_id:\n return state\n if id_type == 'old':\n for state in states:\n if state.id == state_id:\n return state\n return states[0]", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = InstanceArgs.__new__(InstanceArgs)\n\n __props__.__dict__[\"create_time\"] = None\n __props__.__dict__[\"description\"] = None\n __props__.__dict__[\"etag\"] = None\n __props__.__dict__[\"file_shares\"] = None\n __props__.__dict__[\"instance_id\"] = None\n __props__.__dict__[\"kms_key_name\"] = None\n __props__.__dict__[\"labels\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"networks\"] = None\n __props__.__dict__[\"project\"] = None\n __props__.__dict__[\"satisfies_pzs\"] = None\n __props__.__dict__[\"state\"] = None\n __props__.__dict__[\"status_message\"] = None\n __props__.__dict__[\"suspension_reasons\"] = None\n __props__.__dict__[\"tier\"] = None\n 
return Instance(resource_name, opts=opts, __props__=__props__)", "def get(resource_name, id, opts=None, arn=None, artifact_store=None, name=None, role_arn=None, stages=None, tags=None):\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n __props__[\"arn\"] = arn\n __props__[\"artifact_store\"] = artifact_store\n __props__[\"name\"] = name\n __props__[\"role_arn\"] = role_arn\n __props__[\"stages\"] = stages\n __props__[\"tags\"] = tags\n return Pipeline(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n acl_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n dest_cidr: Optional[pulumi.Input[str]] = None,\n dest_port_range: Optional[pulumi.Input[str]] = None,\n direction: Optional[pulumi.Input[str]] = None,\n ip_protocol: Optional[pulumi.Input[str]] = None,\n policy: Optional[pulumi.Input[str]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n source_cidr: Optional[pulumi.Input[str]] = None,\n source_port_range: Optional[pulumi.Input[str]] = None) -> 'AclRule':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AclRuleState.__new__(_AclRuleState)\n\n __props__.__dict__[\"acl_id\"] = acl_id\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"dest_cidr\"] = dest_cidr\n __props__.__dict__[\"dest_port_range\"] = dest_port_range\n __props__.__dict__[\"direction\"] = direction\n __props__.__dict__[\"ip_protocol\"] = ip_protocol\n __props__.__dict__[\"policy\"] = policy\n __props__.__dict__[\"priority\"] = priority\n __props__.__dict__[\"source_cidr\"] = source_cidr\n __props__.__dict__[\"source_port_range\"] = source_port_range\n return AclRule(resource_name, opts=opts, __props__=__props__)", "def find_resource_by_name_or_id(self, resource_name, value):\n try:\n entity = getattr(self.client(), resource_name)\n return entity.get(value).id\n except sahara_base.APIException:\n return self.find_resource_by_name(resource_name, value)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'AccountAlias':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = AccountAliasArgs.__new__(AccountAliasArgs)\n\n __props__.__dict__[\"account_alias\"] = None\n __props__.__dict__[\"account_alias_resource_id\"] = None\n return AccountAlias(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = InstanceArgs.__new__(InstanceArgs)\n\n __props__.__dict__[\"build\"] = None\n __props__.__dict__[\"config\"] = None\n __props__.__dict__[\"create_time\"] = None\n __props__.__dict__[\"instance_id\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"project\"] = None\n __props__.__dict__[\"state\"] = None\n __props__.__dict__[\"state_message\"] = None\n __props__.__dict__[\"update_time\"] = None\n return Instance(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n name: Optional[pulumi.Input[str]] = None,\n virtual_hub_id: Optional[pulumi.Input[str]] = None) -> 'VirtualNetworkAppliance':\n opts = 
pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _VirtualNetworkApplianceState.__new__(_VirtualNetworkApplianceState)\n\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"virtual_hub_id\"] = virtual_hub_id\n return VirtualNetworkAppliance(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Reservation':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = ReservationArgs.__new__(ReservationArgs)\n\n __props__.__dict__[\"concurrency\"] = None\n __props__.__dict__[\"creation_time\"] = None\n __props__.__dict__[\"ignore_idle_slots\"] = None\n __props__.__dict__[\"location\"] = None\n __props__.__dict__[\"multi_region_auxiliary\"] = None\n __props__.__dict__[\"name\"] = None\n __props__.__dict__[\"project\"] = None\n __props__.__dict__[\"reservation_id\"] = None\n __props__.__dict__[\"slot_capacity\"] = None\n __props__.__dict__[\"update_time\"] = None\n return Reservation(resource_name, opts=opts, __props__=__props__)", "def get_scene(self, name=None, id=None):\n\n if(name):\n return self.scenes[name] if name in self.scenes else None\n if(id):\n return next((v for (k,v) in self.scenes.items() if v.id == id), None)\n return None", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n accept_language: Optional[pulumi.Input[str]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n created_time: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n distributor: Optional[pulumi.Input[str]] = None,\n has_default_path: Optional[pulumi.Input[bool]] = None,\n name: Optional[pulumi.Input[str]] = None,\n owner: Optional[pulumi.Input[str]] = None,\n provisioning_artifact_parameters: Optional[pulumi.Input[pulumi.InputType['ProductProvisioningArtifactParametersArgs']]] = None,\n status: Optional[pulumi.Input[str]] = None,\n support_description: Optional[pulumi.Input[str]] = None,\n support_email: Optional[pulumi.Input[str]] = None,\n support_url: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n type: Optional[pulumi.Input[str]] = None) -> 'Product':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _ProductState.__new__(_ProductState)\n\n __props__.__dict__[\"accept_language\"] = accept_language\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"created_time\"] = created_time\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"distributor\"] = distributor\n __props__.__dict__[\"has_default_path\"] = has_default_path\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"owner\"] = owner\n __props__.__dict__[\"provisioning_artifact_parameters\"] = provisioning_artifact_parameters\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"support_description\"] = support_description\n __props__.__dict__[\"support_email\"] = support_email\n __props__.__dict__[\"support_url\"] = support_url\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"type\"] = type\n return Product(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n connection_string: Optional[pulumi.Input[str]] = 
None,\n description: Optional[pulumi.Input[str]] = None,\n instance_charge_type: Optional[pulumi.Input[str]] = None,\n instance_series: Optional[pulumi.Input[str]] = None,\n mysql_version: Optional[pulumi.Input[int]] = None,\n port: Optional[pulumi.Input[str]] = None,\n specification: Optional[pulumi.Input[str]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None,\n vswitch_id: Optional[pulumi.Input[str]] = None,\n zone_id: Optional[pulumi.Input[str]] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceState.__new__(_InstanceState)\n\n __props__.__dict__[\"connection_string\"] = connection_string\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"instance_charge_type\"] = instance_charge_type\n __props__.__dict__[\"instance_series\"] = instance_series\n __props__.__dict__[\"mysql_version\"] = mysql_version\n __props__.__dict__[\"port\"] = port\n __props__.__dict__[\"specification\"] = specification\n __props__.__dict__[\"vpc_id\"] = vpc_id\n __props__.__dict__[\"vswitch_id\"] = vswitch_id\n __props__.__dict__[\"zone_id\"] = zone_id\n return Instance(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n availability_zone: Optional[pulumi.Input[str]] = None,\n connection_string: Optional[pulumi.Input[str]] = None,\n create_sample_data: Optional[pulumi.Input[bool]] = None,\n db_instance_category: Optional[pulumi.Input[str]] = None,\n db_instance_class: Optional[pulumi.Input[str]] = None,\n db_instance_mode: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n encryption_key: Optional[pulumi.Input[str]] = None,\n encryption_type: Optional[pulumi.Input[str]] = None,\n engine: Optional[pulumi.Input[str]] = None,\n engine_version: Optional[pulumi.Input[str]] = None,\n instance_charge_type: Optional[pulumi.Input[str]] = None,\n instance_group_count: Optional[pulumi.Input[int]] = None,\n instance_network_type: Optional[pulumi.Input[str]] = None,\n instance_spec: Optional[pulumi.Input[str]] = None,\n ip_whitelists: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InstanceIpWhitelistArgs']]]]] = None,\n maintain_end_time: Optional[pulumi.Input[str]] = None,\n maintain_start_time: Optional[pulumi.Input[str]] = None,\n master_node_num: Optional[pulumi.Input[int]] = None,\n payment_type: Optional[pulumi.Input[str]] = None,\n period: Optional[pulumi.Input[str]] = None,\n port: Optional[pulumi.Input[str]] = None,\n private_ip_address: Optional[pulumi.Input[str]] = None,\n resource_group_id: Optional[pulumi.Input[str]] = None,\n security_ip_lists: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n seg_node_num: Optional[pulumi.Input[int]] = None,\n seg_storage_type: Optional[pulumi.Input[str]] = None,\n ssl_enabled: Optional[pulumi.Input[int]] = None,\n status: Optional[pulumi.Input[str]] = None,\n storage_size: Optional[pulumi.Input[int]] = None,\n tags: Optional[pulumi.Input[Mapping[str, Any]]] = None,\n used_time: Optional[pulumi.Input[str]] = None,\n vector_configuration_status: Optional[pulumi.Input[str]] = None,\n vpc_id: Optional[pulumi.Input[str]] = None,\n vswitch_id: Optional[pulumi.Input[str]] = None,\n zone_id: Optional[pulumi.Input[str]] = None) -> 'Instance':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _InstanceState.__new__(_InstanceState)\n\n __props__.__dict__[\"availability_zone\"] = 
availability_zone\n __props__.__dict__[\"connection_string\"] = connection_string\n __props__.__dict__[\"create_sample_data\"] = create_sample_data\n __props__.__dict__[\"db_instance_category\"] = db_instance_category\n __props__.__dict__[\"db_instance_class\"] = db_instance_class\n __props__.__dict__[\"db_instance_mode\"] = db_instance_mode\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"encryption_key\"] = encryption_key\n __props__.__dict__[\"encryption_type\"] = encryption_type\n __props__.__dict__[\"engine\"] = engine\n __props__.__dict__[\"engine_version\"] = engine_version\n __props__.__dict__[\"instance_charge_type\"] = instance_charge_type\n __props__.__dict__[\"instance_group_count\"] = instance_group_count\n __props__.__dict__[\"instance_network_type\"] = instance_network_type\n __props__.__dict__[\"instance_spec\"] = instance_spec\n __props__.__dict__[\"ip_whitelists\"] = ip_whitelists\n __props__.__dict__[\"maintain_end_time\"] = maintain_end_time\n __props__.__dict__[\"maintain_start_time\"] = maintain_start_time\n __props__.__dict__[\"master_node_num\"] = master_node_num\n __props__.__dict__[\"payment_type\"] = payment_type\n __props__.__dict__[\"period\"] = period\n __props__.__dict__[\"port\"] = port\n __props__.__dict__[\"private_ip_address\"] = private_ip_address\n __props__.__dict__[\"resource_group_id\"] = resource_group_id\n __props__.__dict__[\"security_ip_lists\"] = security_ip_lists\n __props__.__dict__[\"seg_node_num\"] = seg_node_num\n __props__.__dict__[\"seg_storage_type\"] = seg_storage_type\n __props__.__dict__[\"ssl_enabled\"] = ssl_enabled\n __props__.__dict__[\"status\"] = status\n __props__.__dict__[\"storage_size\"] = storage_size\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"used_time\"] = used_time\n __props__.__dict__[\"vector_configuration_status\"] = vector_configuration_status\n __props__.__dict__[\"vpc_id\"] = vpc_id\n __props__.__dict__[\"vswitch_id\"] = vswitch_id\n __props__.__dict__[\"zone_id\"] = zone_id\n return Instance(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'ResolverConfig':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = ResolverConfigArgs.__new__(ResolverConfigArgs)\n\n __props__.__dict__[\"autodefined_reverse\"] = None\n __props__.__dict__[\"autodefined_reverse_flag\"] = None\n __props__.__dict__[\"owner_id\"] = None\n __props__.__dict__[\"resource_id\"] = None\n return ResolverConfig(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Environment':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = EnvironmentArgs.__new__(EnvironmentArgs)\n\n __props__.__dict__[\"application_name\"] = None\n __props__.__dict__[\"cname_prefix\"] = None\n __props__.__dict__[\"description\"] = None\n __props__.__dict__[\"endpoint_url\"] = None\n __props__.__dict__[\"environment_name\"] = None\n __props__.__dict__[\"operations_role\"] = None\n __props__.__dict__[\"option_settings\"] = None\n __props__.__dict__[\"platform_arn\"] = None\n __props__.__dict__[\"solution_stack_name\"] = None\n __props__.__dict__[\"tags\"] = None\n __props__.__dict__[\"template_name\"] = None\n __props__.__dict__[\"tier\"] = None\n __props__.__dict__[\"version_label\"] = None\n return Environment(resource_name, opts=opts, 
__props__=__props__)", "def lookup(cls, id: int):\n record = query_db(\n \"select id, amount, description, user_id from expenses where id = ?\",\n [id],\n one=True,\n )\n if record is None:\n raise NotFound()\n return cls(**record)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n call_recovery: Optional[pulumi.Input[str]] = None,\n default_auth_provider: Optional[pulumi.Input[str]] = None,\n default_included_group_id: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n email_recovery: Optional[pulumi.Input[str]] = None,\n name: Optional[pulumi.Input[str]] = None,\n password_auto_unlock_minutes: Optional[pulumi.Input[int]] = None,\n password_dictionary_lookup: Optional[pulumi.Input[bool]] = None,\n password_exclude_first_name: Optional[pulumi.Input[bool]] = None,\n password_exclude_last_name: Optional[pulumi.Input[bool]] = None,\n password_exclude_username: Optional[pulumi.Input[bool]] = None,\n password_expire_warn_days: Optional[pulumi.Input[int]] = None,\n password_history_count: Optional[pulumi.Input[int]] = None,\n password_lockout_notification_channels: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n password_max_age_days: Optional[pulumi.Input[int]] = None,\n password_max_lockout_attempts: Optional[pulumi.Input[int]] = None,\n password_min_age_minutes: Optional[pulumi.Input[int]] = None,\n password_min_length: Optional[pulumi.Input[int]] = None,\n password_min_lowercase: Optional[pulumi.Input[int]] = None,\n password_min_number: Optional[pulumi.Input[int]] = None,\n password_min_symbol: Optional[pulumi.Input[int]] = None,\n password_min_uppercase: Optional[pulumi.Input[int]] = None,\n password_show_lockout_failures: Optional[pulumi.Input[bool]] = None,\n priority: Optional[pulumi.Input[int]] = None,\n question_min_length: Optional[pulumi.Input[int]] = None,\n question_recovery: Optional[pulumi.Input[str]] = None,\n recovery_email_token: Optional[pulumi.Input[int]] = None,\n skip_unlock: Optional[pulumi.Input[bool]] = None,\n sms_recovery: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[str]] = None) -> 'PolicyPasswordDefault':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _PolicyPasswordDefaultState.__new__(_PolicyPasswordDefaultState)\n\n __props__.__dict__[\"call_recovery\"] = call_recovery\n __props__.__dict__[\"default_auth_provider\"] = default_auth_provider\n __props__.__dict__[\"default_included_group_id\"] = default_included_group_id\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"email_recovery\"] = email_recovery\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"password_auto_unlock_minutes\"] = password_auto_unlock_minutes\n __props__.__dict__[\"password_dictionary_lookup\"] = password_dictionary_lookup\n __props__.__dict__[\"password_exclude_first_name\"] = password_exclude_first_name\n __props__.__dict__[\"password_exclude_last_name\"] = password_exclude_last_name\n __props__.__dict__[\"password_exclude_username\"] = password_exclude_username\n __props__.__dict__[\"password_expire_warn_days\"] = password_expire_warn_days\n __props__.__dict__[\"password_history_count\"] = password_history_count\n __props__.__dict__[\"password_lockout_notification_channels\"] = password_lockout_notification_channels\n __props__.__dict__[\"password_max_age_days\"] = password_max_age_days\n __props__.__dict__[\"password_max_lockout_attempts\"] = password_max_lockout_attempts\n 
__props__.__dict__[\"password_min_age_minutes\"] = password_min_age_minutes\n __props__.__dict__[\"password_min_length\"] = password_min_length\n __props__.__dict__[\"password_min_lowercase\"] = password_min_lowercase\n __props__.__dict__[\"password_min_number\"] = password_min_number\n __props__.__dict__[\"password_min_symbol\"] = password_min_symbol\n __props__.__dict__[\"password_min_uppercase\"] = password_min_uppercase\n __props__.__dict__[\"password_show_lockout_failures\"] = password_show_lockout_failures\n __props__.__dict__[\"priority\"] = priority\n __props__.__dict__[\"question_min_length\"] = question_min_length\n __props__.__dict__[\"question_recovery\"] = question_recovery\n __props__.__dict__[\"recovery_email_token\"] = recovery_email_token\n __props__.__dict__[\"skip_unlock\"] = skip_unlock\n __props__.__dict__[\"sms_recovery\"] = sms_recovery\n __props__.__dict__[\"status\"] = status\n return PolicyPasswordDefault(resource_name, opts=opts, __props__=__props__)", "def get_assessment_output(assessment_id: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetAssessmentResult]:\n ...", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n address1: Optional[pulumi.Input[str]] = None,\n address2: Optional[pulumi.Input[str]] = None,\n billing_contact_user: Optional[pulumi.Input[str]] = None,\n city: Optional[pulumi.Input[str]] = None,\n company_name: Optional[pulumi.Input[str]] = None,\n country: Optional[pulumi.Input[str]] = None,\n end_user_support_help_url: Optional[pulumi.Input[str]] = None,\n expires_at: Optional[pulumi.Input[str]] = None,\n logo: Optional[pulumi.Input[str]] = None,\n opt_out_communication_emails: Optional[pulumi.Input[bool]] = None,\n phone_number: Optional[pulumi.Input[str]] = None,\n postal_code: Optional[pulumi.Input[str]] = None,\n state: Optional[pulumi.Input[str]] = None,\n subdomain: Optional[pulumi.Input[str]] = None,\n support_phone_number: Optional[pulumi.Input[str]] = None,\n technical_contact_user: Optional[pulumi.Input[str]] = None,\n website: Optional[pulumi.Input[str]] = None) -> 'OrgConfiguration':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _OrgConfigurationState.__new__(_OrgConfigurationState)\n\n __props__.__dict__[\"address1\"] = address1\n __props__.__dict__[\"address2\"] = address2\n __props__.__dict__[\"billing_contact_user\"] = billing_contact_user\n __props__.__dict__[\"city\"] = city\n __props__.__dict__[\"company_name\"] = company_name\n __props__.__dict__[\"country\"] = country\n __props__.__dict__[\"end_user_support_help_url\"] = end_user_support_help_url\n __props__.__dict__[\"expires_at\"] = expires_at\n __props__.__dict__[\"logo\"] = logo\n __props__.__dict__[\"opt_out_communication_emails\"] = opt_out_communication_emails\n __props__.__dict__[\"phone_number\"] = phone_number\n __props__.__dict__[\"postal_code\"] = postal_code\n __props__.__dict__[\"state\"] = state\n __props__.__dict__[\"subdomain\"] = subdomain\n __props__.__dict__[\"support_phone_number\"] = support_phone_number\n __props__.__dict__[\"technical_contact_user\"] = technical_contact_user\n __props__.__dict__[\"website\"] = website\n return OrgConfiguration(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Ipam':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = 
IpamArgs.__new__(IpamArgs)\n\n __props__.__dict__[\"arn\"] = None\n __props__.__dict__[\"default_resource_discovery_association_id\"] = None\n __props__.__dict__[\"default_resource_discovery_id\"] = None\n __props__.__dict__[\"description\"] = None\n __props__.__dict__[\"ipam_id\"] = None\n __props__.__dict__[\"operating_regions\"] = None\n __props__.__dict__[\"private_default_scope_id\"] = None\n __props__.__dict__[\"public_default_scope_id\"] = None\n __props__.__dict__[\"resource_discovery_association_count\"] = None\n __props__.__dict__[\"scope_count\"] = None\n __props__.__dict__[\"tags\"] = None\n return Ipam(resource_name, opts=opts, __props__=__props__)", "def get_state_sid_request(ruleset_name, sid):\n result = host.get_state(ruleset_name, sid)\n return jsonify(result)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n etag: Optional[pulumi.Input[str]] = None,\n folder: Optional[pulumi.Input[str]] = None,\n policy_data: Optional[pulumi.Input[str]] = None) -> 'IAMPolicy':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _IAMPolicyState.__new__(_IAMPolicyState)\n\n __props__.__dict__[\"etag\"] = etag\n __props__.__dict__[\"folder\"] = folder\n __props__.__dict__[\"policy_data\"] = policy_data\n return IAMPolicy(resource_name, opts=opts, __props__=__props__)", "def get(resource_name, id, opts=None, accepts_prompt_none_forward_from_client=None, add_read_token_role_on_create=None, alias=None, authenticate_by_default=None, authorization_url=None, backchannel_supported=None, client_id=None, client_secret=None, default_scopes=None, display_name=None, enabled=None, extra_config=None, first_broker_login_flow_alias=None, hide_on_login_page=None, internal_id=None, jwks_url=None, link_only=None, login_hint=None, logout_url=None, post_broker_login_flow_alias=None, provider_id=None, realm=None, store_token=None, token_url=None, trust_email=None, ui_locales=None, user_info_url=None, validate_signature=None):\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n\n __props__[\"accepts_prompt_none_forward_from_client\"] = accepts_prompt_none_forward_from_client\n __props__[\"add_read_token_role_on_create\"] = add_read_token_role_on_create\n __props__[\"alias\"] = alias\n __props__[\"authenticate_by_default\"] = authenticate_by_default\n __props__[\"authorization_url\"] = authorization_url\n __props__[\"backchannel_supported\"] = backchannel_supported\n __props__[\"client_id\"] = client_id\n __props__[\"client_secret\"] = client_secret\n __props__[\"default_scopes\"] = default_scopes\n __props__[\"display_name\"] = display_name\n __props__[\"enabled\"] = enabled\n __props__[\"extra_config\"] = extra_config\n __props__[\"first_broker_login_flow_alias\"] = first_broker_login_flow_alias\n __props__[\"hide_on_login_page\"] = hide_on_login_page\n __props__[\"internal_id\"] = internal_id\n __props__[\"jwks_url\"] = jwks_url\n __props__[\"link_only\"] = link_only\n __props__[\"login_hint\"] = login_hint\n __props__[\"logout_url\"] = logout_url\n __props__[\"post_broker_login_flow_alias\"] = post_broker_login_flow_alias\n __props__[\"provider_id\"] = provider_id\n __props__[\"realm\"] = realm\n __props__[\"store_token\"] = store_token\n __props__[\"token_url\"] = token_url\n __props__[\"trust_email\"] = trust_email\n __props__[\"ui_locales\"] = ui_locales\n __props__[\"user_info_url\"] = user_info_url\n __props__[\"validate_signature\"] = validate_signature\n return 
IdentityProvider(resource_name, opts=opts, __props__=__props__)", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n config: Optional[pulumi.Input[str]] = None,\n description: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SyntheticsPrivateLocationMetadataArgs']]] = None,\n name: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'SyntheticsPrivateLocation':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _SyntheticsPrivateLocationState.__new__(_SyntheticsPrivateLocationState)\n\n __props__.__dict__[\"config\"] = config\n __props__.__dict__[\"description\"] = description\n __props__.__dict__[\"metadata\"] = metadata\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"tags\"] = tags\n return SyntheticsPrivateLocation(resource_name, opts=opts, __props__=__props__)", "def read(self):\n if not self.id:\n raise OperationOutcome('Resource ID is required')\n\n self.resource = self.db.read({\n 'resourceType': self.resource_type,\n 'id': self.id\n })\n return self", "def get(self, state):\n return state[self.primary or self]", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n apply_immediately: Optional[pulumi.Input[bool]] = None,\n arn: Optional[pulumi.Input[str]] = None,\n authentication_strategy: Optional[pulumi.Input[str]] = None,\n auto_minor_version_upgrade: Optional[pulumi.Input[bool]] = None,\n broker_name: Optional[pulumi.Input[str]] = None,\n configuration: Optional[pulumi.Input[pulumi.InputType['BrokerConfigurationArgs']]] = None,\n deployment_mode: Optional[pulumi.Input[str]] = None,\n encryption_options: Optional[pulumi.Input[pulumi.InputType['BrokerEncryptionOptionsArgs']]] = None,\n engine_type: Optional[pulumi.Input[str]] = None,\n engine_version: Optional[pulumi.Input[str]] = None,\n host_instance_type: Optional[pulumi.Input[str]] = None,\n instances: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BrokerInstanceArgs']]]]] = None,\n ldap_server_metadata: Optional[pulumi.Input[pulumi.InputType['BrokerLdapServerMetadataArgs']]] = None,\n logs: Optional[pulumi.Input[pulumi.InputType['BrokerLogsArgs']]] = None,\n maintenance_window_start_time: Optional[pulumi.Input[pulumi.InputType['BrokerMaintenanceWindowStartTimeArgs']]] = None,\n publicly_accessible: Optional[pulumi.Input[bool]] = None,\n security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n storage_type: Optional[pulumi.Input[str]] = None,\n subnet_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n users: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BrokerUserArgs']]]]] = None) -> 'Broker':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _BrokerState.__new__(_BrokerState)\n\n __props__.__dict__[\"apply_immediately\"] = apply_immediately\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"authentication_strategy\"] = authentication_strategy\n __props__.__dict__[\"auto_minor_version_upgrade\"] = auto_minor_version_upgrade\n __props__.__dict__[\"broker_name\"] = broker_name\n __props__.__dict__[\"configuration\"] = configuration\n __props__.__dict__[\"deployment_mode\"] = deployment_mode\n 
__props__.__dict__[\"encryption_options\"] = encryption_options\n __props__.__dict__[\"engine_type\"] = engine_type\n __props__.__dict__[\"engine_version\"] = engine_version\n __props__.__dict__[\"host_instance_type\"] = host_instance_type\n __props__.__dict__[\"instances\"] = instances\n __props__.__dict__[\"ldap_server_metadata\"] = ldap_server_metadata\n __props__.__dict__[\"logs\"] = logs\n __props__.__dict__[\"maintenance_window_start_time\"] = maintenance_window_start_time\n __props__.__dict__[\"publicly_accessible\"] = publicly_accessible\n __props__.__dict__[\"security_groups\"] = security_groups\n __props__.__dict__[\"storage_type\"] = storage_type\n __props__.__dict__[\"subnet_ids\"] = subnet_ids\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"users\"] = users\n return Broker(resource_name, opts=opts, __props__=__props__)", "def from_esi_name(cls, esi_state_name: str) -> \"Structure.State\":\n STATES_ESI_MAP = {\n \"anchor_vulnerable\": cls.ANCHOR_VULNERABLE,\n \"anchoring\": cls.ANCHORING,\n \"armor_reinforce\": cls.ARMOR_REINFORCE,\n \"armor_vulnerable\": cls.ARMOR_VULNERABLE,\n \"deploy_vulnerable\": cls.DEPLOY_VULNERABLE,\n \"fitting_invulnerable\": cls.FITTING_INVULNERABLE,\n \"hull_reinforce\": cls.HULL_REINFORCE,\n \"hull_vulnerable\": cls.HULL_VULNERABLE,\n \"online_deprecated\": cls.ONLINE_DEPRECATED,\n \"onlining_vulnerable\": cls.ONLINING_VULNERABLE,\n \"shield_vulnerable\": cls.SHIELD_VULNERABLE,\n \"unanchored\": cls.UNANCHORED,\n \"offline\": cls.POS_OFFLINE,\n \"online\": cls.POS_ONLINE,\n \"onlining\": cls.POS_ONLINING,\n \"reinforced\": cls.POS_REINFORCED,\n \"unanchoring \": cls.POS_UNANCHORING,\n }\n return (\n STATES_ESI_MAP[esi_state_name]\n if esi_state_name in STATES_ESI_MAP\n else cls.UNKNOWN\n )", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None) -> 'Product':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = dict()\n\n __props__[\"approval_required\"] = None\n __props__[\"description\"] = None\n __props__[\"display_name\"] = None\n __props__[\"name\"] = None\n __props__[\"state\"] = None\n __props__[\"subscription_required\"] = None\n __props__[\"subscriptions_limit\"] = None\n __props__[\"terms\"] = None\n __props__[\"type\"] = None\n return Product(resource_name, opts=opts, __props__=__props__)", "def __init__(__self__,\n resource_name: str,\n opts: Optional[pulumi.ResourceOptions] = None,\n additional_data: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n assessment_name: Optional[pulumi.Input[str]] = None,\n metadata: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentMetadataPropertiesArgs']]] = None,\n partners_data: Optional[pulumi.Input[pulumi.InputType['SecurityAssessmentPartnerDataArgs']]] = None,\n resource_details: Optional[pulumi.Input[Union[pulumi.InputType['AzureResourceDetailsArgs'], pulumi.InputType['OnPremiseResourceDetailsArgs'], pulumi.InputType['OnPremiseSqlResourceDetailsArgs']]]] = None,\n resource_id: Optional[pulumi.Input[str]] = None,\n status: Optional[pulumi.Input[pulumi.InputType['AssessmentStatusArgs']]] = None,\n __props__=None):\n ...", "def _extract_resource(resource: Optional[dict],\n allowed_vals: tuple[tuple[str, ...]],\n exc: Type[exception.CinderException],\n resource_name: str,\n props: tuple[str] = ('status',)) -> Optional[str]:\n\n resource_id = None\n if resource:\n for prop, allowed_states in zip(props, allowed_vals):\n if resource[prop] 
not in allowed_states:\n msg = _(\"Originating %(res)s %(prop)s must be one of \"\n \"'%(vals)s' values\")\n msg = msg % {'res': resource_name,\n 'prop': prop,\n 'vals': ', '.join(allowed_states)}\n # TODO(harlowja): what happens if the status changes after\n # this initial resource status check occurs??? Seems like\n # someone could delete the resource after this check passes\n # but before the volume is officially created?\n raise exc(reason=msg)\n resource_id = resource['id']\n return resource_id" ]
[ "0.6976486", "0.608855", "0.5846438", "0.5757699", "0.5648362", "0.55919707", "0.5582545", "0.55817664", "0.55264586", "0.55085826", "0.550386", "0.5496845", "0.54861027", "0.5467891", "0.54414856", "0.53757304", "0.53509825", "0.53232807", "0.5311685", "0.52735114", "0.52480686", "0.51748484", "0.51741683", "0.5172154", "0.51583207", "0.5127296", "0.51180047", "0.5104065", "0.50640565", "0.50514466", "0.5027656", "0.50108075", "0.5002038", "0.49729574", "0.49662992", "0.49566746", "0.49545115", "0.49515942", "0.49468613", "0.49445507", "0.49419588", "0.4934814", "0.49261796", "0.49253267", "0.48890966", "0.48750132", "0.48685965", "0.48520356", "0.48402932", "0.4830121", "0.48283216", "0.48110932", "0.47969994", "0.47939494", "0.47921807", "0.47920212", "0.47794086", "0.47750962", "0.47739", "0.47691792", "0.4769036", "0.47670126", "0.47667027", "0.47606465", "0.4757624", "0.47551784", "0.4749007", "0.4745472", "0.4736245", "0.4733072", "0.47322616", "0.47307837", "0.47260442", "0.47182208", "0.47124273", "0.47041166", "0.47034022", "0.4701799", "0.47010973", "0.46996832", "0.46938944", "0.46875513", "0.4684779", "0.4681574", "0.4677086", "0.46574506", "0.46519288", "0.46503556", "0.46478122", "0.46443924", "0.4643982", "0.46416757", "0.4635642", "0.46337852", "0.46236902", "0.46077526", "0.46076244", "0.45936888", "0.4586977", "0.45712274" ]
0.6898669
1
Additional data regarding the assessment
def additional_data(self) -> pulumi.Output[Optional[Mapping[str, str]]]: return pulumi.get(self, "additional_data")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def student_view_data(self, context=None):\n return {\n 'title': self.title,\n 'description': self.description,\n 'embed_code': self.embed_code,\n 'highres_url': self.highres_url,\n 'lowres_url': self.lowres_url,\n }", "def get_assessment(self):\n if not self.has_assessment:\n raise IllegalState()\n else:\n raise Unimplemented()", "def get_assessment_metadata(self):\n return Metadata(**settings.METADATA['assessment_id'])", "def details(self):\n pass", "def additional_data(self):\n return self._additional_data", "def get_sample_award_badge_data(self):\n return {\n \"recipient\": {\n \"identity\": \"[email protected]\"\n },\n \"notify\": True,\n \"evidence\": [{\n \"url\": \"http://example.com/\",\n \"narrative\": \"Joe completed all...\"\n }]\n }", "def generate_extra_data(self):\n self.data[\"male_initial\"], self.data[\"female_initial\"] = \\\n self.get_initial_student_count()\n \n date_line = '<p class=\"report-title\"> %s</p>' \\\n %(self.start_date.strftime(\"%B %Y\"))\n row1 = \"\"\"\n <table>\n <tr class=\"tblRow\"><td>%s</td><td>Enrollment For Year</td>\n <td>Male:</td><td>%d</td><td>Female:</td><td>%d</td>\n <td>Total:</td><td>%d</td></tr>\n \"\"\" %(unicode(self.school), self.data[\"male_initial\"], \n self.data[\"female_initial\"], \n self.data[\"male_initial\"] + self.data[\"female_initial\"])\n row2 = \"\"\"\n <tr class=\"tblOddRow\"><td>%s</td><td>Enrollment For Month</td>\n <td>Male:</td><td>%d</td><td>Female:</td><td>%d</td>\n <td>Total:</td><td>%d</td></tr>\n \"\"\" %(unicode(self.section), self.data[\"male_current\"], \n self.data[\"female_current\"],\n self.data[\"male_current\"] + self.data[\"female_current\"])\n row3 = \"\"\"\n <tr class=\"tblRow\"><td>%s</td><td>Average Attendance</td>\n <td>Male:</td><td>%.1f</td><td>Female:</td><td>%.1f</td>\n <td>Total:</td><td>%.1f</td></tr>\n \"\"\" %(\"Secondary\", self.data[\"aa_male\"], self.data[\"aa_female\"] ,\n self.data[\"aa_combined\"])\n row4 =\"\"\"\n <tr class=\"tblOddRow\"><td>%s</td><td>Percentage of Attendance</td>\n <td>Male:</td><td>%.1f %% </td><td>Female:</td><td>%.1f %% </td>\n <td>Total:</td><td>%.1f %% </td></tr>\n \"\"\" %(unicode(self.school.municipality), self.data[\"pa_male\"], \n self.data[\"pa_female\"], self.data[\"pa_combined\"])\n row5 = \"\"\"\n <tr class=\"tblRow\"><td>School Days: %d</td><td>Percentage of Enrollment</td>\n <td>Male:</td><td>%.1f %% </td><td>Female:</td><td>%.1f %% </td>\n <td>Total:</td><td>%.1f %% </td></tr>\n </table>\n \"\"\" %(self.data[\"num_school_days\"], \n self.data[\"male_current\"] * 100.0 / self.data[\"male_initial\"],\n self.data[\"female_current\"] * 100.0 / \n self.data[\"female_initial\"],\n (self.data[\"male_current\"] + self.data[\"female_current\"]) * \n 100.0 /\n (self.data[\"male_initial\"] + self.data[\"female_initial\"]))\n self.extra_data = date_line + row1 + row2 + row3 + row4 + row5", "def getInfo(self):\n self.name, self.description = achievements[self.id]", "def getDetail(self):\n\t\t\n\t\treturn (super().setParameters(0,self.getDefense(),0))\n\t\t\n\t\t#return \"\\n#########################################################\\n\"+\"\\nItem of Defense, Name of item:\"+self.getName()+\"\\nCapacity of defense:\"+str(self.getDefense())+\"\\nCapacity of attack:0 \\n Capacity of heal:0 \\n\"+\"#########################################################\\n\"", "def info():\n # -------- Task 1 -------------------------\n # Please complete the following information\n\n return {\"agent name\": \"?\", # COMPLETE HERE\n \"student name\": [\"?\"], # COMPLETE HERE\n 
\"student number\": [\"?\"]} # COMPLETE HERE", "def course_info(self):\n print(\"Course name: {}\".format(self._course_name))\n print(\"Lead teacher: {}\".format(self._teacher))\n\n if len(self._students) == 0:\n print(\"Course does not enrolled by any student\")\n else:\n print(\"Enrolled: {}/{}\".format(len(self._students), self._total_place))", "def get_infos(self):\n infos = dict()\n infos[\"dataset\"] = self.dataset_name\n infos[\"task\"] = \"separate_noisy\"\n infos[\"licenses\"] = [librispeech_license, tac_license]\n return infos", "def report_data(self):\n return {}", "def AddAncillaryData(self, ds):\n self.IsAncillaryData = True\n self.AncillaryData = ds", "def details(self):\n raise NotImplementedError()", "def student_state(self):\n submission = self.get_submission()\n if submission:\n uploaded_submission = submission.get(\"answer\").get(\"filename\", None)\n if uploaded_submission:\n uploaded = {\"filename\": submission['answer']['filename']}\n else:\n uploaded = None\n else:\n uploaded = None\n\n submission = self.get_question()\n if submission:\n uploaded_submission = submission.get(\"question\").get(\"filename\", None)\n if uploaded_submission:\n quploaded = {\"filename\": submission['question']['filename']}\n else:\n quploaded = None\n else:\n quploaded = None\n\n submission = self.get_solution()\n if submission:\n uploaded_submission = submission.get(\"solution\").get(\"filename\", None)\n if uploaded_submission:\n suploaded = {\"filename\": submission['solution']['filename']}\n else:\n suploaded = None\n else:\n suploaded = None\n \n \n \n return {\n \"display_name\": self.title,\n \"question\":self.question,\n \"uploaded\": uploaded,\n \"quploaded\":quploaded,\n \"suploaded\":suploaded,\n \"raw_answer\":self.raw_answer,\n \"raw_question\":self.raw_question,\n \"score\": self.score,\n \"weight\":self.weight,\n \"attempts\": self.attempts,\n \"max_attempts\": self.max_attempts,\n }", "def info(self):\n self.update_info()\n print('Number of electrodes: ' + str(self.n_elecs))\n print('Recording time in seconds: ' + str(self.dur))\n print('Sample Rate in Hz: '+ str(self.sample_rate))\n print('Number of sessions: ' + str(self.n_sessions))\n print('Date created: ' + str(self.date_created))\n print('Meta data: ' + str(self.meta))", "def get_details(self):", "def _get_information(self):\n pass", "def _metadata(self):\n meta = super()._metadata\n meta.update({\n \"name\": self.name,\n \"lead_in_time\": self.lead_in_time,\n \"amplification\": self.amplification,\n \"amplifier_clipping\": self.amplifier_clipping,\n \"power_threshold\": self.power_threshold,\n })\n return meta", "def get_assessment_part_mdata():\n return {\n 'assessment_part': {\n 'element_label': {\n 'text': 'assessment part',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'accepts an osid.id.Id object',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_id_values': [''],\n 'syntax': 'ID',\n 'id_set': [],\n },\n 'assessment': {\n 'element_label': {\n 'text': 'assessment',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'accepts an osid.id.Id object',\n 'languageTypeId': 
str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_id_values': [''],\n 'syntax': 'ID',\n 'id_set': [],\n },\n 'weight': {\n 'element_label': {\n 'text': 'weight',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'enter a cardinal value',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_cardinal_values': [None],\n 'syntax': 'CARDINAL',\n 'minimum_cardinal': None,\n 'maximum_cardinal': None,\n 'cardinal_set': []\n },\n 'allocated_time': {\n 'element_label': {\n 'text': 'allocated time',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'instructions': {\n 'text': 'enter a valid duration object.',\n 'languageTypeId': str(DEFAULT_LANGUAGE_TYPE),\n 'scriptTypeId': str(DEFAULT_SCRIPT_TYPE),\n 'formatTypeId': str(DEFAULT_FORMAT_TYPE),\n },\n 'required': False,\n 'read_only': False,\n 'linked': False,\n 'array': False,\n 'default_duration_values': [None],\n 'syntax': 'DURATION',\n 'date_time_set': [],\n },\n }", "def vulnerability_assessment(self) -> pulumi.Output['outputs.VulnerabilityAssessmentNoteResponse']:\n return pulumi.get(self, \"vulnerability_assessment\")", "def additional_data(self):\n # type: () -> string_types\n return self._additional_data", "def get_assessments_metadata(self):\n return Metadata(**settings.METADATA['assessment_ids'])", "def ExtraInfo(self) -> object:", "def post_add_assessment(self):\n course = courses.Course(self)\n assessment = course.add_assessment()\n course.save()\n self.redirect(self.get_action_url(\n 'edit_assessment', key=assessment.unit_id,\n extra_args={'is_newly_created': 1}))", "def test_instructor_assessment(self):\r\n\r\n # Navigate to the AI-assessment problem and submit an essay\r\n # We have configured the stub to simulate that this essay will be staff-graded\r\n self.course_nav.go_to_sequential('AI-Assessed')\r\n self.submit_essay('ai', 'Censorship in the Libraries')\r\n\r\n # Refresh the page to get the updated feedback\r\n # then verify that we get the feedback sent by our stub XQueue implementation\r\n self.assertEqual(self.get_asynch_feedback('ai'), ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # First score is the self-assessment score, which we haven't answered, so it's 0/2\r\n # Second score is the AI-assessment score, which we have answered, so it's 1/2\r\n # Third score is peer-assessment, which we haven't answered, so it's 0/2\r\n self.assertEqual(scores, [(0, 2), (1, 2), (0, 2)])", "def vulnerability_assessment(self) -> Optional[pulumi.Input['VulnerabilityAssessmentNoteArgs']]:\n return pulumi.get(self, \"vulnerability_assessment\")", "def get_details(self):\n raise Exception(\"bad details\")", "def summary_data(self):\n data = {\n \"total\": self.total,\n \"card_one_value\": self.cards[0].value,\n \"card_two_value\": self.cards[1].value,\n \"card_one_rank\": self.cards[0].rank,\n \"card_two_rank\": self.cards[1].rank,\n \"cards\": \" \".join([str(card) for card 
in self.cards]),\n \"soft\": int(self.soft),\n \"from_split\": int(self.from_split),\n \"blackjack\": int(self.blackjack),\n \"num_cards\": len(self.cards),\n \"start_total\": self.cards[0] + self.cards[1],\n \"wager\": int(self.wager),\n \"insurance\": int(self.insurance),\n \"surrender\": int(self.surrender),\n \"double_down\": int(self.double_down),\n \"num_aces\": self.num_aces,\n \"num_hard_aces\": self.num_hard_aces\n }\n return data", "def __init__(__self__, *,\n assessment_type: str,\n display_name: str,\n policy_definition_id: str,\n severity: str,\n categories: Optional[Sequence[str]] = None,\n description: Optional[str] = None,\n implementation_effort: Optional[str] = None,\n partner_data: Optional['outputs.SecurityAssessmentMetadataPartnerDataResponse'] = None,\n preview: Optional[bool] = None,\n remediation_description: Optional[str] = None,\n threats: Optional[Sequence[str]] = None,\n user_impact: Optional[str] = None):\n pulumi.set(__self__, \"assessment_type\", assessment_type)\n pulumi.set(__self__, \"display_name\", display_name)\n pulumi.set(__self__, \"policy_definition_id\", policy_definition_id)\n pulumi.set(__self__, \"severity\", severity)\n if categories is not None:\n pulumi.set(__self__, \"categories\", categories)\n if description is not None:\n pulumi.set(__self__, \"description\", description)\n if implementation_effort is not None:\n pulumi.set(__self__, \"implementation_effort\", implementation_effort)\n if partner_data is not None:\n pulumi.set(__self__, \"partner_data\", partner_data)\n if preview is not None:\n pulumi.set(__self__, \"preview\", preview)\n if remediation_description is not None:\n pulumi.set(__self__, \"remediation_description\", remediation_description)\n if threats is not None:\n pulumi.set(__self__, \"threats\", threats)\n if user_impact is not None:\n pulumi.set(__self__, \"user_impact\", user_impact)", "def _set_meta_info(self):\n self._report_data['environment'] = f'{self._get_environment()}'.lstrip()\n self._report_data['meta_account_id'] = self._account_id\n if self._account_name:\n self._report_data['meta_account_name'] = self._account_name\n\n # Get source ???\n # Appears in the Description section of the PDF Document Properties as Title.\n self._report_data['meta_title'] = ReportMeta.reports[self._report_key]['metaTitle'].upper()\n self._report_data['meta_subtitle'] = ReportMeta.reports[self._report_key]['metaSubtitle']\n\n # Appears in the Description section of the PDF Document Properties as Subject.\n if self._report_key in (ReportTypes.SEARCH_DETAIL_REPORT,\n ReportTypes.SEARCH_TOC_REPORT,\n ReportTypes.SEARCH_BODY_REPORT):\n search_type: str = self._report_data['searchQuery']['type']\n search_desc: str = TO_SEARCH_DESCRIPTION[search_type]\n criteria: str = ''\n if search_type == 'OWNER_NAME':\n criteria = self._report_data['searchQuery']['criteria']['ownerName']['last'] + ', '\n criteria += self._report_data['searchQuery']['criteria']['ownerName']['first']\n if 'middle' in self._report_data['searchQuery']['criteria']['ownerName']:\n criteria += ' ' + self._report_data['searchQuery']['criteria']['ownerName']['middle']\n else:\n criteria = self._report_data['searchQuery']['criteria']['value'].upper()\n self._report_data['meta_subject'] = f'{search_desc} - \"{criteria}\"'\n if search_type == 'MHR_NUMBER':\n self._report_data['footer_content'] = f'MHR Number Search - \"{criteria}\"'\n else:\n self._report_data['footer_content'] = f'MHR {search_desc} Search - \"{criteria}\"'\n elif self._report_key in 
(ReportTypes.MHR_REGISTRATION, ReportTypes.MHR_COVER,\n ReportTypes.MHR_TRANSFER, ReportTypes.MHR_EXEMPTION, ReportTypes.MHR_NOTE,\n ReportTypes.MHR_TRANSPORT_PERMIT, ReportTypes.MHR_REGISTRATION_COVER):\n reg_num = self._report_data.get('mhrNumber', '')\n self._report_data['footer_content'] = f'Manufactured Home Registration #{reg_num}'\n self._report_data['meta_subject'] = f'Manufactured Home Registration Number: {reg_num}'\n if self._get_environment() != '':\n self._report_data['footer_content'] = 'TEST DATA | ' + self._report_data['footer_content']", "def test_client_risk_assessment_retrieve(self):\n pass", "def get_study_info(self,std_id):\n raise NotImplementedError", "def data(self):\n pass", "def data(self):\n pass", "def meta_data(self) -> Dict:\n pass", "def analysis(self, game_info):\n pass", "def input_payment_details(self):\n pass", "def details(self) -> str:\n return f\"- **language**: [{self.language}]\\n\" \\\n f\"- **opengame**: [{self.opengame}]\\n\" \\\n f\"- **system**: [{self.system}]\\n\" \\\n f\"- **mode**: [{self.mode}]\\n\" \\\n f\"- **attributes**: [{self.attributes}]\\n \" \\\n f\"- **score_threshold**: [{self.score_threshold}]\\n \" \\\n f\"- **monsters**: [{self.monsters}]\\n\"", "def get_raw_information(self):\n try:\n info = self.student_attendance_record.get_period_info(\n self.start_date, self.day_periods)\n return (self.student_name, self.student_gender, info)\n except AttributeError:\n raise AttributeError, \\\n \"Failed to get student attendance record for: %s\" \\\n %unicode(self.student)", "def get_additional(cls, obj, **kwargs):\n if \"classifier_results\" in obj.extra_data:\n keywords = obj.extra_data.get('classifier_results').get(\"complete_output\")\n else:\n keywords = []\n prediction_results = obj.extra_data.get(\"arxiv_guessing\", {})\n if prediction_results:\n prediction_results = prediction_results[0].get(\"result\")\n return render_template(\n 'inspire_workflows/styles/harvesting_record_additional.html',\n object=obj,\n keywords=keywords,\n score=prediction_results.get(\"max_score\"),\n decision=prediction_results.get(\"decision\")\n )", "def data_for_question(self, question_type):\n\t\treturn {}", "def _assessments_editor_context(self, assessment_dates):\n assessments = {}\n for asmnt, date_range in zip(self.rubric_assessments, assessment_dates):\n # Django Templates cannot handle dict keys with dashes, so we'll convert\n # the dashes to underscores.\n template_name = make_django_template_key(asmnt['name'])\n assessments[template_name] = copy.deepcopy(asmnt)\n assessments[template_name]['start'] = date_range[0]\n assessments[template_name]['due'] = date_range[1]\n\n # In addition to the data in the student training assessment, we need to include two additional\n # pieces of information: a blank context to render the empty template with, and the criteria\n # for each example (so we don't have any complicated logic within the template). 
Though this\n # could be accomplished within the template, we are opting to remove logic from the template.\n student_training_module = self.get_assessment_module('student-training')\n\n student_training_template = {\n 'answer': {\n 'parts': [\n {'text': ''} for _ in self.prompts\n ]\n }\n }\n criteria_list = copy.deepcopy(self.rubric_criteria_with_labels)\n for criterion in criteria_list:\n criterion['option_selected'] = \"\"\n student_training_template['criteria'] = criteria_list\n\n if student_training_module:\n student_training_module = update_assessments_format([student_training_module])[0]\n example_list = []\n # Adds each example to a modified version of the student training module dictionary.\n for example in student_training_module['examples']:\n criteria_list = copy.deepcopy(self.rubric_criteria_with_labels)\n # Equivalent to a Join Query, this adds the selected option to the Criterion's dictionary, so that\n # it can be easily referenced in the template without searching through the selected options.\n for criterion in criteria_list:\n for option_selected in example['options_selected']:\n if option_selected['criterion'] == criterion['name']:\n criterion['option_selected'] = option_selected['option']\n example_list.append({\n 'answer': example['answer'],\n 'criteria': criteria_list,\n })\n assessments['training'] = {'examples': example_list, 'template': student_training_template}\n # If we don't have student training enabled, we still need to render a single (empty, or default) example\n else:\n assessments['training'] = {'examples': [student_training_template], 'template': student_training_template}\n\n return assessments", "def test_ai_assessment(self):\r\n\r\n # Navigate to the AI-assessment problem and submit an essay\r\n self.course_nav.go_to_sequential('AI-Assessed')\r\n self.submit_essay('ai', 'Censorship in the Libraries')\r\n\r\n # Refresh the page to get the updated feedback\r\n # then verify that we get the feedback sent by our stub XQueue implementation\r\n self.assertEqual(self.get_asynch_feedback('ai'), ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # First score is the self-assessment score, which we haven't answered, so it's 0/2\r\n # Second score is the AI-assessment score, which we have answered, so it's 1/2\r\n # Third score is peer-assessment, which we haven't answered, so it's 0/2\r\n self.assertEqual(scores, [(0, 2), (1, 2), (0, 2)])", "def save_test_evidence(self):\n payload = {\n \"test_id\": self.test_id,\n \"test_case_name\": self.test_case_name,\n \"epoch_timestamp\": self.epoch_timestamp,\n \"human_timestamp\": self.human_timestamp,\n \"verification_name\": self.verification_name,\n \"status\": self.status,\n \"value\": self.value,\n \"critical_value\": self.critical_value\n }\n return self.insert_regression_test_evidence(self.test_case_name, payload)", "def get_info(self):\n self.exists = self.check_subscr()\n return self.attrs", "def data(self):", "def has_assessment(self):\n return 'assessmentId' in self._my_map and bool(self._my_map['assessmentId'])", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def _set_additional_fields(self, data):\n # Remove the non 
required rules data.\n if 'rules' in data:\n del data['rules']\n # Change description into proper string.\n data['description'] = re.sub(\"[\\'\\\"]\", \"\", data['description'])\n # Calculate and update the premium field.\n premium = str(data.get('premium', \"false\")).lower() == 'true'\n data['pvtVuln'] = premium\n return data", "def collect_data(self,sensation,action,reward,next_sensation):\n pass", "def get_main_information(self) -> Dict:\n if self.lock:\n if self._information is None:\n # Setup self._information for the first time when study is lock\n self._information = self.client.get_instances_id(self.id_)\n\n return self._information\n\n return self.client.get_instances_id(self.id_)", "def add_details(self):\n\n if self.co.algorithm == \"vv\":\n algo = \"Verlocity Verlot\"\n if self.co.algorithm == \"rk4o\":\n algo = \"Runge Kutta Forth Order\"\n if self.co.algorithm == \"herm\":\n algo = \"Hermite Fourth Order\"\n\n self.algorithm_title = self.ax.text(\n 1.01, 0.65, \"Algorithm:\", transform=self.ax.transAxes\n )\n self.algorithm_text = self.ax.text(\n 1.01, 0.58, algo, transform=self.ax.transAxes\n )\n self.timestep_text = self.ax.text(\n 1.01, 0.51, \"dt =\" + str(self.co.tstep), transform=self.ax.transAxes\n )\n self.length_softening_distance = self.ax.text(\n 1.01,\n 0.44,\n r\"$\\epsilon$ = \" + str(self.co.epsilon),\n transform=self.ax.transAxes,\n )", "def metadata(self) -> Optional[pulumi.Input['SecurityAssessmentMetadataPropertiesArgs']]:\n return pulumi.get(self, \"metadata\")", "def info(self):", "def info(self):", "def training_info(self):\n pass", "def test_superuser_create_assessment(self):\n req, resp = data.get_assessment(self.contract['id'])\n\n response = self.superuser.post(self.assessment_list_url, req)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def test_superuser_create_assessment(self):\n req, resp = data.get_assessment(self.contract['id'])\n\n response = self.superuser.post(self.assessment_list_url, req)\n self.assertEqual(response.status_code, status.HTTP_201_CREATED)", "def data(self):\n return (self._full_name, self.total_donation, self.num_of_donations, self.avg_donation)", "def healthcare():", "def get_main_information(self) -> Dict:\n if self.information is None:\n self.information = self.orthanc.get_instance_information(\n self.identifier\n )\n\n return self.information", "def what_is_the_grade(self):\n\t\treturn_dict = {\n\t\t\t'section_title': self.title, \n\t\t\t'section_weight': self.weight,\n\t\t\t'grade_value' : self.current_grade_value,\n\t\t\t'comment_text' : self.current_comment_text,\n\t\t\t'default_comments_text' : self.current_default_comment_text,\n\t\t\t'example_comments_text' : self.current_example_comment_text,\n\t\t\t'is_complete': self.is_complete\n\t\t}\n\n\t\treturn return_dict", "def make_extra_questions_txt(self):\n raise NotImplementedError", "def account_summary(self):\n pass", "def export(self):\n metadata = {\n 'user': self.operator,\n 'technique': ' | '.join(self.techniques),\n }\n return self.text, metadata", "def extra(self) -> Dict[str, Any]:\n extra = self.extras.copy()\n if isinstance(self.author, str):\n extra['Author'] = self.author\n if isinstance(self.email, str):\n extra['Email'] = self.email\n if isinstance(self.description, str):\n extra['Description'] = self.description\n return extra", "def prepare_student_data(self) -> dict:\n self._filename_pre_data()\n empty_student = {}\n empty_student[\"scoreTimestamp\"] = \"N/A\"\n for i in self.draft_out:\n empty_student[i] = \"N/A\"\n for i in 
self.pre_data:\n empty_student[i] = self.pre_data[i]\n self.pre_data = empty_student", "def save_assessment(self, data, _system):\r\n\r\n closed, msg = self.check_if_closed()\r\n if closed:\r\n return msg\r\n\r\n if self.child_state != self.ASSESSING:\r\n return self.out_of_sync_error(data)\r\n\r\n try:\r\n score = int(data.get('assessment'))\r\n score_list = [int(x) for x in data.getall('score_list[]')]\r\n except (ValueError, TypeError):\r\n # This is a dev_facing_error\r\n log.error(\"Non-integer score value passed to save_assessment, or no score list present.\")\r\n # This is a student_facing_error\r\n _ = self.system.service(self, \"i18n\").ugettext\r\n return {\r\n 'success': False,\r\n 'error': _(\"Error saving your score. Please notify course staff.\")\r\n }\r\n\r\n # Record score as assessment and rubric scores as post assessment\r\n self.record_latest_score(score)\r\n self.record_latest_post_assessment(json.dumps(score_list))\r\n\r\n d = {'success': True, }\r\n\r\n self.change_state(self.DONE)\r\n d['allow_reset'] = self._allow_reset()\r\n\r\n d['state'] = self.child_state\r\n return d", "def show_data(self, ):\r\n return print('society_name : {}\\n'\r\n 'flat : {}\\n'\r\n 'house_no : {}\\n'\r\n 'no_of_members : {}\\n'\r\n 'income : {}\\n '\r\n .format(self.society_name, self.flat, self.house_no, self.no_of_members, self.income))", "def get_assessments(self):\n if not self.is_assessment_based_activity():\n raise IllegalState()\n else:\n raise Unimplemented()", "def _load_assessment_results_page(self):\r\n\r\n fmt = '{0:0.' + str(Configuration.PLACES) + 'g}'\r\n\r\n self.txtAvailability.set_text(\r\n str(fmt.format(self._function_model.availability)))\r\n self.txtMissionAt.set_text(\r\n str(fmt.format(self._function_model.mission_availability)))\r\n self.txtMissionHt.set_text(\r\n str(fmt.format(self._function_model.mission_hazard_rate)))\r\n self.txtPredictedHt.set_text(\r\n str(fmt.format(self._function_model.hazard_rate)))\r\n\r\n self.txtMMT.set_text(str(fmt.format(self._function_model.mmt)))\r\n self.txtMCMT.set_text(str(fmt.format(self._function_model.mcmt)))\r\n self.txtMPMT.set_text(str(fmt.format(self._function_model.mpmt)))\r\n\r\n self.txtMissionMTBF.set_text(\r\n str(fmt.format(self._function_model.mission_mtbf)))\r\n self.txtMTBF.set_text(str(fmt.format(self._function_model.mtbf)))\r\n self.txtMTTR.set_text(str(fmt.format(self._function_model.mttr)))\r\n\r\n return False", "def look_for_other_attributes(context):\n json_data = context.response.json()\n assert \"recommended_versions\" in json_data, \"No recommended version found\"\n assert \"registration_link\" in json_data, \"No snyk registration link found\"\n assert \"component_analyses\" in json_data, \"No component analyses data found\"\n assert \"message\" in json_data, \"No message found\"\n assert \"severity\" in json_data, \"No severity found\"\n assert \"known_security_vulnerability_count\" in json_data\n assert \"security_advisory_count\" in json_data", "def __str__(self):\n # First obtain a string describing the underlying data model.\n strg = super(MiriTelescopeEmissionModel, self).__str__()\n \n # Add the extras\n if self.meta.instrument.filter is not None:\n strg += \"Data valid for filter=\\'%s\\' \" % \\\n self.meta.instrument.filter\n else:\n strg += \"Data valid for UNKNOWN filter \"\n if self.meta.telescope_temperature is not None:\n strg += \"and telescope temperature=%.2fK\" % \\\n self.meta.telescope_temperature\n else:\n strg += \"and UNKNOWN telescope temperature\"\n return strg", "def 
scrape_admission_details(self, main_content):\n group_items = self._scrape_group_items(main_content, 'field field-name-field-admission-details '\n 'field-type-text-long field-label-above')\n details_str = self.scrape_group_items_str(group_items)\n\n details_str = details_str.lstrip()\n details_str = details_str.rstrip()\n\n if len(details_str) > 0:\n details_str = self.csv_quote_escape(details_str)\n\n return details_str", "def __init__(__self__, *,\n assessment_count: Optional[pulumi.Input[int]] = None,\n extended_details: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n group_count: Optional[pulumi.Input[int]] = None):\n if assessment_count is not None:\n pulumi.set(__self__, \"assessment_count\", assessment_count)\n if extended_details is not None:\n pulumi.set(__self__, \"extended_details\", extended_details)\n if group_count is not None:\n pulumi.set(__self__, \"group_count\", group_count)", "def duty_details(self):\n return self._duty_details", "def test_client_risk_assessment_list(self):\n pass", "def field_data(self):\r\n\r\n return DictFieldData({\r\n 'data': '<peergrading/>',\r\n 'location': self.problem_location,\r\n 'use_for_single_location': True,\r\n 'link_to_location': self.coe_location.to_deprecated_string(),\r\n 'graded': True,\r\n })", "def additional_info(self) -> Optional[Mapping[str, str]]:\n return pulumi.get(self, \"additional_info\")", "def info(self):\n print self.id, self.type, self.xyz.get_xyz", "def createIndustryInfo(self):\n self.setCurrentValue(0)\n self.setMinMax()\n self.writeIndustryName()\n self.createIndustrySim()\n self.writeIndustryDescription()\n self.writeIndustryCost()", "def fixture_additional_information_example():\n test_example = AdditionalInformation(\n ethical_considerations=ETHICAL_CONSIDERATIONS,\n caveats_and_recommendations=CAVEATS_AND_RECOMMENDATIONS,\n custom_details=CUSTOM_DETAILS,\n )\n return test_example", "def record(self):\n # TODO: record the data", "def get_enrolment_info(self):\n return None", "def agreements():\n pass", "def details(self) -> \"dict\":\n return self._attrs.get(\"details\")", "def assessment_type(self) -> str:\n return pulumi.get(self, \"assessment_type\")", "def get_info(self):\n return \"TODO !\"", "def info(self):\n ss = \"\\nSummary ARF info\\n\"\n ss += \"----------------\\n\"\n # Summarise data members\n ss += array_stats_str(self.energy_lo, 'Energy lo')\n ss += array_stats_str(self.energy_hi, 'Energy hi')\n ss += array_stats_str(self.effective_area.to('m^2'), 'Effective area')\n ss += 'Safe energy threshold lo: {0:6.3f}\\n'.format(self.energy_thresh_lo)\n ss += 'Safe energy threshold hi: {0:6.3f}\\n'.format(self.energy_thresh_hi)\n\n return ss", "def additional_data(self, additional_data):\n\n self._additional_data = additional_data", "def info(self) -> dict:", "def data(self) -> dict:\n raise NotImplementedError()", "def _section_course_info(course_key, access):\r\n course = get_course_by_id(course_key, depth=None)\r\n\r\n section_data = {\r\n 'section_key': 'course_info',\r\n 'section_display_name': _('Course Info'),\r\n 'access': access,\r\n 'course_id': course_key,\r\n 'course_display_name': course.display_name,\r\n 'enrollment_count': CourseEnrollment.num_enrolled_in(course_key),\r\n 'has_started': course.has_started(),\r\n 'has_ended': course.has_ended(),\r\n 'list_instructor_tasks_url': reverse('list_instructor_tasks', kwargs={'course_id': course_key.to_deprecated_string()}),\r\n }\r\n\r\n try:\r\n advance = lambda memo, (letter, score): \"{}: {}, \".format(letter, score) + memo\r\n 
section_data['grade_cutoffs'] = reduce(advance, course.grade_cutoffs.items(), \"\")[:-2]\r\n except Exception:\r\n section_data['grade_cutoffs'] = \"Not Available\"\r\n # section_data['offline_grades'] = offline_grades_available(course_key)\r\n\r\n try:\r\n section_data['course_errors'] = [(escape(a), '') for (a, _unused) in modulestore().get_course_errors(course.id)]\r\n except Exception:\r\n section_data['course_errors'] = [('Error fetching errors', '')]\r\n\r\n return section_data", "def additional_log_details(self) -> Dict[str, Any]:\n additional_details = {}\n if hasattr(self, \"requestor\"):\n additional_details[\"Requestor\"] = self.requestor\n return additional_details" ]
[ "0.63733923", "0.61233914", "0.60425943", "0.5986746", "0.59847146", "0.5970008", "0.596638", "0.59152776", "0.590361", "0.5881575", "0.58584493", "0.5775308", "0.56499285", "0.56463957", "0.5642021", "0.5598647", "0.55952555", "0.5585957", "0.5569665", "0.55653036", "0.55639553", "0.55543524", "0.5536431", "0.5517683", "0.55130774", "0.5507232", "0.54993033", "0.54916847", "0.54893136", "0.54888856", "0.54759717", "0.54145104", "0.54067224", "0.5405085", "0.5389159", "0.5389159", "0.5370805", "0.53435177", "0.5341477", "0.5341398", "0.53386515", "0.53239447", "0.5311002", "0.530835", "0.5276652", "0.52746725", "0.5270163", "0.5269666", "0.5268228", "0.5266818", "0.5266818", "0.5266818", "0.52635217", "0.5256684", "0.52479905", "0.5242154", "0.5241826", "0.52315754", "0.5228113", "0.5228113", "0.5225876", "0.5223823", "0.5223823", "0.5214896", "0.5205461", "0.5203595", "0.5200222", "0.5197203", "0.5196509", "0.5195554", "0.51945066", "0.5190833", "0.51833236", "0.5164492", "0.5162665", "0.5158723", "0.5157848", "0.51545966", "0.5154589", "0.5150459", "0.5147789", "0.5144505", "0.51418686", "0.51375574", "0.5136846", "0.5136676", "0.5136221", "0.5134584", "0.51338243", "0.51294935", "0.5120115", "0.51200217", "0.51198107", "0.51186234", "0.51178473", "0.5115324", "0.51111245", "0.5109562", "0.51066816" ]
0.5422631
31
User friendly display name of the assessment
def display_name(self) -> pulumi.Output[str]: return pulumi.get(self, "display_name")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assessment_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"assessment_name\")", "def __str__(self):\n # Use 'Unknown' if the course instance does not have a term\n if self.course_instance.term:\n term = self.course_instance.term.verbose_name()\n else:\n term = 'Unknown'\n\n exam_unicode = '{term} {number} {type} for {course}'.format(\n term=term,\n number=self.get_exam_number_display(),\n type=self.get_exam_type_display(),\n course=self.course_instance.course)\n if self.instructors:\n instructors = ', '.join([i.last_name for i in self.instructors])\n return '{}, taught by {}'.format(exam_unicode, instructors)\n else:\n return '{} (Instructors Unknown)'.format(exam_unicode)", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> str:\n return pulumi.get(self, \"display_name\")", "def displayName(self):\n return self.tr('SE Rimozione Inquinanti')", "def get_descriptive_name(self):\n return f\"{self.year} {self.make} {self.model}\".title()", "def display_name(self):", "def dc_title(self):\n return u\"{0} ({1}): {2} {3}\".format(\n self.label, self.in_assessment[0].timepoint,\n self.subjects[0].code_in_study,\n \"...\" if len(self.subjects) > 1 else \"\")", "def get_descriptive_name(self):\n long_name = f\"{self.make} {self.model} {self.year}\"\n \n return long_name.title()", "def get_descriptive_name(self):\n long_name = f\"{self.year} {self.make} {self.model}\"\n return long_name.title()", "def get_descriptive_name(self):\r\n long_name=str(self.year)+' '+self.make+' '+self.model\r\n return long_name.title()", "def get_descriptive_name(self):\r\n long_name = str(self.year)+' '+self.make + ' '+self.model\r\n return long_name.title()", "def display(self):\n return f'{self._last_name},{self._first_name}:({self._student_id}) {self._major} gpa:{self._gpa}'", "def __str__(self):\n return str(self.__student_name) + \" has grade \" + str(self.__grade_value) + \" at \" + str(self.__discipline_name)", "def display_name(cls):\n return cls.name.replace('_', ' ').title()", "def get_descriptive_name(self):\r\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\r\n return long_name.title()", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"display_name\")", "def 
get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def get_descriptive_name(self):\n long_name = str(self.year) + ' ' + self.make + ' ' + self.model\n return long_name.title()", "def display_name(self) -> Optional[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[str]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"display_name\")", "def show_name(self):\n return self.name", "def display_name(self):\n return self.__display_name", "def display(self):\n return self.name", "def __str__(self) -> str:\n return f\"{self.analysis.title} v{self.title}\"", "def display_name(self) -> str:\n return self._display_name", "def __str__(self):\n return f\"{self.semester} | {self.school} | {self.position} | {self.class_name}\"", "def get_display_name(self):\n return self.display_name", "def marketing_name(self):\n return \"Custom solution - 2\"", "def __str__(self):\n return f\"{self.rank.title()} of {self.suit.title()}\"", "def get_descriptive_name(self):\n description = (f\"{self.year} {self.manufacturer.title()} \"\n f\"{self.model.title()}\")\n\n return description", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, 
\"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def display_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"display_name\")", "def __str__(self):\n return '%s' % (self.name)", "def __str__(self):\n return '%s' % (self.name)", "def __str__(self):\n return '%s' % (self.name)", "def __str__(self):\n return '%s' % (self.name)", "def __str__(self):\n return '%s' % (self.name)", "def full_name(self) -> str:\n\n if self.severity == 1:\n return self.name\n\n return f\"{self.name}[{self.severity}]\"", "def get_describe_name(self):\n long_name = str(self.year)+ ' ' + self.make.title()+ ' ' +self.model.title()\n return long_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name", "def display_name(self):\n return self._display_name" ]
[ "0.76884186", "0.72017074", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.71944445", "0.6931914", "0.69249964", "0.6901365", "0.68354696", "0.6828654", "0.6815666", "0.6806246", "0.68054557", "0.680478", "0.6786466", "0.67716026", "0.6768377", "0.67452645", "0.67452645", "0.67452645", "0.67452645", "0.67452645", "0.67452645", "0.67427516", "0.67427516", "0.67427516", "0.67427516", "0.67427516", "0.67427516", "0.67427516", "0.67427516", "0.67324454", "0.67324454", "0.67324454", "0.6715869", "0.6715869", "0.6715869", "0.6715869", "0.6715869", "0.6715869", "0.6715869", "0.66708964", "0.6654169", "0.66311544", "0.6616998", "0.6613916", "0.6607437", "0.65945566", "0.65785605", "0.6548704", "0.65167755", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6513023", "0.6511626", "0.6511626", "0.6511626", "0.6511626", "0.6511626", "0.6507551", "0.6506242", "0.648931", "0.648931", "0.648931", "0.648931", "0.648931", "0.648931", "0.648931", "0.648931", "0.648931" ]
0.6926838
21
Links relevant to the assessment
def links(self) -> pulumi.Output['outputs.AssessmentLinksResponse']: return pulumi.get(self, "links")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getLink(self):", "def exam_url(self, obj):\n request = self.context.get(\"request\")\n return reverse(\"exam-detail\", args=[obj.id], request=request)", "def get_absolute_url(self):\n return reverse('trait_browser:source:studies:pk:detail', kwargs={'pk': self.pk})", "def href(self, request) -> str:\n raise NotImplementedError()", "def get_absolute_url(self):\n return reverse('clinicalTrial-detail', args=[str(self.trialId)])", "def link(self, obj):\n return format_html(\n '<a href=\"{url}\">{url}</a>',\n url='https://sms.cam.ac.uk/collection/{}'.format(obj.id)\n )", "def issueListing(self, v, i):\n #list of URLS within the issue\n# links = []\n issURL = self.link(vol = v, iss = i )\n html=urlopen(issURL)\n soup=BeautifulSoup(html,'html.parser')\n URLs = [] #Empty list\n \n# titles = soup.find_all('h5', class_=\"title\")\n# authors = soup.find_all('h6', class_=\"authors\")\n# pubs = soup.find_all('h6', class_=\"pub-info\")\n# for t, a, p in zip(titles, authors, pubs):\n blocks = soup.find_all('div', class_=\"article panel article-result\")\n for b in blocks:\n# print(b)\n titletag = b.find('h5', class_=\"title\")\n title = titletag.get_text()\n #Extract abstract url from title head\n aURL = titletag.find('a', href = True)['href']\n alink = 'https://journals.aps.org' + aURL\n #Print out the scraped information\n print(title)\n print(alink)\n #Extract research area and topic keywords\n kwlist = b.find('ul', class_=\"inline-list subjects\")\n #If the list tag exists\n if kwlist:\n lis = kwlist.find_all('li')\n kws = [li.get_text() for li in lis] \n print(kws)\n #Add utf-8 encode\n# print(kws.encode('utf-8')) \n print('----------------------------------------------------------------') \n #Collect URLs in the issue\n URLs.append('https://journals.aps.org' + aURL)\n return URLs", "def test_accessible(self):\n survey = Survey.objects.get(id=2)\n responses = Response.objects.filter(survey=survey)\n response = responses.all()[0]\n urls = [\n reverse(\"survey-list\"),\n reverse(\"survey-detail\", kwargs={\"id\": 2}),\n reverse(\"survey-completed\", kwargs={\"id\": 2}),\n reverse(\"survey-detail-step\", kwargs={\"id\": 2, \"step\": 1}),\n reverse(\"survey-confirmation\", kwargs={\"uuid\": response.interview_uuid}),\n ]\n for url in urls:\n self.assert_accessible(url)", "def check_index_and_outline(self, authed_client):\r\n index_url = '/course/'\r\n index_response = authed_client.get(index_url, {}, HTTP_ACCEPT='text/html')\r\n parsed_html = lxml.html.fromstring(index_response.content)\r\n course_link_eles = parsed_html.find_class('course-link')\r\n self.assertGreaterEqual(len(course_link_eles), 2)\r\n for link in course_link_eles:\r\n self.assertRegexpMatches(\r\n link.get(\"href\"),\r\n 'course/slashes:{0}'.format(Locator.ALLOWED_ID_CHARS)\r\n )\r\n # now test that url\r\n outline_response = authed_client.get(link.get(\"href\"), {}, HTTP_ACCEPT='text/html')\r\n # ensure it has the expected 2 self referential links\r\n outline_parsed = lxml.html.fromstring(outline_response.content)\r\n outline_link = outline_parsed.find_class('course-link')[0]\r\n self.assertEqual(outline_link.get(\"href\"), link.get(\"href\"))\r\n course_menu_link = outline_parsed.find_class('nav-course-courseware-outline')[0]\r\n self.assertEqual(course_menu_link.find(\"a\").get(\"href\"), link.get(\"href\"))", "def get_absolute_url(self):\n return reverse('subject-detail', args=[str(self.id)])", "def test_animais_list_link(self):\n PropriedadeUser.objects.create(propriedade=self.propriedade1,\n user=self.user1,\n 
owner=True)\n login = self.client.login(username='user1', password='12345')\n response = self.client.get(reverse('animal_pesagem_form', kwargs={'animal_pk': self.animal.pk,}))\n expected = 'href=\"{}\"'.format(reverse('animais_list', kwargs={'propriedade_pk': self.animal.propriedade.pk,}))\n self.assertContains(response, expected)", "async def link_to(self, *args):\n pass", "def link_residues(self) -> None:\n ...", "def get_absolute_url(self):\n return reverse('curriculum_guides:curriculum_guide', args=[self.slug])", "def test_dashboards_v2_link(self):\n pass", "def exactor_links(self, response: BeautifulSoup):\n raise NotImplementedError", "def get_success_url(self):\n return reverse_lazy('grades:list') + '?ok'", "def handout_links(self):\r\n return self.q(css='section.handouts ol li a').map(lambda el: el.get_attribute('href')).results", "def links(self) -> str:\n return pulumi.get(self, \"links\")", "def iter_page_links(self) -> Iterable[str]:\n base_url = 'https://www.med.navy.mil'\n r = requests.get(self.starting_url, verify=CERTIFICATE_DIR + '/cat3.pem')\n soup = bs4.BeautifulSoup(r.content, features=\"html.parser\")\n\n # get target column of list items\n issuance_list = soup.find('div', attrs={'class': 'noindex ms-wpContentDivSpace'})\n matches = [\"Publications\", \"BUMEDNotes\", \"BUMEDInstructions\"]\n # extract links\n links = [link for link in issuance_list.find_all('a')]\n for link in links[2:-1]:\n if any(x in str(link) for x in matches):\n if not link['href'].startswith('http'):\n url = base_url + link['href']\n else:\n url = link['href']\n yield url", "def links(request):\n cart = Cart(request)\n assert isinstance(request, HttpRequest)\n return render(\n request,\n 'app/links.html',\n {\n \t'cart': cart,\n 'title':'Полезные ресурсы',\n 'message':'Your contact page.',\n 'year':datetime.now().year,\n }\n )", "def LinkAnat(self):\n\n if self.anatomical is None:\n return\n for entry in self.info.keys():\n info = self.info[entry]\n if info.has_key('anat_link'):\n self.LinkFiles(info['outdir'], self.anatomical)", "def _link_items(self):\n pass", "def trigger_assessment():\n\n data_api_client.req.assessments().post(data={\n 'assessment': {\n 'brief_id': request.form['brief_id'],\n 'domain_name': request.form['domain_name'],\n 'supplier_code': request.form['supplier_code']\n },\n 'update_details': {\n 'updated_by': ''\n }\n })\n\n return redirect(url_for('.assessments_review'))", "def iter_page_links(self) -> Iterable[str]:\n base_url = 'https://health.mil/About-MHS/OASDHA/Defense-Health-Agency/Resources-and-Management/DHA-Publications'\n yield base_url", "def get_absolute_url(self):\n return reverse('injury-detail', args=[str(self.id)])", "def link(self, s_id):\r\n\r\n # Take the link entires from TOML file\r\n schedules = self.cfg.get('payload',{}).get('schedule')\r\n # Check for valid entires\r\n if schedules:\r\n for entries in schedules:\r\n # Construct payload \r\n for payload in entries.get('link'):\r\n # Check the entry vs a json schema\r\n check.check_entry(path='schemas/link.json', test=payload)\r\n # Post request\r\n if 'id' in self.schedules[-1]:\r\n payload['schedule'] = self.schedules[-1].get('id')\r\n if 'id' in self.workouts[-1]:\r\n payload['workout'] = self.workouts[-1].get('id')\r\n return self.add_post(payload, API.url_link, self.links)", "def methods():\n list_groups_text = '<a href=\"/groups\">List Groups</a>'\n list_users_text = '<a href=\"/users\">List Users</a>'\n page_links = list_groups_text + \"<br>\" + list_users_text\n return page_links", "def 
get_name_link_html(self):\n url_text = \"{{% url 'trait_browser:source:studies:pk:detail' pk={} %}} \".format(self.pk)\n return URL_HTML.format(url=url_text, name=self.i_study_name)", "def to_projectlink(self):\n\n thumb_image_url = reverse('project_serve_file', args=[self.short_name,self.logo])\n\n args = {\"abreviation\":self.short_name,\n \"title\":self.short_name,\n \"description\":self.description,\n \"URL\":reverse('comicsite.views.site', args=[self.short_name]),\n \"download URL\":\"\",\n \"submission URL\":self.get_submission_URL(),\n \"event name\":self.event_name,\n \"year\":\"\",\n \"event URL\":self.event_url,\n \"image URL\":self.logo,\n \"thumb_image_url\":thumb_image_url,\n \"website section\":\"active challenges\",\n \"overview article url\":self.publication_url,\n \"overview article journal\":self.publication_journal_name,\n \"overview article citations\":\"\",\n \"overview article date\":\"\",\n \"submission deadline\":\"\",\n \"workshop date\":self.workshop_date,\n \"open for submission\":\"yes\" if self.is_open_for_submissions else \"no\",\n \"data download\":\"yes\" if self.offers_data_download else \"no\",\n \"dataset downloads\":self.number_of_downloads,\n \"registered teams\":\"\",\n \"submitted results\":self.number_of_submissions,\n \"last submission date\":self.last_submission_date,\n \"hosted on comic\":True,\n \"created at\":self.created_at\n }\n\n projectlink = ProjectLink(args)\n return projectlink", "def get_absolute_url(self):\n return reverse('trialResponse-detail', args=[str(self.responseId)])", "def index():\n g.data['api_version'] = API_VERSION\n g.data['apilib_version'] = API_VERSION\n g.data['oar_version'] = VERSION\n g.data['links'] = []\n #endpoints = ('resources', 'jobs', 'config', 'admission_rules')\n endpoints = ('resources', 'jobs')\n for endpoint in endpoints:\n g.data['links'].append({\n 'rel': 'collection',\n 'href': url_for('%s.index' % endpoint),\n 'title': endpoint,\n })", "def links(self):\n return self._links_tpl.expand(self._identity, self._record)", "def link(self):\n return f\"[{self.numbered_title}]({self.html_url})\"", "def test_tag_links_present(self):\n response = self.client.get(self.get_url(self.study.pk))\n for tagged_trait in self.tagged_traits:\n tag_study_url = reverse(\n 'tags:tag:study:list', kwargs={'pk': tagged_trait.tag.pk, 'pk_study': self.study.pk})\n self.assertIn(tag_study_url, str(response.content))", "def test_with_links_cases_and_issues():\n pass", "def test_link_list(self):\n response = self.client.get('/tests/dashboard/')\n self.assertEqual(response.status_code, 200)\n self.assertContains(response, \"example.com\")", "def get_links(self):\n links = \"\"\n if self.title != \"\":\n links += html_link_to_tag(\n plain_to_html(self.title), self.title, self.proc\n )\n return links + \\\n html_unordered_list([x.get_links() for x in self.subsections])", "def get_absolute_url(self):\n return reverse('course-detail', args=[str(self.id)])", "def _on_details_navigating(self, evt):\n \n # get URL\n url = evt.url\n \n # parse URL\n match = DETAILS_URL_PATTERN.search(url)\n if not match:\n return\n \n # get match\n parameter = match.group('parameter')\n value = match.group('value').replace(\"%20\", \" \")\n \n # check value\n if not value:\n return\n \n # show article by DOI\n if parameter == 'doi':\n link = \"https://dx.doi.org/%s\" % value\n try: webbrowser.open(link, autoraise=1)\n except: pass\n \n # show article by PMID (in PubMed)\n elif parameter == 'pmid':\n link = \"https://ncbi.nlm.nih.gov/pubmed/%s\" % value\n try: 
webbrowser.open(link, autoraise=1)\n except: pass\n \n # search by author (in PubMed)\n elif parameter == 'author':\n query = \"%s[AU]\" % value\n self._search_repository(query)\n \n # search by journal (in PubMed)\n elif parameter == 'journal':\n query = \"%s[JT]\" % value\n self._search_repository(query)\n \n # show articles by author (in library)\n elif parameter == 'authorid':\n query = \"%s[AUID]\" % value\n self._articles_view.SetMasterQuery(None)\n self._articles_view.SetQuery(query)\n self._articles_view.ShowArticles()\n \n # show articles by label (in library)\n elif parameter == 'labelid':\n query = \"%s[LABELID]\" % value\n self._articles_view.SetMasterQuery(None)\n self._articles_view.SetQuery(query)\n self._articles_view.ShowArticles()\n \n # show articles by collection (in library)\n elif parameter == 'collectionid':\n query = \"%s[COLLECTIONID]\" % value\n self._articles_view.SetMasterQuery(None)\n self._articles_view.SetQuery(query)\n self._articles_view.ShowArticles()\n \n # set article rating\n elif parameter == 'rating':\n if value in \"012345\":\n self._on_articles_rating(rating=int(value))\n \n # set article colour\n elif parameter == 'colour':\n colour = mwx.COLOUR_BULLETS.get(value, None)\n if colour is not None:\n self._on_articles_colour(colour=colour)\n \n # reveal PDF file\n elif parameter == 'pdf':\n path = os.path.join(self._library.library_path, value+\".pdf\")\n self._on_articles_reveal_pdf(path=path)", "def makeLinks(self):\n self.deleteIndexFileIfExists()\n _fileNames = self.getHTMLFileNames()\n _msgPart1 = \"<a href=\\\"\"\n _msgPart2 = \"\\\" target=\\\"loadHTMLResults\\\">\"\n _msgPart3 = \"</a><br>\"\n _link = \"\"\n for _fileName in _fileNames:\n _origFileName = _fileName\n _linkName = _fileName.split('.')[0]\n _createAnchorTag = (_msgPart1+str(_origFileName)+_msgPart2+str(_linkName)+_msgPart3)\n _link = _link + _createAnchorTag\n return _link", "def set_dbgap_link(self):\n return self.STUDY_VERSION_URL.format(self.full_accession)", "def urls(self):\n urls = super(Approval, self).urls()\n urls.append(url(r'^(?P<process_pk>\\d+)/{}/(?P<task_pk>\\d+)/assign/$'.format(self.name),\n self.assign_view, {'flow_task': self}, name=\"{}__assign\".format(self.name)))\n urls.append(url(r'^(?P<process_pk>\\d+)/{}/(?P<task_pk>\\d+)/unassign/$'.format(self.name),\n self.unassign_view, {'flow_task': self}, name=\"{}__unassign\".format(self.name)))\n return urls", "def get_absolute_url(self):\n return reverse('report', args=[str(self.id)])", "def get_absolute_url(self):\n return reverse('criteria-detail', args=[str(self.criteriaId)])", "def get_absolute_url(self):\n\t\treturn reverse('course-detail', args=[str(self.id)])", "def get_details_url(self, textbook_id):\r\n return reverse_course_url(\r\n 'textbooks_detail_handler',\r\n self.course.id,\r\n kwargs={'textbook_id': textbook_id}\r\n )", "def test_org_instructor_access(self):\r\n self.login(self.org_instructor_user)\r\n url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})\r\n check_for_get_code(self, 200, url)\r\n\r\n url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})\r\n check_for_get_code(self, 200, url)\r\n\r\n url = reverse('instructor_dashboard', kwargs={'course_id': self.other_org_course.id.to_deprecated_string()})\r\n check_for_get_code(self, 404, url)", "def link(self, k, v, row=None):\n\n if row:\n inspection_id = row.cr_shelter_inspection.id\n if inspection_id:\n return A(v, _href=URL(c = \"cr\",\n f = 
\"shelter_inspection\",\n args = [inspection_id],\n ),\n )\n return v", "def link(self, k, v, row=None):\n\n if row:\n inspection_id = row.cr_shelter_inspection.id\n if inspection_id:\n return A(v, _href=URL(c = \"cr\",\n f = \"shelter_inspection\",\n args = [inspection_id],\n ),\n )\n return v", "def cal_link(self):\n return get_host() + reverse('events:detail', args=[self.id])", "def requestShowLink(self, *args, **kwargs): # real signature unknown\n pass", "def test_create_link(self):\n self.app.set_user(TESTUSER_ADMIN_EMAIL)\n\n link_create_page = self.app.get('/admin/scionlab/link/add/')\n form = link_create_page.form\n\n ases = AS.objects.iterator()\n as_a = next(ases)\n as_b = next(ases)\n\n form.select('type', value=Link.PROVIDER)\n form.select('from_host', text=str(as_a.hosts.first()))\n form.select('to_host', text=str(as_b.hosts.first()))\n form.submit(value=\"Save\").follow() # redirect on success\n\n link = Link.objects.get()\n self.assertEqual(link.type, Link.PROVIDER)\n\n # The variable `ases` is a generator and it will be closed on it being garbage collected.\n # Its contains a reference to a cursor, that will attempt to work with the DB (closing\n # the query), when being deallocated.\n # Avoid that by closing it beforehand:\n ases = None", "def get_absolute_url(self):\n return reverse('trait_browser:source:datasets:detail', kwargs={'pk': self.pk})", "def get_absolute_url(self):\n return reverse('trait_browser:source:traits:detail', kwargs={'pk': self.pk})", "def view_sample_link(unused1, unused2, model, unused3):\n del unused1, unused2, unused3\n return (\n Markup(\n u\"<a href='%s'>%s</a>\"\n % (url_for(\"sample.index_view\", search=model.sample.internal_id), model.sample,)\n )\n if model.sample\n else u\"\"\n )", "def cal_link(self):\n return get_host() + reverse('events:detail', args=[self.event.id])", "def get_links(proj,exp):\n response = do_method(\"experiment.info\",\n {\"proj\":proj,\"exp\":exp,\"aspect\":\"links\"})\n check_response(response)\n return response['value']", "def exac_link(variant_obj):\n url_template = (\"http://exac.broadinstitute.org/variant/\"\n \"{this[chromosome]}-{this[position]}-{this[reference]}\"\n \"-{this[alternative]}\")\n return url_template.format(this=variant_obj)", "def test_02_export_links(self):\r\n self.signin(email=self.email_addr, password=self.password)\r\n res = self.app.get(self.url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"There should be a warning about adding a CKAN api Key\"\r\n assert dom.find(id=\"ckan_warning\") is not None, err_msg\r\n # Add a CKAN API key to the user\r\n u = db.session.query(User).filter_by(name=self.name).first()\r\n u.ckan_api = \"ckan-api-key\"\r\n db.session.add(u)\r\n db.session.commit()\r\n # Request again the page\r\n res = self.app.get(self.url, follow_redirects=True)\r\n err_msg = \"There should be two buttons to export Task and Task Runs\"\r\n dom = BeautifulSoup(res.data)\r\n assert dom.find(id=\"ckan_task\") is not None, err_msg\r\n assert dom.find(id=\"ckan_task_run\") is not None, err_msg", "def get_absolute_url(self):\n # return reverse('tutor-detail', args=[str(self.email_address)])\n return reverse('school-detail', args=[str(self.slug)])", "def test_client_risk_assessment_list(self):\n pass", "def referee_evaluate_synopsis(request):\n if not validate_request(request): return redirect(reverse(URL_FORBIDDEN))\n \n user = auth.get_user(request)\n referee = Referee.objects.get(user = user)\n \n if request.method == \"GET\":\n all_thesis = [] # list of dict\n 
\n for panelMember in PanelMember.objects.filter(referee = referee).filter(status = 'S'):\n thesis = panelMember.thesis\n dict = {}\n dict['title'] = thesis.title\n dict['student_full_name'] = thesis.student.first_name + \" \" + thesis.student.last_name\n dict['synopsis'] = thesis.synopsis\n dict['student_username'] = thesis.student.user.username\n dict['id'] = thesis.id\n \n all_thesis.append(dict)\n \n return render(request, 'app/referee/evaluate_synopsis.html', {\n 'title':'Unevaluated PhD Synopsis',\n 'layout_data' : get_layout_data(request),\n 'all_thesis' : all_thesis\n })\n else:\n return redirect(reverse(URL_BAD_REQUEST))", "def get_absolute_url(self):\n return reverse('trait_browser:harmonized:traits:detail', kwargs={'pk': self.pk})", "def getExpandedLinks():", "def test_with_links(self):\n self.result.figure_link = 'some_link'\n self.result.start_figure_link = 'other_link'\n figure_link, start_link = fitting_report.get_figure_paths(self.result)\n self.assertEqual(figure_link, os.path.join('figures', 'some_link'))\n self.assertEqual(start_link, os.path.join('figures', 'other_link'))", "def get_absolute_url(self):\n\n return reverse('caretaker-detail', args=[str(self.id)])", "def get_success_url(self):\n url = reverse(\n \"qa:question-create\"\n ) + \"?success=true\"\n return url", "def menu_python_for_artists(self, event=None):\n self.link('http://spe.pycs.net/stories/6.html')", "def show_a():\n\n page = request.args.get('page', 1, type=int)\n abilities_ids = Ability.query.order_by(Ability.id.asc()).paginate(\n page, current_app.config['PAGE_ITEMS'], False)\n\n a = \"myA000\"\n abilities_list = [(f'{a}{i.id}' if (i.id < 10) else f'{\"myA00\"}{i.id}'\n if (i.id < 100) else f'{\"myA0\"}{i.id}', i.description) for i in abilities_ids.items]\n verA = True\n\n fileDir = os.path.dirname(os.path.realpath('__file__'))\n # me tengo que meter a la ruta base/cyber_role y ejecutar este endpoint\n file_json = 'cyber_role/KSAT_JSON/Abilities.json'\n\n if not isfile(join(fileDir, file_json)):\n file_json = 'KSAT_JSON/Abilities.json'\n\n with open(file_json) as file:\n # Obtenemos el json del fichero\n data = json.load(file)\n\n equivalencia_nist = {}\n\n # ya tenemos el diccionario del nist, original\n values = list(data.values())\n keys = list(data.keys())\n\n for i in abilities_ids.items:\n if i.description in values:\n equivalencia_nist[i.id] = keys[values.index(i.description)]\n\n\n return render_template('general/ksat.html', title='Abilities',\n lista_A=abilities_ids, l_A=abilities_list, verA=verA,\n l_eq=list(equivalencia_nist.values()))", "def get_success_url(self):\n msg = _(\"Added subject '%s'\") % self.object.__str__()\n messages.success(self.request, msg)\n\n return reverse('dashboard:questions-list')", "def get_course_page_urls(self,soup):\n\t\tcourse_links =[]\n\t\troot_url = 'http://onlinelearning.cornell.edu'\n\t\tfor link in soup.select('span.field-content a[href]'):\n\t\t\tnew_url = root_url + link['href']\n\t\t\tcourse_links.append(new_url)\n\t\t\tcourse_links.append(' \\n')\n\t\t\n\t\tself.new_list.append(course_links)\n\t\treturn course_links", "def get_absolute_url(self):\n\t\treturn reverse('source-detail', args=[str(self.id)])", "def test_ai_assessment(self):\r\n\r\n # Navigate to the AI-assessment problem and submit an essay\r\n self.course_nav.go_to_sequential('AI-Assessed')\r\n self.submit_essay('ai', 'Censorship in the Libraries')\r\n\r\n # Refresh the page to get the updated feedback\r\n # then verify that we get the feedback sent by our stub XQueue implementation\r\n 
self.assertEqual(self.get_asynch_feedback('ai'), ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # First score is the self-assessment score, which we haven't answered, so it's 0/2\r\n # Second score is the AI-assessment score, which we have answered, so it's 1/2\r\n # Third score is peer-assessment, which we haven't answered, so it's 0/2\r\n self.assertEqual(scores, [(0, 2), (1, 2), (0, 2)])", "def Prolinks(promotion_label):\n return prolinks", "async def links(self, ctx):\n await ctx.send(\"https://discordapp.com/channels/566451504332931073/681617252814159904/755489156146397311\")", "def link(self):\n if self.resource is None:\n self.resource = self.client.get_resource(self.href)\n self.client.post_linked_resource(\n self.resource, RelationType.LINK_TO_TEMPLATE,\n EntityType.ROLE.value, None)", "def get_absolute_url(self):\n return reverse('articulo-detalle', args=[str(self.id)])", "def get_absolute_url(self):\n return reverse('articulo-detalle', args=[str(self.id)])", "def get_absolute_url(self):\n return reverse('questionSchema-detail', args=[str(self.questionId)])", "def answer_url(self, obj):\n request = self.context.get(\"request\")\n return reverse(\"answer-detail\", args=[obj.id], request=request)", "def test_related_links(self, client, derrida):\n # add a github link for derrida\n github = RelatedLinkType.objects.get_or_create(name=\"GitHub\")[0]\n derrida_gh = ProjectRelatedLink.objects.create(\n project=derrida,\n type=github,\n url=\"https://github.com/princeton-CDH/derrida-django\",\n )\n\n # should display link type and create a link using URL\n response = client.get(derrida.get_url())\n assertContains(response, '<a href=\"%s\">GitHub</a>' % derrida_gh.url, html=True)", "def append_links(self, lines, lang):\n lines.append(\"verbatim &nbsp;\")\n lines.append(\"section Links\")\n lines.append(\"external http://polcasaglia.blogspot.com Blog\")\n lines.append(\"external http://www.uisp-fe.it/calcio.php UISP\" )\n lines.append(\"verbatim &nbsp;\")\n return lines", "def get_absolute_url(self):\n return reverse('patient-detail', args=[str(self.id)])", "def get_success_url(self):\n return reverse('overseasinvoice-list')", "def get_success_url(self):\n return reverse('overseasinvoice-list')", "def goto_guidelines(self):\n\n self.guidelines.click()", "def test_announcement_view(self):\n response = self.client.get(url_for('main.announcements'))\n self.assertEqual(response.status_code, 200)", "async def _landing_url(self, responses: SourceResponses) -> URL:\n landing_url = await super()._landing_url(responses)\n return (\n URL(f\"{landing_url}/ViewerMain.aspx?scanId={self._scan_id}&ProjectID={self.__project_id}\")\n if responses\n else landing_url\n )", "def test_redirects_to_study_detail_page(self):\n dataset = factories.SourceDatasetFactory.create()\n # We need to create some traits so the detail page renders properly.\n source_traits = factories.SourceTraitFactory.create_batch(\n 10, source_dataset__source_study_version__i_is_deprecated=False,\n source_dataset=dataset)\n response = self.client.post(self.get_url(), {'object': dataset.pk})\n self.assertRedirects(response, reverse('trait_browser:source:datasets:detail', args=[dataset.pk]))", "def overview():\n # TODO: fix ajax https://groups.google.com/d/msg/web2py/YyVilc2ywdg/ZLtN3Gg3Ft0J\n # TODO: fix ?plain link in results\n from plugin_introspect import get_task_code\n lesson = request.args[0] # controller 
with lesson contents\n # lesson = request.vars.lesson_controller # controller with lesson contents\n fun_names = exposed_functions_names( controller=lesson )\n exposed_functions = generate_exposed_functions_info( controller=lesson )\n examples_codes = [ get_task_code(code=exposed_functions[f]['code'], task_key=lesson+'/'+f, decorate=True) for f in fun_names ]\n results_urls = [ URL(lesson, f, vars=dict(plain=1)) for f in fun_names ]\n return response.render('tutor.html', dict(lesson=lesson, fun_names=fun_names, examples_codes=examples_codes, results_urls=results_urls) )", "def href(self, request) -> str:\n return request.route_path(self.url)", "def get_absolute_url(self):\n return reverse('book_details', args=[str(self.id)])", "def get_absolute_url(self):\n return reverse('tour-review', args=[str(self.id)])", "def _generate_context_link(\n context: mlrun.MLClientCtx, link_text: str = \"view in MLRun\"\n ) -> str:\n return (\n '<a href=\"{}/{}/{}/jobs/monitor/{}/overview\" target=\"_blank\">{}</a>'.format(\n config.resolve_ui_url(),\n config.ui.projects_prefix,\n context.project,\n context.uid,\n link_text,\n )\n )", "def test_page_links(inspire_app):\n create_record(\"lit\", data={\"titles\": [{\"title\": \"Solenoid\"}]})\n create_record(\"lit\", data={\"titles\": [{\"title\": \"Solenoid\"}]})\n with inspire_app.test_client() as client:\n # Limit records\n response = client.get(\n \"/api/literature\", query_string=dict(size=1, page=1, q=\"Solenoid\")\n )\n response_json = response.json\n assert len(response_json[\"hits\"][\"hits\"]) == 1\n\n data = response_json[\"links\"]\n assert \"self\" in data\n assert \"next\" in data\n assert \"prev\" not in data\n\n # Assert next URL before calling it\n first_url = data[\"self\"]\n next_url = data[\"next\"]\n parsed_url = parse_url(next_url)\n assert parsed_url[\"qs\"][\"size\"] == [\"1\"]\n assert parsed_url[\"qs\"][\"page\"] == [\"2\"]\n\n # Access next URL\n response = client.get(next_url)\n response_json = response.json\n assert len(response_json[\"hits\"][\"hits\"]) == 1\n data = response.json[\"links\"]\n assert data[\"self\"] == next_url\n assert \"next\" not in data\n assert \"prev\" in data and data[\"prev\"] == first_url", "def link_dihedra(self, verbose: bool = ...) -> None:\n ...", "def informative_url(self):\n # print\n # print \"========================================\"\n # print \"checking\", self\n # print \"is_uninformative\", self.is_uninformative\n # print \"self.parent\", self.parent\n #\n if self.is_uninformative and self.parent and self.parent.series:\n # print self, \"parent!\"\n return self.parent.get_absolute_url()\n else:\n # print self, \"self!\"\n return self.get_absolute_url()", "def get_absolute_url(self):\n return reverse(\"jewelry_detail\", args = [str(self.id)])" ]
[ "0.6158211", "0.5980082", "0.5797147", "0.57784724", "0.5716922", "0.5695942", "0.56922674", "0.56786233", "0.5670207", "0.5577552", "0.5543779", "0.5527378", "0.5521695", "0.5521499", "0.5518003", "0.55149287", "0.551347", "0.5483872", "0.5457637", "0.5448972", "0.5433619", "0.5413031", "0.54111266", "0.5382787", "0.53820586", "0.5381424", "0.53743935", "0.53675365", "0.5366903", "0.5363985", "0.53630555", "0.53624576", "0.53348917", "0.5332608", "0.5330953", "0.5328219", "0.53257316", "0.5323386", "0.53229153", "0.53177285", "0.53067684", "0.53060734", "0.5304677", "0.52998227", "0.5297095", "0.52908933", "0.52872026", "0.5271065", "0.52690965", "0.52690965", "0.5266973", "0.5264429", "0.5248873", "0.52428645", "0.5242374", "0.5242047", "0.52419835", "0.5240671", "0.52264196", "0.52155995", "0.52136683", "0.5204096", "0.52005696", "0.51957417", "0.5188652", "0.5185752", "0.5185724", "0.51848966", "0.5184891", "0.5176531", "0.5174543", "0.51745105", "0.51722246", "0.51691514", "0.5165394", "0.515715", "0.51571476", "0.5149103", "0.5149103", "0.5148693", "0.5146626", "0.5145895", "0.51400495", "0.51386863", "0.51381797", "0.51381797", "0.513753", "0.5129031", "0.51180667", "0.51158375", "0.51146156", "0.5113937", "0.511293", "0.51050794", "0.5102481", "0.51007295", "0.5097632", "0.50967073", "0.5093146" ]
0.65279543
0
Describes properties of an assessment metadata.
def metadata(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentMetadataPropertiesResponse']]: return pulumi.get(self, "metadata")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def metadata(self) -> global___SummaryMetadata:", "def metadata(self) -> Optional[pulumi.Input['SecurityAssessmentMetadataPropertiesArgs']]:\n return pulumi.get(self, \"metadata\")", "def get_assessment_metadata(self):\n return Metadata(**settings.METADATA['assessment_id'])", "def describe(self):\n response = check_defined(self, inspect.stack()[0][3])\n if not response:\n return response\n property_info = {'child_properties': self.child_properties,\n 'descendant_properties': self.descendant_properties,\n 'parent_properties': self.parent_properties,\n 'domain': self.domain,\n 'range': self.range,\n 'uri': self.uri,\n 'label': self.label,\n 'description': self.description}\n return property_info", "def metadata(self) -> Mapping[str, str]:\n return pulumi.get(self, \"metadata\")", "def _metadata(self):\n meta = super()._metadata\n meta.update({\n \"name\": self.name,\n \"lead_in_time\": self.lead_in_time,\n \"amplification\": self.amplification,\n \"amplifier_clipping\": self.amplifier_clipping,\n \"power_threshold\": self.power_threshold,\n })\n return meta", "def description(self):\n desc = self.title\n ops = []\n for attribute in self.attributes.all():\n value = attribute.value\n if isinstance(value, list):\n ops.append(\n \"%s = '%s'\" % (attribute.type, (\", \".join([str(v) for v in value])))\n )\n else:\n ops.append(\"%s = '%s'\" % (attribute.type, value))\n if ops:\n desc = \"%s (%s)\" % (desc, \", \".join(ops))\n return desc", "def metadata(self):\n return {\n \"wildtype\" : self.wildtype,\n \"genotypes\" : self.genotypes,\n \"phenotypes\" : self.Raw.phenotypes,\n \"stdeviations\" : self.stdeviations,\n \"n_replicates\" : self.n_replicates,\n \"mutations\" : self.mutations,\n \"log_transform\" : self.log_transform,\n \"order\" : self.order,\n \"epistasis\" : {\n \"keys\" : self.epistasis.keys,\n \"values\" : self.epistasis.values,\n }\n }", "def describe(self):\r\n mdataset_description = {\r\n 'kind': \"HConteiner\",\r\n 'compliance': self._compliance,\r\n 'has_encryption': self.has_encryption,\r\n 'encryption': self._encryption,\r\n 'sensitive': self._sensitive,\r\n 'license': self._license,\r\n }\r\n verbose_event()\r\n return mdataset_description", "def propertyDetails(self):\n return (PROPERTY_DETAILS.get(aa, NONE) for aa in self.sequence)", "def get_assessments_metadata(self):\n return Metadata(**settings.METADATA['assessment_ids'])", "def description(self) -> str:\n return self._search_in_properties(ATTR_DESCRIPTION)", "def generate_property_template(self):\n template = {\n \"@id\": \"url or curie of the property\",\n \"@type\": \"rdf:Property\",\n \"rdfs:comment\": \"description of the property\",\n \"rdfs:label\": \"carmel case, should match @id\",\n \"schema:domainIncludes\": {\n \"@id\": \"class which use it as a property, could be list\"\n },\n \"schema:isPartOf\": {\n \"@id\": \"http://schema.biothings.io\"\n },\n \"schema:rangeIncludes\": {\n \"@id\": \"relates a property to a class that constitutes (one of) the expected type(s) for values of the property\"\n }\n }\n return template", "def metadata(self): # -> None:\n ...", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def __repr__(self):\n return 
self._metadata.__str__()", "def _short_info(self) -> str:\n nullable = \"Nullable \" if self._is_nullable else \"\"\n\n # Good candidate for python pattern matching once <3.10 support no longer required\n num_metadata_items = len(self.__metadata)\n if num_metadata_items == 0:\n metadata = \"\"\n elif num_metadata_items == 1:\n metadata = f\" [with {num_metadata_items} metadata item]\"\n else:\n metadata = f\" [with {num_metadata_items} metadata items]\"\n\n return f\"<{nullable}{self.__class__.__name__}{metadata}: {self._resolve_field_name()}>\"", "def metadata(self) -> dict:\n meta = {}\n meta['filename'] = self.filename\n meta['label'] = self.label\n meta['url'] = self.url\n\n return meta", "def meta(self):\n title = 'Месторасположение: {0}'.format(self.object.emplacement)\n return {\n 'title': title\n }", "def __metadata__(self):\n raise NotImplementedError", "def metadata(self) -> dict:\n meta = {}\n meta['name'] = self.name\n meta['id'] = self.id\n meta['family'] = self.family\n \n meta['ptd_type'] = []\n meta['pos'] = []\n meta['atype'] = []\n meta['db_vect'] = []\n meta['scale'] = []\n for cp in self.parameters:\n meta['ptd_type'].append(cp.get('ptd_type', None))\n meta['pos'].append(cp.get('pos', None))\n meta['atype'].append(cp.get('atype', None))\n meta['db_vect'].append(cp.get('db_vect', None))\n meta['scale'].append(cp.get('scale', None))\n \n return meta", "def description(self):", "def describe(self):\n raise NotImplementedError()", "def describe(self):\n raise NotImplementedError()", "def summarize_metadata(self):\n meta_dict = {}\n for comp in self.dataset.data_vars:\n for mkey, mvalue in self.dataset[comp].attrs.items():\n meta_dict[f\"{comp}.{mkey}\"] = mvalue\n\n return meta_dict", "def describe(self) -> str:", "def __repr__(self):\n\n return self._metadata.__str__()", "def description(self):\n pass", "def description(self):\n pass", "def _describe(self) -> Dict[str, Any]:", "def getDescription(self):\n raise NotImplementedError", "def description(self):\n if \"description\" in self._prop_dict:\n return self._prop_dict[\"description\"]\n else:\n return None", "def metadata(self) -> global___SnippetConfigMetadata:", "def describe(self) -> Text:\n return self.__repr__()", "def meta(self):\n title = 'Оборудование: {0}'.format(self.object.value)\n return {\n 'title': title\n }", "def GetMetadata(self):\n return self.dict['meta']", "def metadata(self):\r\n return resources.Metadata(self)", "def details(self) -> str:\n return f\"- **language**: [{self.language}]\\n\" \\\n f\"- **opengame**: [{self.opengame}]\\n\" \\\n f\"- **system**: [{self.system}]\\n\" \\\n f\"- **mode**: [{self.mode}]\\n\" \\\n f\"- **attributes**: [{self.attributes}]\\n \" \\\n f\"- **score_threshold**: [{self.score_threshold}]\\n \" \\\n f\"- **monsters**: [{self.monsters}]\\n\"", "def metadata(self) -> Mapping[str, str]:\r\n return self._metadata", "def metadata(self) -> Mapping[str, str]:\r\n return self._metadata", "def add_metadata_properties(self, sentence, result):\r\n for property in sentence.properties:\r\n if property.property_metadata.is_category:\r\n result[property.name] = property.value", "def describe(self):\n print(self.description)", "def describe(self):\n print(self.description)", "def get_description(self):\n raise NotImplementedError", "def get_description(self):", "def get_description(self):\n return \"It is an Entity.\"", "def metadata(self):\n metadata = dict([(key,{}) for key in self.keys])\n for day in self.days:\n metadata[\"Days\"].append(day.attrs)\n for period in day.period:\n 
metadata[\"Periods\"].append(period.attrs)\n for course in period.courses:\n metadata[\"Courses\"].append(course.attrs)\n for instructor in course.instructor:\n metadata[\"Instructors\"].append(instructor.attrs)\n return metadata", "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def properties(self):\n raise NotImplementedError", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description(self) -> str:\n return pulumi.get(self, \"description\")", "def description():", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def description(self) -> str:\n raise NotImplementedError", "def get_meta_str(self):\n s = []\n t = \"%-32s: %s\"\n s.append(t % (\"Edition\", self._meta.get(\"edition\", \"---\")))\n s.append(t % (\"Master-table\", self._meta.get(\"master\", \"---\")))\n cc = self._meta.get(\"center\", \"---\")\n cs = self._meta.get(\"subcenter\", \"---\")\n if self._tables is not None:\n cc = self._tables.lookup_codeflag(1033, cc)\n cs = self._tables.lookup_codeflag(1034, cs)\n s.append(t % (\"Centre\", cc))\n s.append(t % (\"Sub-Centre\", cs))\n s.append(t % (\"Update sequence number\", self._meta.get(\"update\", \"---\")))\n s.append(t % (\"Type of data\", (\"observed\" if 
self._meta.get(\"obs\", 0) else \"other\")))\n dc = self._meta.get(\"cat\", \"---\")\n if self._tables is not None:\n dc = self._tables.lookup_common(dc)\n s.append(t % (\"Data category\", dc))\n s.append(t % (\"International data sub-category\", self._meta.get(\"cat_int\", \"---\")))\n s.append(t % (\"Local data sub-category\", self._meta.get(\"cat_loc\", \"---\")))\n s.append(t % (\"Version number of master table\", self._meta.get(\"mver\", \"---\")))\n s.append(t % (\"Version number of local table\", self._meta.get(\"lver\", \"---\")))\n s.append(t % (\"Most typical time\", self._meta.get(\"datetime\", \"---\")))\n s.append(t % (\"Optional section present\", (\"yes\" if self._meta.get(\"sect2\", False) else \"no\")))\n s.append(t % (\"Compression\", (\"yes\" if self._meta.get(\"comp\", False) else \"no\")))\n s.append(t % (\"Number of data subsets\", self._meta.get(\"subsets\", \"---\")))\n return \"\\n\".join(s)", "def test_model_metadata_values(self):\n self.assertEqual(self.meta['author'], 'Giang Nguyen, Stefan Dlugolinsky')\n self.assertEqual(self.meta['author-email'], '[email protected], [email protected]')", "def get_metadata(self):\n meta_data = {}\n if self.beam_energy is not None:\n meta_data['beam_energy'] = self.beam_energy\n if self.collection_angle is not None:\n meta_data['collection_angle'] = self.collection_angle\n return meta_data", "def describe(self):\n\n ret = []\n ret.append(\"Functional ID: %s\" % self._number)\n ret.append(\"Functional Name: %s\" % self._xc_func_name)\n ret.append(\"Attributes:\")\n ret.append(\" Name: %s\" % self._name)\n ret.append(\" Kind: %d\" % self._kind)\n ret.append(\" Family: %d\" % self._family)\n ret.append(\"Citations:\")\n for x in self._refs:\n ret.append(\" \" + x)\n\n return \"\\n\".join(ret)", "def testDescription(self):\n dis_meta = DiseaseMeta()\n\n self.util.stringTypeTest(self, dis_meta, \"description\")\n\n self.util.stringPropertyTest(self, dis_meta, \"description\")", "def get_metadata(self):\n metadata = NeuroscopeRecordingInterface.get_ecephys_metadata(\n xml_file_path=get_xml_file_path(data_file_path=self.source_data['folder_path'])\n )\n metadata['Ecephys'].update(\n ElectricalSeries=dict(\n name='ElectricalSeries',\n description=\"Raw acquisition traces.\"\n )\n )\n\n return metadata", "def get_metadata(self):\n metadata = NeuroscopeRecordingInterface.get_ecephys_metadata(\n xml_file_path=get_xml_file_path(data_file_path=self.source_data['file_path'])\n )\n metadata['Ecephys'].update(\n ElectricalSeries=dict(\n name='ElectricalSeries',\n description=\"Raw acquisition traces.\"\n )\n )\n\n return metadata", "def meta(self, name=None, text_key=None, axis_edit=None):\n if not name:\n return self._meta\n else:\n return self.describe(name, text_key=text_key, axis_edit=axis_edit)", "def reportProperties():", "def get_description(self):\n pass", "def description(self) -> str:\n pass", "def __str__(self):\n return self._metadata.__str__()", "def metadata_reporter(self):\n logging.info('Creating summary report')\n header = '{}\\n'.format(','.join(self.headers))\n # Create a string to store all the results\n data = str()\n for sample in self.metadata:\n # Add the value of the appropriate attribute to the results string\n data += GenObject.returnattr(sample, 'name')\n # SampleName\n data += GenObject.returnattr(sample.run, 'SamplePlate')\n # Genus\n data += GenObject.returnattr(sample.general, 'closestrefseqgenus')\n # SamplePurity\n data += GenObject.returnattr(sample.confindr, 'num_contaminated_snvs')\n # N50\n n50 = 
GenObject.returnattr(sample.quast, 'N50',\n number=True)\n if n50 != '-,':\n data += n50\n else:\n data += '0,'\n # NumContigs\n data += GenObject.returnattr(sample.quast, 'num_contigs',\n number=True)\n # TotalLength\n data += GenObject.returnattr(sample.quast, 'Total_length',\n number=True)\n # MeanInsertSize\n data += GenObject.returnattr(sample.quast, 'mean_insert',\n number=True)\n # InsertSizeSTD\n data += GenObject.returnattr(sample.quast, 'std_insert',\n number=True)\n # AverageCoverageDepth\n data += GenObject.returnattr(sample.qualimap, 'MeanCoveragedata',\n number=True)\n # CoverageDepthSTD\n data += GenObject.returnattr(sample.qualimap, 'StdCoveragedata',\n number=True)\n # PercentGC\n data += GenObject.returnattr(sample.quast, 'GC',\n number=True)\n # MASH_ReferenceGenome\n data += GenObject.returnattr(sample.mash, 'closestrefseq')\n # MASH_NumMatchingHashes\n data += GenObject.returnattr(sample.mash, 'nummatches')\n # 16S_result\n data += GenObject.returnattr(sample.sixteens_full, 'sixteens_match')\n # 16S PercentID\n data += GenObject.returnattr(sample.sixteens_full, 'percent_id')\n # CoreGenesPresent\n data += GenObject.returnattr(sample.gdcs, 'coreresults')\n # rMLST_Result\n try:\n # If the number of matches to the closest reference profile is 53, return the profile number\n if sample.rmlst.matches == 53:\n if type(sample.rmlst.sequencetype) is list:\n rmlst_seq_type = ';'.join(sorted(sample.rmlst.sequencetype)).rstrip(';') + ','\n else:\n rmlst_seq_type = GenObject.returnattr(sample.rmlst, 'sequencetype')\n rmlst_seq_type = rmlst_seq_type if rmlst_seq_type != 'ND,' else 'new,'\n data += rmlst_seq_type\n else:\n # Otherwise the profile is set to new\n data += 'new,'\n except AttributeError:\n data += 'new,'\n # MLST_Result\n try:\n if sample.mlst.matches == 7:\n if type(sample.mlst.sequencetype) is list:\n mlst_seq_type = ';'.join(sorted(sample.mlst.sequencetype)).rstrip(';') + ','\n else:\n mlst_seq_type = GenObject.returnattr(sample.mlst, 'sequencetype')\n mlst_seq_type = mlst_seq_type if mlst_seq_type != 'ND,' else 'new,'\n data += mlst_seq_type\n else:\n data += 'new,'\n except AttributeError:\n data += 'new,'\n # MLST_gene_X_alleles\n try:\n # Create a set of all the genes present in the results (gene name split from allele)\n gene_set = {gene.split('_')[0] for gene in sample.mlst.combined_metadata_results}\n for gene in sorted(gene_set):\n allele_list = list()\n # Determine all the alleles that are present for each gene\n for allele in sample.mlst.combined_metadata_results:\n if gene in allele:\n allele_list.append(allele.replace(' ', '_'))\n # If there is more than one allele in the sample, add both to the string separated by a ';'\n if len(allele_list) > 1:\n data += '{},'.format(';'.join(allele_list))\n # Otherwise add the only allele\n else:\n data += allele_list[0] + ','\n # If there are fewer than seven matching alleles, add a ND for each missing result\n if len(gene_set) < 7:\n data += (7 - len(gene_set)) * 'ND,'\n except AttributeError:\n # data += '-,-,-,-,-,-,-,'\n data += 'ND,ND,ND,ND,ND,ND,ND,'\n # E_coli_Serotype\n try:\n # If no O-type was found, set the output to be O-untypeable\n if ';'.join(sample.ectyper.o_type) == '-':\n otype = 'O-untypeable'\n else:\n otype = sample.ectyper.o_type\n # Same as above for the H-type\n if ';'.join(sample.ectyper.h_type) == '-':\n htype = 'H-untypeable'\n\n else:\n htype = sample.ectyper.h_type\n serotype = '{otype}:{htype},'.format(otype=otype,\n htype=htype)\n # Add the serotype to the data string unless neither 
O-type not H-type were found; add ND instead\n data += serotype if serotype != 'O-untypeable:H-untypeable,' else 'ND,'\n except AttributeError:\n data += 'ND,'\n # SISTR_serovar_antigen\n data += GenObject.returnattr(sample.sistr, 'serovar_antigen').rstrip(';')\n # SISTR_serovar_cgMLST\n data += GenObject.returnattr(sample.sistr, 'serovar_cgmlst')\n # SISTR_serogroup\n data += GenObject.returnattr(sample.sistr, 'serogroup')\n # SISTR_h1\n data += GenObject.returnattr(sample.sistr, 'h1').rstrip(';')\n # SISTR_h2\n data += GenObject.returnattr(sample.sistr, 'h2').rstrip(';')\n # SISTR_serovar\n data += GenObject.returnattr(sample.sistr, 'serovar')\n # GeneSeekr_Profile\n try:\n if sample.genesippr.report_output:\n data += ';'.join(sample.genesippr.report_output) + ','\n else:\n data += 'ND,'\n except AttributeError:\n data += 'ND,'\n # Vtyper_Profile\n data += GenObject.returnattr(sample.verotoxin, 'verotoxin_subtypes_set')\n # AMR_Profile and resistant/sensitive status\n if sample.resfinder_assembled.pipelineresults:\n # Profile\n for resistance, resistance_set in sorted(sample.resfinder_assembled.pipelineresults.items()):\n data += '{res}({r_set});'.format(res=resistance.replace(',', ';'),\n r_set=';'.join(sorted(list(resistance_set))))\n data += ','\n # Resistant/Sensitive\n data += 'Resistant,'\n else:\n # Profile\n data += 'ND,'\n # Resistant/Sensitive\n data += 'Sensitive,'\n # Plasmid Result'\n if sample.mobrecon.pipelineresults:\n for plasmid, details in sorted(sample.mobrecon.pipelineresults.items()):\n data += '{plasmid}({details});'.format(plasmid=plasmid,\n details=details)\n data += ','\n else:\n data += 'ND,'\n # TotalPredictedGenes\n data += GenObject.returnattr(sample.prodigal, 'predictedgenestotal',\n number=True)\n # PredictedGenesOver3000bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesover3000bp',\n number=True)\n # PredictedGenesOver1000bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesover1000bp',\n number=True)\n # PredictedGenesOver500bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesover500bp',\n number=True)\n # PredictedGenesUnder500bp\n data += GenObject.returnattr(sample.prodigal, 'predictedgenesunder500bp',\n number=True)\n # AssemblyDate\n data += datetime.now().strftime('%Y-%m-%d') + ','\n # PipelineVersion\n data += self.commit + ','\n # Name of the database used in the analyses\n data += os.path.split(self.reffilepath)[-1] + ','\n # Database download date\n data += self.download_date\n # Append a new line to the end of the results for this sample\n data += '\\n'\n # Replace any NA values with ND\n cleandata = data.replace('NA', 'ND')\n with open(os.path.join(self.reportpath, 'combinedMetadata.csv'), 'w') as metadatareport:\n metadatareport.write(header)\n metadatareport.write(cleandata)", "def properties(self) -> Optional[str]:\n return pulumi.get(self, \"properties\")", "def metadata(self):\n return {\n \"namespace\": self.namespace,\n \"short_name\": f\"{self.namespace}_{self._dataset_metadata['DatasetCode']}\",\n \"name\": f\"{self._dataset_metadata['DatasetName']} - FAO ({self.publication_year})\",\n \"description\": self._dataset_metadata[\"DatasetDescription\"],\n \"source_name\": \"Food and Agriculture Organization of the United Nations\",\n \"publication_year\": int(self.publication_year),\n \"publication_date\": self._dataset_metadata[\"DateUpdate\"],\n \"date_accessed\": str(dt.date.today()),\n \"url\": self.url,\n \"source_data_url\": self.source_data_url,\n \"file_extension\": \"zip\",\n }" ]
[ "0.64961946", "0.64541817", "0.6290093", "0.6145428", "0.6084069", "0.6051594", "0.60471356", "0.5994781", "0.5933389", "0.59084606", "0.58996195", "0.5870387", "0.5848383", "0.5837634", "0.5826372", "0.5826372", "0.5818724", "0.58181715", "0.5814273", "0.5807516", "0.58013815", "0.57757455", "0.576439", "0.57614464", "0.57614464", "0.57312673", "0.5714556", "0.5714087", "0.56985253", "0.56985253", "0.56906027", "0.567607", "0.56658524", "0.5664016", "0.5653505", "0.56488395", "0.5630287", "0.5630278", "0.56219167", "0.56204927", "0.56204927", "0.5615076", "0.56134796", "0.56134796", "0.5605538", "0.5592627", "0.55762833", "0.5575389", "0.55743563", "0.55727476", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.557214", "0.5558656", "0.55581427", "0.55581427", "0.55581427", "0.55579007", "0.5557472", "0.555165", "0.55477333", "0.55310714", "0.55232775", "0.55124277", "0.55087674", "0.5502045", "0.5499669", "0.5498878", "0.54924434", "0.54868865", "0.5483703", "0.5479821" ]
0.6462961
1
Data regarding 3rd party partner integration
def partners_data(self) -> pulumi.Output[Optional['outputs.SecurityAssessmentPartnerDataResponse']]: return pulumi.get(self, "partners_data")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def info():\n if g.party_id is None:\n # No party is configured for the current site.\n abort(404)\n\n party = party_service.get_party(g.party_id)\n\n return {\n 'party': party,\n }", "def get(self,\n partner_id):\n abort(501)", "def get(self,\n partner_id):\n abort(501)", "def getPremiumInfo(self, authenticationToken):\r\n pass", "def partner_data(self) -> Optional['outputs.SecurityAssessmentMetadataPartnerDataResponse']:\n return pulumi.get(self, \"partner_data\")", "def investing(site):\n url = (\n \"https://www.widgets.investing.com/live-currency-cross-rates?\"\n + \"theme=darkTheme&cols=last&pairs=3,2111,2124,2126,650,962711,69,68\"\n )\n headers = {\n \"href\": (\n \"https://www.investing.com?utm_source=WMT&amp;utm_medium=referral&amp;\"\n + \"utm_campaign=LIVE_CURRENCY_X_RATES&amp;utm_content=Footer%20Link\"\n ),\n \"target\": \"_blank\",\n \"rel\": \"nofollow\",\n }\n try:\n session = requests.Session()\n session.headers = headers\n cfscrape_requests = cfscrape.create_scraper(sess=session)\n ret = cfscrape_requests.get(url, headers=headers, timeout=(15, 15)).text\n lines = ret.split('target=\"_blank\"')\n lines = [i.replace(\" \", \"\").replace(\",\", \"\") for i in lines]\n lines = [i for i in lines if \"askpid\" in i]\n lines = [i.split(\"hiddenFour\")[0] for i in lines]\n data = {\n item.split(\"</a>\")[0].replace(\">\", \"\"): item.split('last\">')[1]\n for item in lines\n }\n\n data = {k.replace(\"/\", \":\"): v.split(\"</div>\")[0] for k, v in data.items()}\n data = {k: float(v) for k, v in data.items()}\n data[\"USD:XAG\"] = 1 / data.pop(\"XAG:USD\")\n data[\"USD:XAU\"] = 1 / data.pop(\"XAU:USD\")\n data = refine_data(data)\n print(site, data)\n race_write(f\"{site}_forex.txt\", json_dumps(data))\n except:\n print(f\"{site} failed to load\")", "def get_data(ticket_info, logger):\n\n # This string will be returned and added as a comment to the SecureChange ticket\n return_str = \"\"\n\n # Log integration starting\n logger.info(\"Running '{}' integration\".format(__name__))\n\n try:\n\n for req in ticket_info['Requests']:\n for src in req['Sources']:\n if src['Private'] and src['Cidr'] == '32':\n return_str = '{}\\n\\n{}\\n\\n{}'.format(return_str, device_query(src['Ip'], fmt='text'), event_query(src['Ip'], fmt='text'))\n for dst in req['Destinations']:\n if dst['Private'] and dst['Cidr'] == '32':\n return_str = '{}\\n\\n{}\\n\\n{}'.format(return_str, device_query(dst['Ip'], fmt='text'), event_query(dst['Ip'], fmt='text'))\n\n\n except Exception as e:\n\n # Log the error and return an empty string\n logger.error(\"Error: {}\".format(e))\n return None\n\n # Log integration completing\n logger.info(\"{} integration completed\".format(__name__))\n\n # Return comment\n return return_str", "def get_oauth_data():", "def get_personal_info(self):\n self.get(\"INFO\",\"GetPersonalInfo\")\n response = self.send()\n return response", "def datapack_details(request):\n print 'get datapack details'\n\n context = request['context']\n print context\n try:\n telephone_number = first_entity_value(request['entities'], 'phone_number')\n with open(os.path.join(sys.path[0], \"app/wit/static/users.json\"), \"r\") as data_file:\n data = json.load(data_file)\n network_details = data[telephone_number]['data_details']\n print network_details\n\n\n\n reply = \"Our Initial Investigation shows that you're are currently using \" + network_details['network_services_available'] + \" and have subscribed for \" + network_details['network_services_subscribed'] + \".\"\n if 
network_details['megabytes_available'] == 0:\n reply += \" You have exhausted your datapack. Change your network settings to use pay2go plan or recharge now with available datapacks. Please check http://www.airtel.in/Airtel3G/tariff.html\"\n elif network_details['network_services_available'] != network_details['network_services_subscribed']:\n reply += \" Your subscribed datapack settings does not match with services available. Please change your network settings\"\n\n except:\n telephone_number = None\n reply = \"Your number is not subscribed with Airtel. Please contact your network operator for your query\"\n\n context['datapack'] = reply\n\n return context", "def get_account_details(self):\n pass", "def awaiting_payment(self):", "def get_infos(self):\n infos = dict()\n infos[\"dataset\"] = self.dataset_name\n infos[\"task\"] = \"separate_noisy\"\n infos[\"licenses\"] = [librispeech_license, tac_license]\n return infos", "def info(self):\n\n self.call(method='getInvoice', args=[self.reference_no, self.with_vat])", "def _production_partner(self, cr, uid, ids, prop, unknow_none, context=None):\n result = {}\n for prod in self.browse(cr, uid, ids, context=context):\n result[prod.id] = {\n 'partner_id':'',\n 'partner_rag_soc':'',\n }\n #import pdb;pdb.set_trace()\n if prod.origin:\n # ha trovato un dato nelle origini verifica che esista un ordine cliente e ne legge l'informazione\n cerca = [('name','=',prod.origin)]\n sale_ids = self.pool.get('sale.order').search(cr,uid,cerca)\n if sale_ids:\n riga_sale = self.pool.get('sale.order').browse(cr,uid,sale_ids)[0]\n result[prod.id]['partner_ref'] = riga_sale.partner_id.ref\n result[prod.id]['partner_rag_soc'] = riga_sale.partner_id.name\n return result", "def request_info(self):\r\n if self.use_http():\r\n self.enqueue_http_request(\"money/info\", {}, \"info\")\r\n else:\r\n self.send_signed_call(\"private/info\", {}, \"info\")", "def partner_get(self):\n try:\n document = mongo_module.mongo_find(self.partner_id, single=True)\n output = 'partner' if document else 'No data match'\n code = 200 if document else 204\n except Exception as err:\n document = None\n output = str(err)\n code = 400\n return output, document, code", "def get_profile_data(self, transceiver, placement):", "def getfundamentals(self, results):\n self.log(\"Retrieving fundamental phone information\")\n self.log(\"Phone serial number\")\n results['uniqueserial']=sha.new(self.get_esn()).hexdigest()\n results['groups']=self.get_groups()\n results['ringtone-index']=self.get_ringtone_index()\n results['wallpaper-index']=self.get_wallpaper_index()\n return results", "def _get_information(self):\n pass", "def getInfo():", "def onchange_partner_id(self\n ):\n if self._context is None:\n context = {}\n acc_part_brw = False\n acc_id = False\n rp_obj = self.env['res.partner']\n wh_line_obj = self.env['account.wh.src.line']\n\n if self.partner_id:\n #partner = rp_obj.browse(self.partner_id)\n acc_part_brw = rp_obj._find_accounting_partner(self.partner_id)\n if self.type and self.type in ('out_invoice', 'out_refund'):\n acc_id = acc_part_brw.property_account_receivable_id \\\n and acc_part_brw.property_account_receivable_id.id or False\n else:\n acc_id = acc_part_brw.property_account_payable_id \\\n and acc_part_brw.property_account_payable_id.id or False\n\n # part_brw = self.ids and rp_obj._find_accounting_partner(self.browse(\n # self, self.ids[0]).partner_id)\n wh_lines = self.ids and wh_line_obj.search(\n [('wh_id', '=', self.ids[0])])\n if not self.partner_id:\n if wh_lines:\n 
wh_line_obj.unlink(wh_lines)\n wh_lines = []\n if self.partner_id and acc_part_brw and self.partner_id.id != acc_part_brw.id:\n if wh_lines:\n wh_line_obj.unlink(wh_lines)\n wh_lines = []\n\n return {'value': {\n 'line_ids': wh_lines,\n 'account_id': acc_id,\n }\n }", "def get_details(self):", "def test_get_order_buyer_info(self):\n pass", "def party_id(self):\n pass", "def JointAccount(self):\n joint_accounts = []\n if self.IsGeneralPartner():\n for contact in self.__contact.Party().Contacts(): \n if contact.AdditionalInfo().RegGeneralPartner(): \n joint_accounts.append(contact)\n else:\n FRegulatoryLogger.WARN(logger, \"<%s> is not a General Partner. Hence JointAccount is None\"%self.__contact.Fullname())\n joint_accounts = None\n return joint_accounts", "def partners_data(self) -> Optional[pulumi.Input['SecurityAssessmentPartnerDataArgs']]:\n return pulumi.get(self, \"partners_data\")", "def get_data():\n pass", "def _get_config_data(self, cr, uid):\n\n model_conf = self.pool.get('customer.support.settings')\n args = [('selected', '=', True)] \n ids = model_conf.search(cr, uid, args)\n config = model_conf.browse(cr, uid, ids[0])\n\n return {\n 'tor_api_key': config.tor_api_key,\n 'tor_domain': config.tor_domain,\n 'company': config.company\n }", "def fhir_enquiry(request, context_override={}):\n\n state = get_state(CLIENT_ID,AUTH_URL)\n code = get_code(CLIENT_ID,AUTH_URL)\n\n # set default context\n context = {}\n context['template'] = \"result.html\"\n context['get_fmt'] = \"json\"\n context['display'] = \"Me\"\n context['code'] = code\n context['state'] = state\n context['ask'] = \"/api/v1/me?_format=json\"\n context['url'] = settings.OAUTH_TEST_INFO['BASE']\n context['headers'] = {'content-type': 'application/x-www-form-urlencoded',\n 'Authorization': \"Bearer \"+ get_code(CLIENT_ID, AUTH_URL)},\n\n # add / overwrite anything in context_override\n context = update_dict(context, context_override)\n\n data = {'code': code,\n 'grant_type': 'authorization_code',\n 'key': 'access_token',\n #'key': 'refresh_token',\n 'access_token': get_access(state),\n 'refresh_token': get_refresh(state),\n 'redirect_uri': REDIRECT_URI}\n\n if settings.DEBUG:\n print(\"Context after update:\", context)\n print(\"Data:\", data)\n\n print(\"SERVICE:\", SERVICE )\n\n # Get access_token\n headers = {}\n print('Context Headers:', dict(context['headers'][0]))\n #headers = {'headers': update_dict(headers, context_override=dict(context['headers'][0]))}\n headers = update_dict(headers, context_override=dict(context['headers'][0]))\n print(\"Headers:\", headers)\n\n kw_to_send = {'data': data, 'headers': headers}\n\n #session = SERVICE.get_auth_session(method=\"POST\",**kw_to_send)\n #session = SERVICE.get_session(get_access(state))\n #session = SERVICE.get_raw_access_token(method=\"POST\", **kw_to_send)\n session = SERVICE.get_raw_access_token(data=data)\n\n #response = SERVICE.get_access_token(method=\"POST\")\n # response = SERVICE.get_auth_session(data=data)\n print(\"Auth Session\", session)\n #response = SERVICE.get_raw_access_token(data=data, **headers)\n\n get_text = session.json()\n\n if 'access_token' in get_text:\n print(\"got an access token\")\n access = save_tokens(state,\n get_text['access_token'],\n get_text['refresh_token'])\n\n print(\"RESPONSE:\", get_text)\n # RESPONSE: {\"expires_in\": 36000,\n # \"access_token\": \"h1vY5eDu69JKfV4nPpdu8xEan63hKl\",\n # \"scope\": \"patient/*.read write_consent\",\n # \"token_type\": \"Bearer\",\n # \"refresh_token\": \"6HZnSwhfsGvfr9Aguw5n0e5CoGr8CQ\"}\n\n\n 
sesn = SERVICE.get_session(get_text['access_token'])\n print(\"SESSION:\", sesn)\n\n r = sesn.get(context['url'] + context['ask'])\n\n if settings.DEBUG:\n print(\"R:\", r.content)\n\n return r", "def user_data(self, token, *args, **kwargs):\n url = \"https://api.intra.42.fr/v2/me\"\n auth_header = {\"Authorization\": \"Bearer %s\" % token}\n try:\n return self.get_json(url, headers=auth_header)\n except ValueError:\n return None", "def get_bill_details(request):\n\n print request\n\n context = request['context']\n print context\n try:\n telephone_number = first_entity_value(request['entities'], 'phone_number')\n with open(os.path.join(sys.path[0], \"app/wit/static/users.json\"), \"r\") as data_file:\n data = json.load(data_file)\n customer_billing = data[telephone_number]['last_month_billing']\n print customer_billing\n\n customer_type = data[telephone_number]['type_customer']\n if customer_type == 'postpaid':\n\n reply = \"Our Initial Investigation shows that you're a \" + data[telephone_number]['type_customer'] + \" Customer and currently using \" + data[telephone_number]['plan_details'] + \" plan type.\"\n if customer_billing['roaming'] == 'True':\n reply += \"You had used your cellphone while on roaming for which you were charged extra.\"\n elif customer_billing['data_exhaust'] == 'True':\n reply += \"You had used your data network after your allocated limit was exhausted. You were charged for these services\"\n elif customer_billing['subscribed'] == 'True':\n reply += \"You had subscribed to some promotional services for which you were charged in extra.\"\n else:\n reply = \"Our Initial Investigation shows that you're a \" + data[telephone_number]['type_customer'] + \". We believe that this might be a mistake from our side and would like you to speak to our customer care executives separately.\"\n\n\n except:\n telephone_number = None\n reply = \"Your number is not subscribed with Airtel. 
Please contact your network operator for your query\"\n\n\n print reply\n\n context['bill_details'] = reply\n\n return context", "def inspect_incoming(self, cr, uid, phone_number, context=None):\n # Retrieve the default URL for the web client, use to:\n # - default URL when no action found\n # - Compose the URL to return to the customer\n ir_config_obj = self.pool.get('ir.config_parameter')\n url = ir_config_obj.get_param(cr, 1, 'web.base.url', 'http://localhost:8069')\n\n # Retrieve the context for the user\n user_obj = self.pool.get('res.users')\n user_context = user_obj.context_get(cr, uid, context=context)\n if context is not None:\n user_context.update(context)\n\n # Search address and partner for this phone number\n (partner_id, address_id) = self.find_partner_from_phone_number(cr, uid, phone_number, context=user_context)\n if not partner_id and not address_id:\n return url\n\n # Check if default action is define from the company\n company_id = user_obj.browse(cr, uid, uid, context=user_context).company_id\n if not company_id:\n return url\n\n company = self.pool.get('res.company').browse(cr, uid, company_id.id, context=user_context)\n if not company.cti_action_id:\n # No action found on this company\n return url\n\n # Check custom for this action\n current_act = self.browse(cr, uid, company.cti_action_id.id, context=user_context)\n if current_act.create_entry:\n entry_id = self.pool.get(current_act.model_id.model).create(cr, uid, {'partner_id': partner_id}, context=user_context)\n cr.commit()\n else:\n entry_id = partner_id or address_id\n\n return self._format_url_from_action(cr, uid, url, company.cti_action_id.model_id.model, company.cti_action_id.act_window_id.id, entry_id, context=user_context)", "def get_info(self, charger):\n data = {\n \"device_id\": self.uuid,\n \"cmd\": \"get_info\",\n \"token\": charger.token(),\n \"account_token\": self.api_token\n }\n headers = {\n \"Content-Type\": \"application/json\"\n }\n\n response = requests.post(\"{}/box_api_secure\".format(self.BASE_URL),\n data=json.dumps(data),\n headers=headers)\n response_json = response.json()\n return response_json", "def got_info(self, cloud_obj):", "def input_payment_details(self):\n pass", "def __init__(__self__, *,\n partner_name: str,\n secret: str):\n pulumi.set(__self__, \"partner_name\", partner_name)\n pulumi.set(__self__, \"secret\", secret)", "def rpc_info():", "def submitting_party(self):\n party = {\n 'businessName': self.name,\n 'address': address_utils.get_address_from_db2(self.legacy_address)\n }\n if self.phone_number:\n party['phoneNumber'] = self.phone_number\n return party", "def info(self):\n if self.integration is None:\n return None\n return self.integration.info", "def gen_CRM(call_text, response_text):\n pass", "def device_info(self) -> Dict[str, Any]:\n return {\n 'name': 'Boiler',\n 'identifiers': {\n (DOMAIN, self.toon.agreement.id, 'boiler'),\n },\n 'via_device': (DOMAIN, self.toon.agreement.id, 'boiler_module'),\n }", "def scrapeFollowingFromAnAccount():\n global api", "def test_get_pay_in_details(self):\n pass", "def get_data(self):", "def getPayment(self):\n pass", "def partner_name(self) -> str:\n return pulumi.get(self, \"partner_name\")", "def partner_name(self) -> str:\n return pulumi.get(self, \"partner_name\")", "def send_counterparty(self) -> None:\n object_ = self.objects[0]\n ticket_text = ''\n if 'сб' in object_.counterparty_name.lower() and self.keyword == 'closing':\n # order_id = sberinkas.main(\n # object_.object_SAP_code,\n # object_.object_address,\n # 
object_.lat,\n # object_.lon\n # )\n # ticket_text = f\"<br>Номер заявки на портале инкассация - {order_id}.\"\n pass\n\n body = '<p>Добрый день!<br><br>' \\\n f'Прошу принять в работу письмо на {self.letter_text}<br>' \\\n f'Скан подписанного письма вышлю позднее.{ticket_text}'\n if 'сб' in object_.counterparty_name.lower():\n self.send_sber_manager_service(body)\n else:\n self.sendmail(\n self.outlook,\n self.to,\n \"\",\n self.letter_name,\n body,\n self.attachment,\n 2\n )", "def get_another_number(request):\n \n user = request.user \n phone_number = generate_phn_number()\n \n user_email = user.username\n \n try: \n # Create stripe account\n stripe_customer = stripe.Customer.create(\n email = user_email\n )\n \n # Set a default card for account\n s_card = stripe.Customer.create_source(\n stripe_customer.id,\n source=\"tok_amex\",\n ) \n \n plan_id = \"price_1JsHMxSDkRo5FXlkOsq2QHSV\"\n\n # if data[\"subscription_plan\"]== \"Globalnet Silver\":\n # plan_id = \"price_1JsHOJSDkRo5FXlkQmfEQzhN\"\n \n # if data[\"subscription_plan\"]== \"Globalnet Gold\":\n # plan_id = \"price_1JsHPFSDkRo5FXlk9VSl41rV\"\n\n # Create a default subscription for customer \n subscription = stripe.Subscription.create(\n customer = stripe_customer.id,\n items = [{'plan':plan_id}]\n )\n \n \n start_date = datetime.datetime.now().strftime(\"%c\")\n end_date = (datetime.datetime.now() + datetime.timedelta(30)).strftime(\"%x\")\n\n subscription_plan = SubscriptionPlan.objects.get(subscription_plan_name=\"Globalnet Bronze\")\n \n # Create customer data\n customer_data = SecondaryNumber.objects.create(\n user = user,\n phn_number = phone_number,\n subscription_plan = subscription_plan,\n stripe_id = stripe_customer.id,\n start_date = start_date,\n end_date = end_date,\n subscription_id = subscription.id\n \n )\n \n # Entry Subscription data\n SubscriptionData.objects.create(\n subscriber = phone_number,\n subscription = subscription_plan.subscription_plan_name,\n subscription_start = start_date,\n subscription_end = end_date \n \n )\n \n \n serializer= SeconderyNumberSerializer(customer_data,many=False)\n return Response(serializer.data)\n\n except Exception as e:\n message = {\"detail\":str(e)}\n print(e)\n return Response(message)", "def reqData(self):\r\n #self.reqGlobalCancel()\r\n #self.add_historical(\"Stock('TSLA', 'SMART', 'USD')\")\r\n #self.add_historical(\"Stock('IBM', 'SMART', 'USD')\")\r\n #self.add_historical(\"Stock('MSFT', 'SMART', 'USD')\")\r\n self.add_historical(\"Stock('FB', 'SMART', 'USD')\")", "def get_info(self):\n pass", "def get_info(self):\n pass", "def local_bonds_prices():\n url1 = \"https://api.invertironline.com/token\"\n\n data = {\n \"username\": usuario,\n \"password\": password,\n \"grant_type\": \"password\" \n }\n response = requests.post(url1, data=data)\n if response.status_code == 200:\n content = response.text\n access_key = token_key(content)\n\n url2 = f'https://api.invertironline.com/api/v2/Cotizaciones/Bonos/Merval/argentina'\n datos = requests.get(url2, headers={\n 'Authorization': 'Bearer '+access_key\n })\n datos = json.loads(datos.text)\n datos = datos['titulos']\n datos = clean_assets(datos)\n return datos", "def get_info(user):\n from Game.models import Ownership\n response = {}\n wallet = Wallet.objects.get(user=user)\n response['liquid'] = wallet.liquid_with_loans\n value_wallet = wallet.liquid_with_loans\n ownerships = Ownership.objects.filter(wallet=wallet, quantity__gt=0)\n assets = []\n asset_communication = ACommunication(settings.API_URL)\n for o in 
ownerships:\n asset = asset_communication.get_asset_quote(o.asset)\n asset.quantity = o.quantity\n value_wallet += o.quantity * asset.sell\n assets.append(asset)\n response['assets'] = assets\n response['value_wallet'] = value_wallet\n response['error'] = False\n return response", "def device_info(self) -> Dict[str, Any]:\n return {\n 'name': 'Boiler Module',\n 'manufacturer': 'Eneco',\n 'identifiers': {\n (DOMAIN, self.toon.agreement.id, 'boiler_module'),\n },\n 'via_device': (DOMAIN, self.toon.agreement.id),\n }", "def getIntervenciones():", "def get_tenants(self):", "def _generate_cybersource_sa_payload(*, order, receipt_url, cancel_url, ip_address):\n # http://apps.cybersource.com/library/documentation/dev_guides/Secure_Acceptance_WM/Secure_Acceptance_WM.pdf\n # Section: API Fields\n\n # NOTE: be careful about max length here, many (all?) string fields have a max\n # length of 255. At the moment none of these fields should go over that, due to database\n # constraints or other reasons\n\n coupon_redemption = CouponRedemption.objects.filter(order=order).first()\n coupon_version = (\n coupon_redemption.coupon_version if coupon_redemption is not None else None\n )\n\n line_items = {}\n total = 0\n for i, line in enumerate(order.lines.all()):\n product_version = line.product_version\n unit_price = get_product_version_price_with_discount(\n coupon_version=coupon_version, product_version=product_version\n )\n line_items[f\"item_{i}_code\"] = str(product_version.product.content_type)\n line_items[f\"item_{i}_name\"] = str(product_version.description)[:254]\n line_items[f\"item_{i}_quantity\"] = line.quantity\n line_items[f\"item_{i}_sku\"] = product_version.product.content_object.id\n line_items[f\"item_{i}_tax_amount\"] = \"0\"\n line_items[f\"item_{i}_unit_price\"] = str(unit_price)\n\n total += unit_price\n\n # At the moment there should only be one line\n product_version = order.lines.first().product_version\n product = product_version.product\n content_object = product.content_object\n readable_id = get_readable_id(content_object)\n\n merchant_fields = {\n \"merchant_defined_data1\": str(product.content_type),\n \"merchant_defined_data2\": readable_id,\n \"merchant_defined_data3\": \"1\",\n }\n\n if coupon_version is not None:\n merchant_fields[\"merchant_defined_data4\"] = coupon_version.coupon.coupon_code\n merchant_fields[\"merchant_defined_data5\"] = ( # company name\n coupon_version.payment_version.company.name\n if coupon_version.payment_version.company\n else \"\"\n )\n merchant_fields[\"merchant_defined_data6\"] = (\n coupon_version.payment_version.payment_transaction or \"\"\n )\n merchant_fields[\"merchant_defined_data7\"] = (\n coupon_version.payment_version.payment_type or \"\"\n )\n\n return {\n \"access_key\": settings.CYBERSOURCE_ACCESS_KEY,\n \"amount\": str(total),\n \"consumer_id\": order.purchaser.username,\n \"currency\": \"USD\",\n \"locale\": \"en-us\",\n **line_items,\n \"line_item_count\": order.lines.count(),\n **merchant_fields,\n \"reference_number\": order.reference_number,\n \"profile_id\": settings.CYBERSOURCE_PROFILE_ID,\n \"signed_date_time\": now_in_utc().strftime(ISO_8601_FORMAT),\n \"override_custom_receipt_page\": receipt_url,\n \"override_custom_cancel_page\": cancel_url,\n \"transaction_type\": \"sale\",\n \"transaction_uuid\": uuid.uuid4().hex,\n \"unsigned_field_names\": \"\",\n \"customer_ip_address\": ip_address if ip_address else None,\n }", "def post(self,\n partner_id):\n abort(501)", "def post(self,\n partner_id):\n abort(501)", "def 
parse_whoxy_results(self,whoxy_data,reverse=False):\n results = {}\n results['domain'] = whoxy_data['domain_name']\n # Check for the registrar information\n if \"domain_registrar\" in whoxy_data:\n results['registrar'] = whoxy_data['domain_registrar']['registrar_name']\n elif \"registrar\" in whoxy_data:\n results['registrar'] = whoxy_data['registrar_name']\n else:\n results['registrar'] = \"None Listed\"\n # Check for an expiration date\n if \"expiry_date\" in whoxy_data:\n results['expiry_date'] = whoxy_data['expiry_date']\n else:\n results['expiry_date'] = \"None Listed\"\n # Check for a company name\n if \"company_name\" in whoxy_data:\n results['organization'] = whoxy_data['registrant_contact']['company_name']\n else:\n results['organization'] = \"None Listed\"\n # Check for a registrant's name\n if \"full_name\" in whoxy_data:\n results['registrant'] = whoxy_data['registrant_contact']['full_name']\n else:\n results['registrant'] = \"None Listed\"\n # A few pieces of information are unavailable from WhoXY's reverse WHOIS lookups\n if reverse:\n results['address'] = \"Unavailable for Reverse WHOIS\"\n results['admin_contact'] = \"Unavailable for Reverse WHOIS\"\n results['tech_contact'] = \"Unavailable for Reverse WHOIS\"\n # Try to assemble different pieces of information from the record\n else:\n try:\n reg_address = whoxy_data['registrant_contact']['mailing_address']\n reg_city = whoxy_data['registrant_contact']['city_name']\n reg_state = whoxy_data['registrant_contact']['state_name']\n reg_zip = whoxy_data['registrant_contact']['zip_code']\n reg_email = whoxy_data['registrant_contact']['email_address']\n reg_phone = whoxy_data['registrant_contact']['phone_number']\n results['address'] = \"{} {}, {} {} {} {}\".format(reg_address,reg_city,reg_state,reg_zip,reg_email,reg_phone)\n except:\n results['address'] = \"None Listed\"\n try:\n admin_name = whoxy_data['administrative_contact']['full_name']\n admin_address = whoxy_data['administrative_contact']['mailing_address']\n admin_city = whoxy_data['administrative_contact']['city_name']\n admin_state = whoxy_data['administrative_contact']['state_name']\n admin_zip = whoxy_data['administrative_contact']['zip_code']\n admin_email = whoxy_data['administrative_contact']['email_address']\n admin_phone = whoxy_data['administrative_contact']['phone_number']\n results['admin_contact'] = \"{} {} {}, {} {} {} {}\".format(admin_name,admin_address,admin_city,admin_state,admin_zip,admin_email,admin_phone)\n except:\n results['admin_contact'] = \"None Listed\"\n try:\n tech_name = whoxy_data['technical_contact']['full_name']\n tech_address = whoxy_data['technical_contact']['mailing_address']\n tech_city = whoxy_data['technical_contact']['city_name']\n tech_state = whoxy_data['technical_contact']['state_name']\n tech_zip = whoxy_data['technical_contact']['zip_code']\n tech_email = whoxy_data['technical_contact']['email_address']\n tech_phone = whoxy_data['technical_contact']['phone_number']\n results['tech_contact'] = \"{} {} {}, {} {} {} {}\".format(tech_name,tech_address,tech_city,tech_state,tech_zip,tech_email,tech_phone)\n except:\n results['tech_contact'] = \"None Listed\" \n return results", "def lab03_extra_credit():\n okpy_email = \"[email protected]\"\n practice_result_code = \"xxxx...xxxxx\"\n return (okpy_email, practice_result_code)", "def getStockData():\n pass", "async def info(self, context):\n await context.send('creador: [email protected]\\ncolabs:\\n emi: https://twitter.com/emilianosce/ o https://www.instagram.com/emilianosce/ \\n 
garza: https://twitter.com/Matias_Garcia00 o https://www.twitch.tv/garzangb')", "def test_client_verification_retrieve(self):\n pass", "def init():\n output, code = partners_view.init()\n return jsonify(output), code", "def get_incoming_data(self):\n\n fields = self.cleaned_data.copy()\n action = self.cleaned_data['action']\n \n logger.debug(\"Fields that we got are %s\" % fields)\n\n return_data = {}\n\n #determine our further PoA based on the action varible passed by envaya phone\n return_data['action'] = action\n return_data['events'] = {}\n #add ISD code to any missing 'From' number missing it.\n from_number = self.cleaned_data[self.identity_name]\n if len(from_number) == 8 and from_number.startswith('7'):\n from_number = '+257' + from_number\n\n if action == 'incoming':\n logger.info(\"We have an incoming message!\")\n\n return_data['text'] = self.cleaned_data[self.text_name]\n return_data['connection'] = self.lookup_connections([from_number])[0]\n return_data['from_phone'] = self.cleaned_data['phone_number']\n\n elif action == 'outgoing':\n\n logger.info(\"Received a poll for outgoing message!\")\n return_data['events'] = [{'event': 'log', 'message': \"We do not deliver outgoing messages via EnvayaSMS Android app!\"}]\n\n elif action == 'test':\n logger.info(\"Received a test connection request!\")\n return_data['events'] = [{'event': 'log', 'message': \"Alpha to Charlie: This software by FortyPlusTwo-Hewe rocks! :)\"}]\n\n elif action == 'send_status':\n logger.error(\"NOT IMPLEMENTED: send_status action\")\n\n elif action == 'device_status':\n logger.error(\"NOT IMPLEMENTED: device_status action\")\n\n elif action == 'forward_sent':\n logger.error(\"NOT IMPLEMENTED: forward_status action\")\n\n elif action == 'amqp_started':\n logger.error(\"NOT IMPLEMENTED: amqp_status action\")\n\n else:\n logger.exception(\"UNSUPPORTED ACTION %s requested by EnvayaSMS Android app\" % action)\n raise NotImplementedError(\"Action %s not implemented!\" % action)\n\n print return_data\n return return_data", "def view_party(request):\n result = {}\n\n u = request.user\n other = Party.objects.get(id=request.POST['party_id'])\n if other in u.friends():\n # this other person is a friend so show all details\n bought = TransactionLineItem.objects.filter(transaction__party=other).order_by('-transaction__timestamp')\n wishes = Wishlist.objects.filter(party=other).order_by('-added')\n\n result['bought'] = [b.details() for b in bought[:10]]\n result['wished'] = [w.details() for w in wishes[:10]]\n \n else:\n # just show some details\n bought = TransactionLineItem.objects.filter(transaction__party=other).order_by('-transaction__timestamp')\n wishes = Wishlist.objects.filter(party=other).order_by('-added')\n\n result['bought'] = [b.details() for b in bought[:3]]\n result['wished'] = [w.details() for w in wishes[:3]]\n \n return JSONHttpResponse(result)", "def _get_uid_wh_agent(self):\n context = self._context or {}\n rp_obj = self.env['res.partner']\n ru_obj = self.env['res.users']\n ru_brw = ru_obj.browse()\n acc_part_brw = rp_obj._find_accounting_partner(\n ru_brw.company_id.partner_id)\n return acc_part_brw.wh_src_agent", "def get_info(self):\n return None", "def _online_data(self):\n\t\treport = RCReport()\n\t\t\n\t\ttry:\n\t\t\t# Importe la bonne API.\n\t\t\tapi_name = self.config.get(self.system, 'online_api')\n\t\t\tapi_class_name = 'RC' + api_name.capitalize() + 'API'\n\t\t\tapi_mod = importlib.import_module('RCOnlineAPI.' 
+ api_class_name)\n\t\t\tapi = getattr(api_mod, api_class_name)(self.system, self.config)\n\t\texcept ImportError as e:\n\t\t\treport.log('\\tOnline API \"' + api_class_name + '\" does not exist.')\n\t\t\treturn\n\t\texcept RCException as e:\n\t\t\treport.log('\\t' + e.message)\n\t\t\treturn\n\t\t\n\t\treport.log('\\tUsing \"' + api_class_name + '\" API', 2)\n\t\t\n\t\t# On récupère les langues autorisées pour la recherche.\n\t\tlang = self.config.get(self.system, 'online_data_lang').split(',')\n\t\tself.generate = True\n\t\t\n\t\t# On créé le dossier \"covers\" si besoin\n\t\tif self.config.get(self.system, 'download_covers') and not os.path.exists(COVERS_DIR):\n\t\t\tos.mkdir(COVERS_DIR)\n\t\t\n\t\tfor (game, infos) in self.games.items():\n\t\t\t# On ne cherche pas de données si il y en a déjà ou si aucune donnée n'a été trouvée la fois précédente avec l'API utilisée.\n\t\t\tif infos['onlineData']['state'] or (api_name in infos['onlineData'] and not infos['onlineData'][api_name]):\n\t\t\t\tcontinue\n\t\t\telif len(lang) > 0 and lang[0] != '' and infos['country'] not in lang:\n\t\t\t\tcontinue\n\t\t\t\n\t\t\treport.log('\\tGetting data for ' + game, 2)\n\t\t\t\n\t\t\tdata = api.search(game)\n\t\t\t\n\t\t\tif data == -1:\n\t\t\t\tinfos['onlineData'][api_name] = False\n\t\t\t\treport.log('\\t\\t>> Unable to find data.', 2)\n\t\t\telif data == -2:\n\t\t\t\treport.log('\\t\\t>> HTTP Error, stop looking for online data.')\n\t\t\t\tbreak\n\t\t\telif data != None:\n\t\t\t\trelease_date = data['release_date']\n\t\t\t\tgenre = data['genre']\n\t\t\t\teditor = data['editor']\n\t\t\t\tresume = data['resume']\n\t\t\t\tnote = data['note']\n\t\t\t\trating = data['rating']\n\t\t\t\timage = data['image']\n\t\t\t\t\n\t\t\t\t# Je procède comme ceci afin d'éviter de perdre des données qui peuvent être déjà présentes\n\t\t\t\tinfos['year'] = release_date or infos['year']\n\t\t\t\tinfos['genre'] = genre or infos['genre']\n\t\t\t\tinfos['editor'] = editor or infos['editor']\n\t\t\t\tinfos['resume'] = resume or infos['resume']\n\t\t\t\tinfos['note'] = note or infos['note']\n\t\t\t\tinfos['rating'] = rating or infos['rating']\n\t\t\t\t\n\t\t\t\t# Récupération de la cover\n\t\t\t\tif image != None:\n\t\t\t\t\tfile = open('covers/' + infos['game_name'] + image['ext'], 'wb')\n\t\t\t\t\t\n\t\t\t\t\tfile.write(image['file'].read())\n\t\t\t\t\tfile.close()\n\t\t\t\t\timage['file'].close()\n\t\t\t\t\n\t\t\t\tinfos['onlineData']['state'] = True\n\t\t\t\tinfos['onlineData'][api_name] = True", "def getAutopilotInfo(self, request, context): \n\n response = droneconnect_pb2.AutopilotInfo(identifier=request.identifier,\n autopilot_firmware_version = str(self.vehicle.version),\n major_version_number = self.vehicle.version.major,\n minor_version_number = self.vehicle.version.minor,\n patch_version_number = self.vehicle.version.patch,\n release_type = self.vehicle.version.release_type(),\n release_version = self.vehicle.version.release_version(),\n stable_release = self.vehicle.version.is_stable())\n \n return response", "def extra_data(self, user, uid, response, details):\n try:\n return self.get_steam_profile(response)\n except:\n return \"\"", "def _commercial_fields(self):\n return ['website']", "def get_com_data(self):\n self.form_url_str()\n if self.__print_url: print self.com_data_full_url\n self.download_json()\n self.get_datalist_fr_json()", "def get_self_info_client(request: Request) -> ReturnDict:\n client_info_serializer = ClientInfoSerializer(request.user.client)\n customer_account = 
CustomerAccountSerializer(request.user.customeraccount)\n data = client_info_serializer.data\n data['customer_account'] = customer_account.data\n return data", "def get_data():\n return", "def main(self, name):\n\t\tapi_results = [] \n\t\tparams = self.get_search_parameters(name)\n\t\tapi_results.append(self.api_connect(params))\n\t\ttime.sleep(1.0)\n\t\tkey = api_results[0]['businesses'][0]\n\t\tbusiness_information = [key['name'], self.phone_number_organizer(key), key['rating'],\\\n\t\tkey['review_count']]\n\t\treturn business_information", "def get_servicech(self, conf, phone_num):\n\t\tpass", "def get_info(self):\n return \"TODO !\"", "def get_info(self):\n\n (app_key,app_secret,access_type) = self.get_dropbox_app_keys()\n sess = session.DropboxSession(app_key, app_secret, access_type)\n sess.set_token(self.access_token_key,self.access_token_secret)\n\n db_client = client.DropboxClient(sess)\n\n #can throw ErrorResponse\n info = db_client.account_info()\n\n message = info\n\n return message", "def get_data_from_web():\n pass", "def who():\n cleanup()\n return {'available': userlist(), 'eta': data['etas'], 'etd': data['etds'], 'lastlocation': data['lastlocation'], 'ceitloch': ceitloch(), 'reminder': data['reminder']}", "def data(self, user=None):\n return {\n \"provider\": self.BACKEND,\n \"access_token\": self.access_token,\n \"client_id\": self.client_id,\n \"honor_code\": \"true\",\n \"country\": \"US\",\n \"username\": user.username if user else \"test_username\",\n \"name\": user.first_name if user else \"test name\",\n \"email\": user.email if user else \"[email protected]\"\n }", "def config(request):\n\tlusers = User.objects.all()\n\tresp = {'location':{'lon':settings.SNP_DEFAULT_LON, 'lat':settings.SNP_DEFAULT_LAT, 'zoomlevel':settings.SNP_DEFAULT_ZOOMLEVEL},\n\t\t'poi_types':[poi_type[1] for poi_type in settings.SNP_POI_TYPES],\n\t\t'live_users':list(),\n\t\t'advertisement':dict()\n\t\t}\n\n\ttopad_dict = dict()\n\ttry:\n\t\ttopad = Top_advertisement.objects.filter(active__exact=True)[0]\n\t\ttopad_dict = dict()\n\t\ttopad_dict['title'] = topad.title\n\t\ttopad_dict['transparency'] = topad.transparency\n\t\ttopad_dict['url'] = topad.url\n\t\ttopad_dict['image'] = topad.image.url\n\texcept IndexError:\n\t\tpass\n\tresp['advertisement']['top'] = topad_dict\n\n\tsidead_dict = dict()\n\ttry:\n\t\tsidead = Side_advertisement.objects.filter(active__exact=True)[0]\n\t\tsidead_dict['title'] = sidead.title\n\t\tsidead_dict['url'] = sidead.url\n\t\tsidead_dict['image'] = sidead.image.url\n\texcept IndexError:\n\t\tpass\n\tresp['advertisement']['side'] = sidead_dict\n\n\tfor luser in lusers:\n\t\ttracks = list()\n\t\tfor track in Track.objects.filter(user=luser):\n\t\t\t# check if we have some message for this track\n\t\t\tif Message.objects.filter(track=track):\n\t\t\t\ttrack_last_time = Message.objects.filter(track=track).latest().time.isoformat()\n\t\t\t\ttracks.append({'id':track.pk, 'name':track.name,\n\t\t\t\t 'description':track.description,\n\t\t\t\t 'is_active':track.is_active,\n\t\t\t\t 'last_location_time':track_last_time})\n\n\t\t# return only users with at least one track with at least one message\n\t\tif len(tracks) > 0:\n\t\t\tresp['live_users'].append({'id':luser.id, 'username':luser.username,\n\t\t\t\t\t 'first_name':luser.first_name,\n\t\t\t\t\t 'last_name':luser.last_name,\n\t\t\t\t\t 'email':luser.email, 'phone':luser.phone,\n\t\t\t\t\t 'tracks':tracks})\n\treturn HttpResponse(simplejson.dumps(resp), mimetype='application/json')", "def get_infos(self):\n infos = 
dict()\n infos['dataset'] = self._dataset_name()\n infos['task'] = self.task\n if self.task == 'sep_clean':\n data_license = [librispeech_license]\n else:\n data_license = [librispeech_license, wham_noise_license]\n infos['licenses'] = data_license\n return infos", "def get_data(self):\r\n pass", "def fetch_data(self):", "def account_info(request):\r\n user = request.user\r\n\r\n return _api_response(request, user.safe_data())", "def get_info(self,honeypotids):\n req = {\"type\":\"get_info\",\n \"from\":self.network.mc_id,\n \"to\": honeypotids}\n expect_dict = {\"type\":\"send_info\"}\n msg_list = self.send_receive(req,honeypotids,expect_dict)\n answer = {}\n for msg in msg_list:\n answer[msg[\"from\"]] = msg[\"info\"]\n return answer", "def local_stocks_prices():\n url1 = \"https://api.invertironline.com/token\"\n\n data = {\n \"username\": usuario,\n \"password\": password,\n \"grant_type\": \"password\" \n }\n response = requests.post(url1, data=data)\n if response.status_code == 200:\n content = response.text\n access_key = token_key(content)\n\n url2 = f'https://api.invertironline.com/api/v2/Cotizaciones/Acciones/Merval/argentina'\n datos = requests.get(url2, headers={\n 'Authorization': 'Bearer '+access_key\n })\n datos = json.loads(datos.text)\n datos = datos['titulos']\n datos = clean_assets(datos)\n return datos", "def accounts():", "def get_data(self):\n pass", "def get_data(self):\n pass", "def parse_url(url):\n results = NotifyBase.parse_url(url, verify_host=False)\n if not results:\n # We're done early as we couldn't load the results\n return results\n\n # Store our access code\n access_token = NotifyStreamlabs.unquote(results['host'])\n results['access_token'] = access_token\n\n # call\n if 'call' in results['qsd'] and results['qsd']['call']:\n results['call'] = NotifyStreamlabs.unquote(\n results['qsd']['call'].strip().upper())\n # donation - amount\n if 'amount' in results['qsd'] and results['qsd']['amount']:\n results['amount'] = NotifyStreamlabs.unquote(\n results['qsd']['amount'])\n # donation - currency\n if 'currency' in results['qsd'] and results['qsd']['currency']:\n results['currency'] = NotifyStreamlabs.unquote(\n results['qsd']['currency'].strip().upper())\n # donation - name\n if 'name' in results['qsd'] and results['qsd']['name']:\n results['name'] = NotifyStreamlabs.unquote(\n results['qsd']['name'].strip().upper())\n # donation - identifier\n if 'identifier' in results['qsd'] and results['qsd']['identifier']:\n results['identifier'] = NotifyStreamlabs.unquote(\n results['qsd']['identifier'].strip().upper())\n # alert - alert_type\n if 'alert_type' in results['qsd'] and results['qsd']['alert_type']:\n results['alert_type'] = NotifyStreamlabs.unquote(\n results['qsd']['alert_type'])\n # alert - image_href\n if 'image_href' in results['qsd'] and results['qsd']['image_href']:\n results['image_href'] = NotifyStreamlabs.unquote(\n results['qsd']['image_href'])\n # alert - sound_href\n if 'sound_href' in results['qsd'] and results['qsd']['sound_href']:\n results['sound_href'] = NotifyStreamlabs.unquote(\n results['qsd']['sound_href'].strip().upper())\n # alert - duration\n if 'duration' in results['qsd'] and results['qsd']['duration']:\n results['duration'] = NotifyStreamlabs.unquote(\n results['qsd']['duration'].strip().upper())\n # alert - special_text_color\n if 'special_text_color' in results['qsd'] \\\n and results['qsd']['special_text_color']:\n results['special_text_color'] = NotifyStreamlabs.unquote(\n 
results['qsd']['special_text_color'].strip().upper())\n\n return results", "def test_client_nationlity_retrieve(self):\n pass", "def runAnalytics():\n #gets OAuth from the API\n analytics = get_Analytics_service()\n #get the object return from the API\n #send that object to print out useful fields\n response = get_report(analytics)\n print_response(response)", "def get_partner_requests(request):\n try:\n partner_requests = PartnerRequest.objects.filter(to_user=request.user)\n except:\n partner_requests = []\n\n return partner_requests" ]
[ "0.62221545", "0.56236756", "0.56236756", "0.5579682", "0.5457257", "0.5414618", "0.5382873", "0.5349557", "0.53429776", "0.5338125", "0.5333075", "0.53239155", "0.5318248", "0.5313936", "0.53106076", "0.5300438", "0.528713", "0.528309", "0.5242493", "0.52211165", "0.52005583", "0.5187517", "0.5162123", "0.51608396", "0.51598436", "0.5112094", "0.5107046", "0.5104729", "0.5103881", "0.50963", "0.5087667", "0.5084241", "0.50824", "0.5071942", "0.50688374", "0.5066455", "0.50618774", "0.50616634", "0.5058476", "0.5048972", "0.5025594", "0.5021749", "0.50172764", "0.49990255", "0.49986178", "0.49974847", "0.49894542", "0.49894542", "0.49720117", "0.49711353", "0.49646822", "0.49623203", "0.49623203", "0.49489766", "0.49459228", "0.49412006", "0.4941075", "0.49395558", "0.49252185", "0.49140123", "0.49140123", "0.49083754", "0.49073812", "0.48938", "0.4885585", "0.48850584", "0.48831856", "0.48783916", "0.4876319", "0.4874738", "0.48689792", "0.48678026", "0.48664492", "0.4862322", "0.4861348", "0.48602504", "0.48533988", "0.48440075", "0.48426527", "0.4838257", "0.48370284", "0.48308468", "0.4820755", "0.48178896", "0.4817492", "0.48173314", "0.481589", "0.48004037", "0.4788735", "0.47880468", "0.47834393", "0.47713283", "0.47652733", "0.47575593", "0.47575593", "0.4756295", "0.4750324", "0.47493172", "0.4746309" ]
0.52863556
17
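For orientation between records: each row in this dump pairs a natural-language query with one positive code document, a metadata dict, and a list of mined negative snippets. negative_scores holds the negatives' similarity scores as strings, document_score the positive's score, and document_rank appears to be the number of negatives that outscore the positive; for the row that just ended, 17 of the negative scores exceed 0.52863556, matching the stored rank. A minimal sketch of that reading in Python follows; the field names come from this dump, and the interpretation of document_rank is inferred from the rows shown here, not from any documentation:

# Sketch only: treating document_rank as "how many negatives outscore the positive document"
# is an inference from the rows in this dump, not documented behaviour of the dataset.
def recomputed_rank(row: dict) -> int:
    doc_score = float(row["document_score"])  # e.g. 0.52863556 in the row above
    return sum(1 for s in row["negative_scores"] if float(s) > doc_score)  # 17 for the row above

def rank_is_consistent(row: dict) -> bool:
    # Cross-check the stored rank against the recomputed one.
    return int(row["document_rank"]) == recomputed_rank(row)

The same recomputation reproduces the ranks of 0 and 36 stored in the two records that follow.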
Details of the resource that was assessed
def resource_details(self) -> pulumi.Output[Any]: return pulumi.get(self, "resource_details")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_resource_details (self):\n return (f\"[Title:\\\"{self.get_title()}\\\"] [Author:{self.get_author()}] [Publisher:{self.get_publisher()}] [Year:{self.get_year()}]\")", "def resource(self):\n return str(self._resource)", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def resource(self):\n return self._resource", "def details(self):\n pass", "def __str__(self):\n return self.resource.__name__", "def __str__(self):\n return self.__resource;", "def __str__(self):\n\n return str(self.__resource);", "def meta_data(self):\r\n return simplejson.dumps(self.__resource_meta)", "def detail(self):\n info = self.info()\n return info", "def PrintResource(resource):\n print resource.resource_id.text, resource.GetResourceType()", "def resource_details(self) -> pulumi.Input[Union['AzureResourceDetailsArgs', 'OnPremiseResourceDetailsArgs', 'OnPremiseSqlResourceDetailsArgs']]:\n return pulumi.get(self, \"resource_details\")", "def details(self):\n raise NotImplementedError()", "def getResource(self):\n pass;", "def resourceid(self):", "def info(self) -> dict:", "def info(self):\n return self.__dict__[self.sid]", "def info(self):", "def info(self):", "def resource(self):\n return self.properties.get('resource',\n Entity(self.context, ResourcePath(\"resource\", self.resource_path)))", "def details(self):\n return self._details", "def _get_information(self):\n pass", "def get_resource(self):\n msg = _(\"wrote a new wechat article : %(title)s\") % {\n 'title': self.title}\n msg = unicode(msg)\n resource = {\n 'title': msg,\n 'description': self.get_digest(),\n 'url': self.get_absolute_url(),\n 'image_url': self.cover_img.url,\n }\n return resource", "def info(self, resource, id):\n return self.request('/' + resource + '/' + str(id))", "def info(self):\n self._info()", "def info(self):\n return self._info", "def info(self):\n return self._info", "def get_info(self):\n return None", "def metadata(self):\r\n return resources.Metadata(self)", "def get_resource(self):\n raise errors.Unimplemented()", "def get_info(self):\n pass", "def get_info(self):\n pass", "def getResource(self):\n return self.__resource;", "def info(self):\r\n return self._get('info', {})", "def details (self):\n return six.text_type(self)", "def info(self) -> str:\n return self._info", "def info(self) -> str:\n return self._info", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_details(self):\n return self.details", "def get_info(self):\n return \"TODO !\"", "def get_details(self):", "def data(self):\n return { # TODO Actually query for this shit\n \"foo\": self.__name__,\n \"url\": f\"{self.request.resource_url(self)}\",\n }", "def res_description(self):\n return self.get(\"res_description\", decode=True)", "def getResource(self):\n\n return self.__resource;", "def getInfo(self):\n return self.name + \" [\" + self.target_type + \"]\"", "def get_info(self) -> str:\n template_data = self.get_template_data()\n return self.get_template().render(\n resource=self.resource,\n markdown=markdown,\n data=template_data,\n base=RESOURCE_TEMPLATE\n )", "def info(self) -> str:\n return pulumi.get(self, \"info\")", "def get_info(self) -> str:\n return self.info", "def details(self) -> \"dict\":\n return self._attrs.get(\"details\")", "def get_info(self) -> str:\n 
raise NotImplementedError()", "def info(self):\n return self.info_text", "def detail(self):\n return self.status[\"health\"][\"detail\"]", "def _resource_fields(chromo):\n return {\n 'name': chromo['resource_name'],\n 'description': chromo['title'],\n 'url_type': u'datastore',\n }", "def get_info(self):\n self.exists = self.check_subscr()\n return self.attrs", "def getInfo():", "def info(self):\n if not self._was_read:\n self.read()\n return self._info", "def get_details(self):\n raise Exception(\"bad details\")", "def info(self):\n return self.current_run.info", "def __str__(self):\n \n for att in self.__dict__:\n print('%s: %r' % (att, getattr(self, att)))\n \n return 'Background Sources class object attributes'", "def info(self):\n return {}", "def resource_state(self) -> str:\n return pulumi.get(self, \"resource_state\")", "def details(self):\n print \"ABC - Deployer.details()\"", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def info(self):\n attr_list = []\n for name in self._metadata:\n attr_list.append(name + \": \" + str(getattr(self, name, None)) + \"\\n\")\n print(f\"{self.__class__}\\n\" + \"\".join(attr_list))", "def getInfo(self):\n return self.info", "def name(self):\n return self.raw_resource[\"name\"]", "def info(self):\n return self.client.call('GET', self.name + 'info')", "def get_resource_information():\n\n\n # the resources we are allowed to use is easy. We just copy this...\n resource_limit_dict = _resources_allowed_dict.copy()\n\n \n # from the other dict, we only take the resource information. (this omits\n # locks and timing information that isn't needed)\n\n # first, let's do the easy thing, the quantity resources. 
These are just \n # floats\n resource_use_dict = {}\n for resourcename in resource_constants.quantity_resources:\n resource_use_dict[resourcename] = _resources_consumed_dict[resourcename]\n\n # for the fungible resources (files opened, etc,), we only need a count...\n for resourcename in resource_constants.fungible_item_resources:\n resource_use_dict[resourcename] = len(_resources_consumed_dict[resourcename])\n\n # for the individual item resources (ports, etc,), we copy the set...\n for resourcename in resource_constants.individual_item_resources:\n resource_use_dict[resourcename] = _resources_consumed_dict[resourcename].copy()\n\n # and that's it!\n return (resource_limit_dict, resource_use_dict)", "def getInfo(self):\n doc = minidom.parse(urllib.urlopen(serverString + \"/rest/asset/\" + self.id))\n self._getInfoFromNode(doc.getElementsByTagName(\"asset\")[0])", "def info(self):\n return (self._title, self._version, self._descr)", "def getInfo(self):\n return self._info", "def print_details(self):\n print(\"[{}]\".format(self.name))\n print(\"ID: \" + str(self.id))\n print(\"name: %s\" % self.name)\n print(\"URL: %s\" % self.url)\n print(\"CPUs: \" + str(self.cpus) + \" cores\")\n print(\"Mem: \" + self.memory_str)\n print(\"Tasks: \" + str(self.tasks_len))\n print(\"Uptime %s\" + self.uptime)\n print(\"Uptime Descriptive %s\" + self.uptime_descriptive)\n print(\" \")", "def resource_link_title(self):\n return self.request.POST.get(\"resource_link_title\", self.resource_link_id)", "def get_main_information(self) -> Dict:\n if self.information is None:\n self.information = self.orthanc.get_instance_information(\n self.identifier\n )\n\n return self.information", "def resources(self):", "def getInstDescription(self):\n return self.name()", "def resource(self):\n return self.add_resource", "def extract_resource_details(metadata):\n\n # check data integrity\n if Update.get_entry(metadata, 'success') is not True:\n raise UpdateException('metadata does not have `success` equal to `True`')\n if len(Update.get_entry(metadata, 'result')) != 1:\n raise UpdateException('metadata does not have exactly 1 result')\n if len(Update.get_entry(metadata, 'result', 0, 'resources')) != 1:\n raise UpdateException('metadata does not have exactly 1 resource')\n\n # return resource details\n resource = Update.get_entry(metadata, 'result', 0, 'resources', 0)\n return resource['url'], resource['revision_timestamp']", "def get_description(self):", "def return_info(self):\n\t\treturn self.info", "def print_resource():\n logging.info(\"__package__: %s\", __package__)\n logging.info(\"__name__: %s\", __name__)\n logging.info(\"JSON_RESOURCE: %s\", JSON_RESOURCE)\n logging.info(\"JSON_PATH: %s\", JSON_PATH)", "def describe(self):\n return str(self)", "def get_main_information(self) -> Dict:\n if self.lock:\n if self._information is None:\n # Setup self._information for the first time when study is lock\n self._information = self.client.get_instances_id(self.id_)\n\n return self._information\n\n return self.client.get_instances_id(self.id_)", "def target_resource(self):\n return self._target_resource", "def _resource_dump(pe, res):\n rva = res.data.struct.OffsetToData\n size = res.data.struct.Size\n\n return pe.get_data(rva, size)", "def info(self):\n print self.id, self.type, self.xyz.get_xyz", "def usage_information(self):\n return self._usage_information", "def info() -> None:", "def _get_infores(source: str) -> str:\n if source in self.context.catalog:\n return self.context.catalog[source]\n else:\n infores: str = 
_process_infores(source)\n if infores:\n self.context.catalog[source] = infores\n return infores\n else:\n return \"\"", "def get_info(self):\n return {}", "def __repr__(self):\r\n return self.uri", "def resource_status(self) -> 'outputs.InstantSnapshotResourceStatusResponse':\n return pulumi.get(self, \"resource_status\")", "def name(self):\n\n return self.resource[\"metadata\"][\"name\"]" ]
[ "0.76440537", "0.7001793", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.6875115", "0.686545", "0.6756598", "0.6750164", "0.66482615", "0.6627125", "0.6553682", "0.6521818", "0.6483526", "0.64652663", "0.6450048", "0.6442991", "0.63966125", "0.63947666", "0.6393753", "0.6393753", "0.63681227", "0.6352751", "0.6350844", "0.6317342", "0.6305615", "0.6295532", "0.62758183", "0.62539893", "0.62459886", "0.62382597", "0.62354326", "0.62251353", "0.62251353", "0.6220586", "0.61779207", "0.6151666", "0.6150642", "0.6150642", "0.6147839", "0.6147839", "0.6147839", "0.6133916", "0.61214894", "0.6112529", "0.60607797", "0.6058028", "0.6054495", "0.6044078", "0.602688", "0.60230654", "0.60037214", "0.600051", "0.59956336", "0.59946823", "0.59854203", "0.59655184", "0.59423304", "0.5940428", "0.5916753", "0.59152496", "0.5907256", "0.5904732", "0.590381", "0.59034437", "0.5895525", "0.5895525", "0.58940214", "0.58899766", "0.5883736", "0.5868952", "0.5866518", "0.58644927", "0.5849613", "0.5849438", "0.5825993", "0.5815696", "0.5811836", "0.58045256", "0.5796444", "0.5791339", "0.5789975", "0.5769325", "0.5766697", "0.5762917", "0.57476383", "0.57412225", "0.5738121", "0.57340276", "0.5725352", "0.57217497", "0.57186913", "0.5713716", "0.5702322", "0.57022995", "0.5698046" ]
0.7746103
0
The result of the assessment
def status(self) -> pulumi.Output['outputs.AssessmentStatusResponseResponse']: return pulumi.get(self, "status")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def result(self):\n return self['result']", "def results(self):\n\n\t\tresults = {'answer':42}\n\n\t\treturn results", "def result(self):\n\n print('Ergebnisse: -------------\\n'\n 'Richtige Antworten:{} \\n'\n 'Falsche Antworten:{} \\n'.format(self.answer_right, self.answer_wrong))", "def result(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"result\")", "def output(self):\r\n return self.result", "def getResult(self):\n return self.ok", "def result(self, result):\n print(result)", "def result( self):\n return self._result", "def result(self):\n return self._result", "def result(self):\n return self._result", "def result(self):\n return self._result", "def result(self):", "def result(self):", "def evaluate(self):\n scores = []\n scores.append(self.word_analogy())\n print(\"Word Analogy (acc): \", scores[0])\n scores.append(self.word_similarity())\n print(\"Word Similarity (MSE): \", scores[1])\n scores.append(self.concept_categorization())\n print(\"Concept Categorization (purity): \", scores[2])\n scores.append(self.sentiment_analysis())\n print(\"Sentiment Analysis (acc): \", scores[3])\n return scores", "def _get_result(self):\r\n \r\n return self._result", "def final_result(self):\r\n print(\" Game \\t\\t Word \\t\\t Result \\t\\t Bad Guess \\t\\t Missed Letters \\t\\t Score \")\r\n print(\" ---- \\t\\t ---- \\t\\t ------ \\t\\t --------- \\t\\t -------------- \\t\\t ----- \")\r\n count = 0\r\n final_score = 0\r\n for x in self.instances:\r\n count += 1\r\n print(\" \"+str(count)+\" \\t\\t \"+str(x.get_word())+\" \\t\\t \"+str(x.get_result())+\" \\t\\t \"+str(x.get_wrong_guess())+\" \\t\\t\\t \"+str(x.get_wrong_letter())+\" \\t\\t\\t \"+str(round(x.get_score(),3)))\r\n final_score += x.get_score()\r\n\r\n print(\"\\nFinal Score : \"+str(round(final_score,3)))", "def result(self):\r\n raise NotImplementedError('method result() is not implemented')", "def outcome(self):\r\n return self._outcome", "def parse_verifier_result(self):\n stat = self.get_verifier_result(self.verification_id)\n try:\n num_executed = stat['num_tests'] - stat['num_skipped']\n try:\n self.result = 100 * stat['num_success'] / num_executed\n except ZeroDivisionError:\n self.result = 0\n if stat['num_tests'] > 0:\n LOGGER.info(\"All tests have been skipped\")\n else:\n LOGGER.error(\"No test has been executed\")\n return\n\n with open(os.path.join(self.res_dir, \"rally.log\"),\n 'r', encoding='utf-8') as logfile:\n output = logfile.read()\n\n success_testcases = []\n for match in re.findall(r'.*\\{\\d{1,2}\\} (.*?) \\.{3} success ',\n output):\n success_testcases.append(match)\n failed_testcases = []\n for match in re.findall(r'.*\\{\\d{1,2}\\} (.*?) \\.{3} fail',\n output):\n failed_testcases.append(match)\n skipped_testcases = []\n for match in re.findall(r'.*\\{\\d{1,2}\\} (.*?) \\.{3} skip(?::| )',\n output):\n skipped_testcases.append(match)\n\n self.details = {\"tests_number\": stat['num_tests'],\n \"success_number\": stat['num_success'],\n \"skipped_number\": stat['num_skipped'],\n \"failures_number\": stat['num_failures'],\n \"success\": success_testcases,\n \"skipped\": skipped_testcases,\n \"failures\": failed_testcases}\n except Exception: # pylint: disable=broad-except\n self.result = 0\n\n LOGGER.info(\"Tempest %s success_rate is %s%%\",\n self.case_name, self.result)", "def result(self): \n return self.body", "def calculate(self):\n\n return \"Yes\" if self.result else \"No\"", "def results(self):\r\n pass", "def _load_assessment_results_page(self):\r\n\r\n fmt = '{0:0.' 
+ str(Configuration.PLACES) + 'g}'\r\n\r\n self.txtAvailability.set_text(\r\n str(fmt.format(self._function_model.availability)))\r\n self.txtMissionAt.set_text(\r\n str(fmt.format(self._function_model.mission_availability)))\r\n self.txtMissionHt.set_text(\r\n str(fmt.format(self._function_model.mission_hazard_rate)))\r\n self.txtPredictedHt.set_text(\r\n str(fmt.format(self._function_model.hazard_rate)))\r\n\r\n self.txtMMT.set_text(str(fmt.format(self._function_model.mmt)))\r\n self.txtMCMT.set_text(str(fmt.format(self._function_model.mcmt)))\r\n self.txtMPMT.set_text(str(fmt.format(self._function_model.mpmt)))\r\n\r\n self.txtMissionMTBF.set_text(\r\n str(fmt.format(self._function_model.mission_mtbf)))\r\n self.txtMTBF.set_text(str(fmt.format(self._function_model.mtbf)))\r\n self.txtMTTR.set_text(str(fmt.format(self._function_model.mttr)))\r\n\r\n return False", "def get_results(self):\n return self.result", "def present_result(self, parameters, result):\n print \"Result for parameters %s: %.7f\" % (repr(parameters), result)", "def result(self):\n return self.a", "def results(self):\n pass", "def get_result(self):\n if len(self.result_transcripts) > 0:\n return self.result_transcripts[0]\n else:\n return ''", "def status(self) -> pulumi.Output['outputs.AssessmentStatusResponse']:\n return pulumi.get(self, \"status\")", "def print_result(self):\n print(\"Final results: \")\n for i in range(1, len(self.agents) + 1):\n agent = self.agents[i-1]\n print(agent.name + \": {} wins\".format(self.results[agent.name]))", "def __get_evaluation_summary(self):\n self.logger.debug(\n f\"Getting summary for assignment {self.assignment_id}, eval_id {self.eval_id}\"\n )\n result = self.interactor.get_policy_eval_summary(self.assignment_id)\n\n if result.status_code != 200:\n self.logger.debug(\n f\"Could not get summary for assignment {self.assignment_id} for eval_id {self.eval_id} - {result.text}\"\n )\n raise Exception(\n f\"Summary could not be retrived: {result.status_code} - {result.text}\"\n )\n\n return result.json()[\"value\"][0][\"results\"]", "def score(self):\n return 1 if self.succeeded() else 0", "def Results(self):\n return self.data", "def Results(self):\n return self.data", "def GetResult(self, playerjm):\n return self.score / len(self.scores)", "def report(self, result):\n raise NotImplementedError", "def complain_result(self) -> Optional[str]:\n utils.logger.debug(f\"vote_result({self.complain_votes[self.round].get_summary()})\")\n if self.complain_votes[self.round].is_completed():\n vote_result = self.complain_votes[self.round].get_result()\n return vote_result.hex_hx()\n else:\n return None", "def show_result(dict_result):\r\n\r\n\tcorrects = dict_result[\"Corrects\"]\r\n\twrongs = dict_result[\"Wrongs\"]\r\n\tn_questions = dict_result[\"n_questions\"]\r\n\r\n\tprint(\"\\n\\n\",\"-\"*10,\"Final Result\", \"-\"*10)\r\n\r\n\tfinal_note = (len(corrects)*100)/n_questions\r\n\tprint(\"\\nResult: \", final_note*10)\r\n\r\n\tif final_note*10 > 600:\r\n\t\tprint(\"\\nYOU PASS!\")\r\n\telse:\r\n\t\tprint(\"\\nI'm sorry, you don't pass, but please try again!\")\r\n\r\n\tif len(wrongs) > 0:\r\n\t\tprint(\"\\nSome questions for review:\", end=\" \")\r\n\t\tfor i in wrongs:\r\n\t\t\tif i == wrongs[-1]:\r\n\t\t\t\tprint(i)\r\n\t\t\telse:\r\n\t\t\t\tprint(i, end=\", \")", "def get_outcome(self):\n return self.__outcome", "def result(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"result\")", "def evaluate(self) :\n pass", "def reportResult(self):\n return True;", "def 
__str__(self):\n return self.result", "def result(self):\n return (\"Recall@\" + str(self.length) + \": \"), (self.hit / self.test)", "def scan_result(self):\n assert 'masscan' in self._scan_result, 'Do a scan before trying to get result !'\n\n return self._scan_result", "def generateFinalResult(self):\n if self.__testResult == 'FAIL':\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n elif self.__testResult == 'PASS':\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY)\n elif self.__testResult == 'NONE':\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY) \n self.__testResult = 'PASS'\n #else:\n total_count = int(TestScriptSymbolTable.get_value_from_sym_tab(\"total_count\", TestScriptSymbolTable.test_result_tab))\n pass_count = int(TestScriptSymbolTable.get_value_from_sym_tab(\"pass_count\", TestScriptSymbolTable.test_result_tab))\n fail_count = int(TestScriptSymbolTable.get_value_from_sym_tab(\"fail_count\", TestScriptSymbolTable.test_result_tab))\n conditional_chk_flag = int(TestScriptSymbolTable.get_value_from_sym_tab(\"conditional_chk_flag\", TestScriptSymbolTable.test_result_tab))\n num_of_pass_required = int(TestScriptSymbolTable.get_value_from_sym_tab(\"num_of_pass_required\", TestScriptSymbolTable.test_result_tab))\n \n if total_count >= 1:\n if conditional_chk_flag == 1:\n if num_of_pass_required <= pass_count:\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'PASS'\n else:\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'FAIL'\n else:\n if fail_count > 0:\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'FAIL'\n else:\n Util.set_color(Util.FOREGROUND_GREEN | Util.FOREGROUND_INTENSITY)\n self.__testResult = 'PASS'\n else:\n if GlobalConfigFiles.curr_tc_name != \"\":\n Util.set_color(Util.FOREGROUND_RED | Util.FOREGROUND_INTENSITY)\n logging.debug(\"\\n TEST COMPLETED without FINAL RESULT...\")\n\n self.__testResult = 'FAIL'\n\n self.tmsPacket.TestResult = self.__testResult\n if GlobalConfigFiles.curr_tc_name != \"\":\n logging.info(\"\\n FINAL TEST RESULT ---> %15s\", self.__testResult)\n logging.info(' END: TEST CASE [%s]', GlobalConfigFiles.curr_tc_name)\n\n Util.set_color(Util.FOREGROUND_WHITE)\n GlobalConfigFiles.test_result = self.__testResult\n\n self.tmsPacket.TimeStamp = time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime())\n if GlobalConfigFiles.curr_tc_name != \"\":\n self.tmsPacket.writeTMSJson()\n\n return", "def _to_string(self):\n self.results.print_results()\n self.results.print_comparison()", "def get_result(self) -> Any:\n ...", "def evaluate(self):\n pass", "def evaluate(self):\n pass", "def _get_result(self):\n try:\n # get test data\n test_id = self._feature_processor.test_data_id\n test_feature = self._feature_processor.test_data_feature\n test_target = self._feature_processor.test_data_target\n\n # process data\n test_feature = test_feature.astype(\"float64\", errors='ignore')\n\n # predict\n predict_res = self._model.predict(test_feature)\n predict_res_df = pd.DataFrame(predict_res, columns=[PredictConstance.PRE])\n proba_res = self._model.predict_proba(test_feature)\n proba_res_df = pd.DataFrame([str(x) for x in proba_res],\n columns=[PredictConstance.PROBA])\n\n res = [test_id, predict_res_df, proba_res_df]\n # get model score\n if test_target is not None:\n res.append(test_target)\n model_auc = pre_utils.PredictUtils.get_roc_score(test_target, proba_res)\n model_score = 
pre_utils.PredictUtils.get_model_score(test_target, predict_res)\n model_score.update(model_auc)\n with open(os.path.join(self._result_path, PredictConstance.TEST_SCORE), \"w\") as ftp:\n ftp.write(str(model_score))\n\n # joint predict result\n self._joint_predict_result(res)\n\n return True\n except Exception as err:\n self.managerlogger.logger.error(\"base ml get result error: %s\" % err)\n self.errorlogger.logger.error(\"base ml get result error:\\n %s\" % traceback.format_exc())\n return False", "def _get_output_for_task_success(self, attempted, succeeded, total, student=None):\r\n # view task entry for task in progress\r\n instructor_task = self._create_progress_entry(student)\r\n task_id = instructor_task.task_id\r\n mock_result = Mock()\r\n mock_result.task_id = task_id\r\n mock_result.state = SUCCESS\r\n mock_result.result = {\r\n 'attempted': attempted,\r\n 'succeeded': succeeded,\r\n 'total': total,\r\n 'action_name': 'rescored',\r\n }\r\n output = self._test_get_status_from_result(task_id, mock_result)\r\n return output", "def getResults():", "def test_call_result_as_dict(self):\r\n exp_assignments = rdp_test1_expected_dict\r\n min_confidence = self.default_app.Params['Confidence']\r\n\r\n # Since there is some variation in the assignments, run\r\n # 10 trials and make sure we get the expected result at least once\r\n num_trials = 10\r\n unverified_seq_ids = set(exp_assignments.keys())\r\n for i in range(num_trials):\r\n obs_assignments = self.default_app(self.tmp_seq_filepath)\r\n for seq_id in list(unverified_seq_ids):\r\n obs_assignment, obs_confidence = obs_assignments[seq_id]\r\n exp_assignment, exp_confidence = exp_assignments[seq_id]\r\n self.assertTrue(obs_confidence >= min_confidence)\r\n if obs_assignment == exp_assignment:\r\n unverified_seq_ids.remove(seq_id)\r\n if not unverified_seq_ids:\r\n break\r\n\r\n messages = []\r\n for seq_id in unverified_seq_ids:\r\n messages.append(\r\n \"Unable to verify %s in %s trials\" % (seq_id, num_trials))\r\n messages.append(\" Expected: %s\" % exp_assignments[seq_id][0])\r\n messages.append(\" Observed: %s\" % obs_assignments[seq_id][0])\r\n messages.append(\" Confidence: %s\" % obs_assignments[seq_id][1])\r\n\r\n # make sure all taxonomic results were correct at least once\r\n self.assertFalse(unverified_seq_ids, msg='\\n'.join(messages))", "def get_result(self):\n\n x = self.rps_data[0][1].upper()\n y = self.rps_data[1][1].upper()\n if x[0] == '|':\n x = x[2:3]\n if y[0] == '|':\n y = y[2:3]\n if x == y:\n self.write_scores(\"Draw\")\n return \"Draw\"\n elif (x == 'R' and y == 'S') or (x == 'S' and y == 'P') or (x == 'P' and y == 'R'):\n self.write_scores(\"First\")\n return \"First\"\n else:\n self.write_scores(\"Second\")\n return \"Second\"", "def _AddResult(self):\n if not self._results:\n result = analyzer_result.AnalyzerResult()\n result.attribute_name = 'test_result'\n result.attribute_value = 'is_vegetable'\n self._results.append(result)", "def result(self):\n if self.__json:\n return self.__json[\"result\"]\n else:\n return {}", "def getTestResults():", "def evaluate(self) -> None:\n eval_results = {'segmentation': self.evaluate_segmentation()}\n if self.task == 'tracking':\n eval_results['tracking'] = self.evaluate_tracking()\n self.save_result(eval_results)", "def result(self):\n assert(self.__complete)\n return self.__result", "def get_results(self):\n error_dict = {'error_code_test': self.error_code_test,\n 'error_text_test': self.error_text_test}\n\n return self.testresults, error_dict, self.checkstats", "def 
get_eval_result(self):\n return self.content_eval", "def test_pass_result(self):\r\n data = {\r\n \"EdX-ID\": self.receipt_id,\r\n \"Result\": \"PASS\",\r\n \"Reason\": \"\",\r\n \"MessageType\": \"You have been verified.\"\r\n }\r\n json_data = json.dumps(data)\r\n response = self.client.post(\r\n reverse('verify_student_results_callback'), data=json_data,\r\n content_type='application/json',\r\n HTTP_AUTHORIZATION='test BBBBBBBBBBBBBBBBBBBB:testing',\r\n HTTP_DATE='testdate'\r\n )\r\n attempt = SoftwareSecurePhotoVerification.objects.get(receipt_id=self.receipt_id)\r\n self.assertEqual(attempt.status, u'approved')\r\n self.assertEquals(response.content, 'OK!')", "def get_result(self):\n config = self.bisect_config\n results_confidence = 0\n if self.culprit:\n results_confidence = self.api.m.math_utils.confidence_score(\n self.lkgr.values, self.fkbr.values)\n\n if self.failed:\n status = 'failed'\n elif self.bisect_over:\n status = 'completed'\n else:\n status = 'started'\n\n aborted_reason = None\n if self.failed_initial_confidence:\n aborted_reason = _FAILED_INITIAL_CONFIDENCE_ABORT_REASON\n elif self.failed_direction:\n aborted_reason = _DIRECTION_OF_IMPROVEMENT_ABORT_REASON\n return {\n 'try_job_id': config.get('try_job_id'),\n 'bug_id': config.get('bug_id'),\n 'status': status,\n 'buildbot_log_url': self._get_build_url(),\n 'bisect_bot': self.get_perf_tester_name(),\n 'command': config['command'],\n 'test_type': config['test_type'],\n 'metric': config['metric'],\n 'change': self.relative_change,\n 'score': results_confidence,\n 'good_revision': self.good_rev.commit_hash,\n 'bad_revision': self.bad_rev.commit_hash,\n 'warnings': self.warnings,\n 'aborted_reason': aborted_reason,\n 'culprit_data': self._culprit_data(),\n 'revision_data': self._revision_data()\n }", "def final_report(self):\n print('Final Count for', self.reason, self.successes, 'of', self.tests, 'tests passed')", "def save_result(self):\n self.print_to_console()", "def evaluate(self):\n predictions = self.model.predict(self.test[0])\n accuracy = accuracy_score(self.test[1], predictions)\n print(\"Accuracy:\", str(accuracy * 100) + \"%\")\n self.plot_results(predictions)", "def result_summary(self):\r\n summary = ['Ran %d commands to test %d scripts. %d of these commands '\r\n 'failed and %d scripts could not be tested due to errors.' 
%\r\n (self.total_commands, self.total_scripts,\r\n self._num_failures(), self._num_script_errors())]\r\n\r\n if self._num_failures() > 0:\r\n summary.append('Failed scripts were: %s' %\r\n ' '.join(self._failed_scripts()))\r\n\r\n for error_info in self.script_errors.values():\r\n if len(error_info[0]) > 0:\r\n summary.append(self._format_script_error_summary(\r\n error_info[0], error_info[1]))\r\n\r\n if self.warnings:\r\n summary.append('Warnings:')\r\n for warning in self.warnings:\r\n summary.append(' ' + warning)\r\n\r\n return '\\n'.join(summary)", "def result(self):\n return (\"HitRate@\" + str(self.length) + \": \"), (self.hit / self.test)", "def log_readable_accessibility_result(self, type):\n # logger.info(self.axe_instance.report(self.results[type]))\n type_results = self.axe_instance.report(self.results[type])\n results = type_results.split(\"Rule Violated:\")\n\n for result in results:\n if \"Impact Level\" in result:\n final_result = result.strip()\n chunks = final_result.split(\"\\n\")\n\n html_text = \"\"\"\n <style>\n #demo table, #demo th, #demo td{\n border: 1px dotted black;\n border-collapse: collapse;\n table-layout: auto;\n }\n </style>\n <table id=\"demo\" style=\"width:100%%\">\n <tr>\n <th style=\"width:50%%\">Issue</th>\n <th style=\"width:5%%\">URL</th>\n <th style=\"width:7%%\">Impact</th>\n <th style=\"width:10%%\">Tags</th>\n </tr>\n <tr>\n <td>%s</td>\n <td style=\"text-align:center\"><a href=\"%s\">Link</a></td>\n <td style=\"text-align:center\">%s</td>\n <td style=\"text-align:center\">%s</td>\n </tr>\n </table>\n <table id=\"demodesc\" style=\"width:100%%\">\n <tr>\n <th style=\"text-align:left\">Element Affected</th>\n </tr>\n <tr>\n <td>%s</td>\n </tr>\n </table>\n \"\"\"%(str(chunks[0]), (chunks[1].split(\"URL: \"))[-1], (chunks[2].split(\"Impact Level: \"))[-1],\n (chunks[3].split(\"Tags: \"))[-1], str((final_result.split(\"\\n\\tElements Affected:\\n\\t\"))[-1]))\n logger.info(html_text, html=True)\n\n # for index in range(len(chunk_results)):\n # logger.info(chunk_results[index])", "def result(self, result, limit):\n\n # If score is empty, this a direct query\n score = result[\"score\"]\n score = score if score is not None else 1.0\n\n self.console.print(\n f\"[bright_green]Question (by {result['questionuser']}): {result['question']} [{score:4f}][/bright_green]\",\n highlight=False,\n )\n self.console.print(f\"Id: {result['id']}\", highlight=False)\n self.console.print(f\"Last Activity: {result['date']}\", highlight=False)\n self.console.print(f\"Tags: {result['tags']}\")\n self.console.print(f\"Answer (by {result['answeruser']}):\\n\", highlight=False)\n self.console.print(self.markdown(result[\"answer\"]))\n self.console.print(f\"\\nReference: {result['reference']}\")\n\n # Print results divider\n if limit > 1:\n self.console.rule()", "def get_exam_status(self, course):\n return {'completed': False, 'exist': False, 'progress': 53}", "def getPredictedResult(self):\n output = self.svclassifier.predict([self.inputData])\n return output[0]", "def vulnerability_assessment(self) -> pulumi.Output['outputs.VulnerabilityAssessmentNoteResponse']:\n return pulumi.get(self, \"vulnerability_assessment\")", "def test_print_results(self):\n calculated = super().predict_and_print()\n self.assertEqual(calculated, EXP_PRINT_OUTPUT_BASE.format(.18, .1, 0.186, self.test_model.model.train_time) +\n \"Max tree max_depth: 1\\n\"\n \"Number of n_estimators: 1\\n\"\n \"Impurity method: entropy\\n\")", "def evaluation( self ) :\n\n return( self.__evaluation )", "def 
evaluate(self):\n try:\n self._evaluate()\n except Exception as e:\n if str(e) == \"assignment destination is read-only\":\n log.exception(\n \"Encountered error during scenario evaluation. Be sure \"\n + \"that the classifier's predict() isn't directly modifying the \"\n + \"input variable itself, as this can cause unexpected behavior in ART.\"\n )\n else:\n log.exception(\"Encountered error during scenario evaluation.\")\n sys.exit(1)\n\n if self.results is None:\n log.warning(f\"{self._evaluate} did not set self.results to a dict\")\n\n self.save()", "def score(self) -> Tuple[bool, str, float]:\n\n num_miss = np.sum(self.algorithm_data[:,FieldRolls.StepResult] != self.algorithm_data[:,FieldRolls.ResultPresentation])\n num_miss_perc = num_miss * 100/self.algorithm_data.shape[0]\n return True, \"\", num_miss_perc", "def print_output(self):\n print(\"Reference score: \" + str(self.PotTax_reference.sum().TFI))\n print(\"Intervention score: \" + str(self.PotTax_intervention.sum().TFI))\n return", "def evaluation(self):\n return self._evaluation", "def print_outcome(self) -> None:\n pass", "def results():\n \n to_predict_list = request.form.to_dict() \n to_predict_list = list(to_predict_list.values()) \n to_predict_list = list(map(float, to_predict_list)) \n result = ValuePredictor(to_predict_list) \n if int(result)== 1: \n prediction ='Run Martha, or you\\'re gonna get the sugar.'\n else: \n prediction ='Go ahead and have another donut Martha, you\\'re all good.' \n return render_template(\"results.html\",\n year=datetime.now().year,\n prediction = prediction\n )", "def show_results(self):\n print(\"Survey results:\")\n for response in self.responses:\n print('- ' + response)", "def test_self_assessment(self):\r\n\r\n # Navigate to the self-assessment problem and submit an essay\r\n self.course_nav.go_to_sequential('Self-Assessed')\r\n self.submit_essay('self', 'Censorship in the Libraries')\r\n\r\n # Fill in the rubric and expect that we get feedback\r\n rubric = self.open_response.rubric\r\n\r\n self.assertEqual(rubric.categories, [\"Writing Applications\", \"Language Conventions\"])\r\n rubric.set_scores([0, 1])\r\n rubric.submit('self')\r\n\r\n self.assertEqual(rubric.feedback, ['incorrect', 'correct'])\r\n\r\n # Verify the progress page\r\n self.progress_page.visit()\r\n scores = self.progress_page.scores('Test Section', 'Test Subsection')\r\n\r\n # The first score is self-assessment, which we've answered, so it's 1/2\r\n # The other scores are AI- and peer-assessment, which we haven't answered so those are 0/2\r\n self.assertEqual(scores, [(1, 2), (0, 2), (0, 2)])", "def _send_lti2_outcome(self):\r\n payload = textwrap.dedent(\"\"\"\r\n {{\r\n \"@context\" : \"http://purl.imsglobal.org/ctx/lis/v2/Result\",\r\n \"@type\" : \"Result\",\r\n \"resultScore\" : {score},\r\n \"comment\" : \"This is awesome.\"\r\n }}\r\n \"\"\")\r\n data = payload.format(score=0.8)\r\n return self._send_lti2(data)", "def result(self) -> Item:\n return self._result", "def print_results(self) -> None:\n print(\"=\" * 70, file=sys.stderr)\n total = 0.0\n max_points = 0.0\n for problem in self.problems:\n total += problem.run_tests()\n max_points += problem.max_grade\n print(f\"Total Grade: {total}/{max_points}\", file=sys.stderr)", "def present_solved_equation(self, result):\n print(\"the result to the equation is:\", result)", "def display_results(self):\n print \"Resultats pour le fichier : \\n================================\"\n print \"Moyenne arithmetique : \", self.results['arithAvg']\n print \"Moyenne 
quadratique : \", self.results['quadAvg']\n print \"Moyenne geometrique : \", self.results['geoAvg']\n print \"Moyenne harmonique : \", self.results['harmAvg']\n print \"Ecart a la moyenne : \", self.results['std']\n print \"Valeure maximale : \", self.results['max']\n print \"Valeurs minimale : \", self.results['min']\n print \"Variance : \", self.results['var']\n print \"Moments d'ordre R (jusqu'a 4) : \", self.results['momentsR']\n print \"Moments centrés d'ordre R (jusqu'a 4) : \", self.results['centralMomentsR']\n print \"Dissymetrie : \", self.results['dissym']\n print \"Coefficient d'applatissement : \", self.results['flattening']\n print \"Ecart type : \", self.results['ecartType']", "def __repr__(self):\n return (f'rsatoolbox.inference.Result\\n'\n f'containing evaluations for {self.n_model} models\\n'\n f'evaluated using {self.cv_method} of {self.method}'\n )", "def get_ideal_result(self):\n sim = Aer.get_backend('qasm_simulator')\n self.stats.ideal_distribution = execute(self.compiled_circ, sim).result().get_counts()", "def get_result(wd):\n try:\n result = wd.find_element_by_id(\"js-score\").text\n return result\n except:\n return \"N/A Result\"", "def setCheckResult(self, rlt):\n total_count = TestScriptSymbolTable.get_value_from_sym_tab(\"total_count\", TestScriptSymbolTable.test_result_tab) + 1\n TestScriptSymbolTable.insert_sym_tab(\"total_count\", total_count, TestScriptSymbolTable.test_result_tab)\n \n #if rlt == 'PASS':\n if 'PASS' in rlt:\n pass_count = TestScriptSymbolTable.get_value_from_sym_tab(\"pass_count\", TestScriptSymbolTable.test_result_tab) + 1\n TestScriptSymbolTable.insert_sym_tab(\"pass_count\", pass_count, TestScriptSymbolTable.test_result_tab)\n else:\n fail_count = TestScriptSymbolTable.get_value_from_sym_tab(\"fail_count\", TestScriptSymbolTable.test_result_tab) + 1\n TestScriptSymbolTable.insert_sym_tab(\"fail_count\", fail_count, TestScriptSymbolTable.test_result_tab)", "def _process_results(self):\n self.portfolio.create_backtest_result_dataframe()\n stats = self._show_stats()\n return stats", "def test_get_results_simple(self):\n\t\ttest = sentiment.LibraryRun(self.text3, self.lib)\n\t\ttest.do_run()\n\t\tobj_ut = test.get_results()\n\t\tself.assertEqual(obj_ut, ['.text id\\t.text score\\tneg hits\\t\\\npos hits\\ttotal hits\\ttotal wordcount\\n', '100\\t-1\\t2\\t0\\t2\\t7\\n'])", "def get_test_results(self):\n element = self.find_element_by_id(self.results_id, wait=True)\n\n if element:\n return element.text\n else:\n return False", "def _get_problem_report_results_str(self):\n return 'curr_rew: %0.3f, best_rew: %0.3f'%(self.curr_reward, self.curr_best_reward)", "def val(self):\n return self.output", "def val(self):\n return self.output", "def returnData(self):\r\n return self.returnRes" ]
[ "0.7455872", "0.7048662", "0.687895", "0.68733793", "0.6868782", "0.6851356", "0.68279386", "0.6774118", "0.67248684", "0.67248684", "0.67248684", "0.65798163", "0.65798163", "0.6516882", "0.6478505", "0.6459931", "0.645169", "0.64418316", "0.64348304", "0.6432639", "0.639982", "0.6385924", "0.6374701", "0.6368318", "0.63612336", "0.63388896", "0.6336285", "0.6308378", "0.6273809", "0.619285", "0.61546695", "0.61486435", "0.6140451", "0.6140451", "0.61257154", "0.6117071", "0.6112252", "0.61075246", "0.6106663", "0.61027324", "0.6100377", "0.60927457", "0.60904044", "0.6089315", "0.6081166", "0.6078048", "0.60617936", "0.6061454", "0.6054585", "0.6054585", "0.60481507", "0.60448974", "0.6044641", "0.60236734", "0.6020184", "0.6013553", "0.60089904", "0.59969735", "0.5990609", "0.5970148", "0.5963881", "0.5962178", "0.59620976", "0.5957298", "0.59538335", "0.59504116", "0.5945629", "0.5922979", "0.59061986", "0.59046936", "0.58985126", "0.5895648", "0.58692396", "0.5868474", "0.58612394", "0.5856009", "0.5855832", "0.5853069", "0.5851778", "0.5851156", "0.5845176", "0.5840127", "0.5829681", "0.5828683", "0.58258647", "0.58162", "0.5812824", "0.5812594", "0.5805723", "0.5802141", "0.57928103", "0.57849693", "0.5783451", "0.57800597", "0.5772011", "0.57669103", "0.57668173", "0.57654524", "0.57654524", "0.57650846" ]
0.6113301
36
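The documents in the two records above ("Details of the resource that was assessed" and "The result of the assessment") are property getters in the style of Pulumi's generated Python SDKs, and each query reads like the docstring stripped from its getter. For readers unfamiliar with that pattern, the sketch below shows roughly the context such a snippet is cut from; the class name and the decorators are an illustrative assumption about the surrounding file, not part of the stored rows:

from typing import Any

import pulumi


class Assessment(pulumi.CustomResource):  # hypothetical class name, for illustration only
    @property
    @pulumi.getter(name="resourceDetails")  # assumed decorator context; generated SDKs map camelCase wire names this way
    def resource_details(self) -> pulumi.Output[Any]:
        """Details of the resource that was assessed"""
        return pulumi.get(self, "resource_details")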
Test get_type_for_key_path with Simple Key Path
def test_get_type_for_key_path_simple_path(test_schema): assert get_type_for_key_path(test_schema, "Age") == "integer"
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_type_for_key_path_multi_level(test_schema):\n assert (\n get_type_for_key_path(test_schema, \"EmploymentInformation.Beneficiary.Name\")\n == \"string\"\n )", "def test_get_type_for_key_path_invalid_key_path(test_schema):\n assert get_type_for_key_path(test_schema, \"foo.bar\") == None", "def test_get_type_for_key_path_depth_one_level(test_schema):\n assert (\n get_type_for_key_path(test_schema, \"EmploymentInformation.OriginalHireDate\")\n == \"string\"\n )", "def GetKeyByPath(self, key_path):", "def _generic_test(self, pathstr, expected):\n self.assertEqual(self._get_pe_key(pathstr), expected)", "def key_type(self) -> global___Type:", "def type(path):", "def get(self, key, key_type=None):\n pass", "def GetSubkeyByPath(self, key_path):", "def test_getKey_tmpfile(self):\n filename = self.mktemp()\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! type=%r\" % type(key))", "def test_get_transaction_types_key(self):\n pass", "def load_key(self, type, keyid):\n pass", "def test_get_contact_person_types_key(self):\n pass", "def test_get_types(self):\n pass", "def load_key():", "def test_utils_get_dict_value_from_path_should_return_given_value(path, value):\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n assert ralph_utils.get_dict_value_from_path(dictionary, path) == value", "def test_getKey_nokey(self):\n filename = os.path.join(os.getcwd(), 'sekrit')\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! type=%r\" % type(key))", "def setKeyPath(*args, **kwargs)->List[AnyStr]:\n pass", "def _validate_key(sample, path):\n mapping_tmp = sample\n for key in path:\n try:\n mapping_tmp = mapping_tmp[key]\n except KeyError:\n return False\n except TypeError:\n return False\n return True", "def _load_key(client, entity_type, entity_id=None, parent_key=None):\n\n key = None\n if entity_id:\n key = client.key(entity_type, entity_id, parent=parent_key)\n else:\n # this will generate an ID\n key = client.key(entity_type)\n return key", "def _is_generic_key(key):\n for prefix in [\n \"graph_rewriter_config\",\n \"model\",\n \"train_input_config\",\n \"train_config\",\n \"eval_config\"]:\n if key.startswith(prefix + \".\"):\n return True\n return False", "def test_generate_key(self): \n k = Key().generate()\n self.assertRegex(k, \"[a-zA-Z0-9+\\/]+={0,2}\")", "def read_key(path_to: str) -> str:\n m_type, _ = guess_type(path_to)\n if m_type == types_map['.txt']:\n with open(path_to, 'r') as api_token_file:\n return api_token_file.read().strip()\n\n else:\n return path_to", "def key_type(self):\n raise exceptions.NotImplementedError()", "def testPath(self):\n self.cache._GetKeyPath.return_value = '/foo/bar'\n\n ref = cache.CacheReference(self.cache, 'key')\n self.assertEqual(ref.path, '/foo/bar')\n\n self.cache._GetKeyPath.assert_called_once_with('key')", "def __getitem__(self, key):\n path = self.path\n if self.path_is_string:\n path = [path]\n return path[key]", "def make_asset_key(self, asset_type, path):\r\n raise NotImplementedError()", "def readKey(self, keyPath):\n\t\ttry:", "def test_get_key(self):\n pairs = {'library': '~/home/documents/dms',\n 'key': 'value',\n }\n try:\n tempconfig = tempfile.NamedTemporaryFile(\n suffix=\".yaml\", delete=False)\n for key, value in pairs.items():\n tempconfig.write(\"{0}: {1}\\n\".format(\n key, value).encode('UTF-8'))\n tempconfig.close()\n config = easydms.config.Config(tempconfig.name)\n\n for key, value in pairs.items():\n 
self.assertEqual(config.getKey(key, \"Spam\"), value)\n for key, value in pairs.items():\n self.assertEqual(config.getRequiredKey(key), value)\n finally:\n os.remove(tempconfig.name)", "def getkey(attrstr, paths=None, prompt=True, promptpass=False):\n paths = paths or DEFAULT_PATHS\n for path in paths:\n filepath = os.path.expanduser(path)\n if not os.path.exists(filepath):\n continue\n with open(filepath, 'r') as handle:\n value = rget(json.load(handle), attrstr)\n if value is None:\n continue\n if isinstance(value, dict):\n raise Exception(f'Ambiguous key: {attrstr}')\n if isinstance(value, list):\n return value\n if not isinstance(value, str):\n return value\n if not value.startswith('b64:'):\n return value\n return b64decode(value[4:]).decode('utf8')\n promptfunc = getpass if promptpass else input\n if prompt:\n return promptfunc(f'Enter {attrstr}: ')\n pathstr = '\\n' + '\\n'.join(paths)\n raise Exception(f'Key not found: {attrstr}{pathstr}')", "def test_add_keys_multiple_times(self):\n path = _path.Path.from_str(\"RootOper.Foo(*)\")\n with self.assertRaisesRegex(\n ValueError, \"Path element already has key information\"):\n path(4)", "def create_key(\n self,\n path: Union[bytes, str],\n type_: Optional[Union[bytes, str]] = None, # TODO enum\n policy_path: Optional[Union[bytes, str]] = None,\n auth_value: Optional[Union[bytes, str]] = None,\n exists_ok: bool = False,\n ) -> bool:\n path = _to_bytes_or_null(path)\n type_ = _to_bytes_or_null(type_)\n policy_path = _to_bytes_or_null(policy_path)\n auth_value = _to_bytes_or_null(auth_value)\n ret = lib.Fapi_CreateKey(self._ctx, path, type_, policy_path, auth_value)\n _chkrc(\n ret, acceptable=lib.TSS2_FAPI_RC_PATH_ALREADY_EXISTS if exists_ok else None\n )\n return ret == lib.TPM2_RC_SUCCESS", "def _create_key(_type, name):\n return \"{}{}{}\".format(_type, DiagnosticManager._type_separator, name)", "def ReadKey(type, key):\n try:\n return {keyinfo.AES: AesKey.Read,\n keyinfo.HMAC_SHA1: HmacKey.Read,\n keyinfo.DSA_PRIV: DsaPrivateKey.Read,\n keyinfo.RSA_PRIV: RsaPrivateKey.Read,\n keyinfo.DSA_PUB: DsaPublicKey.Read,\n keyinfo.RSA_PUB: RsaPublicKey.Read}[type](key)\n except KeyError:\n raise errors.KeyczarError(\"Unsupported key type: %s\" % type)", "def testComparable(self):\n path_spec = tsk_path_spec.TSKPathSpec(\n location=u'/test', parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n data_stream=u'test', location=u'/test', parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, data stream: test, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n inode=1, parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, inode: 1',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n location=u'/test', inode=1, parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, inode: 1, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)", "def test_get(schema, schemas, key, expected_value):\n returned_artifacts = 
artifacts.model.get(schemas, schema)\n\n assert getattr(returned_artifacts, key) == expected_value", "def _check_key_type(cls, key: Any) -> K:\n if not isinstance(key, cls.keytype):\n raise KeyError(\n f\"{cls!r} accepts only keys of type {cls.keytype!r}, \"\n f\"got {type(key)!r}\"\n )\n return cast(K, key)", "def testTheType(self, theTestType):\n \n pass", "def test_traversal__path_type_view_name(path, resource_type, view_name):\n from pyramid.traversal import traverse\n root_resource = root_resource_factory()\n t = traverse(root_resource, path)\n assert isinstance(t['context'], resource_type)\n assert t['view_name'] == view_name", "def test_key_str(self):\n key = Key({\"warning\": False, \"inCar\": True})\n\n string = str(key)\n assert isinstance(string, str)\n assert string == \"{'warning': False, 'in_car': True}\"", "def test_valid_key(self):\n f = lws.valid_data_key\n assert f('string', int, r'string') is False\n assert f('string', str, r'test') is False\n assert f(123, int, '123') is False\n assert f(123.00, float, '123') is False\n assert f('123', str, r'[0-9]*') is True", "def test_string_key():\n\tbackup_and_restore(\n\t\tlambda context: put_keys(lib.SET, STRING_KEYS, \"foobar\", False),\n\t\tNone,\n\t\tlambda context: check_keys(lib.SET, STRING_KEYS, \"foobar\", False)\n\t)", "def _get_key(args):\n\n input_key = args.input_key\n key = None\n if input_key:\n from pathlib import Path\n key_file = Path(input_key)\n if key_file.is_file():\n key = load_key(key_file)\n\n if not key:\n key = key_handler(args)\n\n return key", "def test_get_key_digest_with_only_ns(self):\n with pytest.raises(TypeError) as typeError:\n self.as_connection.get_key_digest(\"test\")", "def get_type_from_path(path):\n return path.split('.')[-1]", "def _get_key(self, object_type, user_key = None):\n\t\tif not user_key and not self.object_type_keys.has_key(object_type):\n\t\t\traise ParserError(\"Unknown key for object type: %s\\n\" % object_type)\n\n\t\t## Use a default key\n\t\tif not user_key:\n\t\t\tuser_key = self.object_type_keys[object_type]\n\n\t\treturn user_key", "def path_type(mode: str, docstring: Optional[str] = None, **kwargs) -> type:\n Path._check_mode(mode)\n name = \"Path_\" + mode\n key_name = \"path \" + \"\".join(sorted(mode))\n\n skip_check = get_private_kwargs(kwargs, skip_check=False)\n if skip_check:\n from ._deprecated import path_skip_check_deprecation\n\n path_skip_check_deprecation()\n name += \"_skip_check\"\n key_name += \" skip_check\"\n\n register_key = (key_name, str)\n if register_key in registered_types:\n return registered_types[register_key]\n\n class PathType(Path):\n _expression = name\n _mode = mode\n _skip_check = skip_check\n _type = str\n\n def __init__(self, v, **k):\n super().__init__(v, mode=self._mode, skip_check=self._skip_check, **k)\n\n restricted_type = type(name, (PathType,), {\"__doc__\": docstring})\n add_type(restricted_type, register_key, type_check=_is_path_type)\n\n return restricted_type", "def test_key_use() -> None:\n # check key usage method\n # don't test if all keys are translated, crowdin will monitor it\n lib_folder = Path(__file__).parents[1] / \"sepal_ui\"\n\n assert \"test_key\" in ms.key_use(lib_folder, \"ms\")\n\n return", "def _validate_type(self, key, type_):\n if type_ is None:\n type_ = \"\"\n \n if not isinstance(type_, (str, unicode)):\n raise TypeError(\"FileLink.type should be a str or unicode, \"\n \"not %s\" % type_.__class__.__name__)\n \n return type_", "def get_address_key_file(addresses_path, address_type, address_or_key, 
name):\n if not address_type in ['payment', 'stake']:\n print('Unknown address type :', address_type)\n return None\n if address_or_key == 'address':\n ext = '.addr'\n elif address_or_key == 'signing_key':\n ext = '.skey'\n elif address_or_key == 'verification_key':\n ext = '.vkey'\n else:\n print('Unknown type :', address_or_key)\n return None\n\n addr_key_file = get_address_path(addresses_path, name)+address_type+name+ext\n return addr_key_file", "def filetype_of(path: Path) -> str:\n\n filetype = \"unsorted\"\n\n if path.suffix == \".json\":\n filetype = \"json\"\n\n elif path.suffix == \".txt\":\n if search(pattern=\"v[0-9][0-9]_[0-9]\", string=path.stem):\n filetype = \"onsets\"\n elif \"subject_info\" in path.stem:\n filetype = \"subject info\"\n\n elif path.suffix == \".nii\":\n if \"_t1_\" in path.stem:\n filetype = \"anat\"\n elif \"_lessvoids_\" in path.stem:\n filetype = \"func\"\n elif \"field_map\" in path.stem:\n filetype = \"fieldmap\"\n\n return filetype", "def test_get_key_digest_with_string_key(self):\n\n digest = self.as_connection.get_key_digest(\"test\", \"demo\",\n \"get_digest_key\")\n\n assert isinstance(digest, bytearray)", "def _check_key(self, key):\n raise NotImplementedError", "def _get_key(var_type, attr):\n if attr is None:\n return var_type\n return f'{var_type}{SEP}{attr}'", "def test_get_node_type_name(self):\n pass", "def test_read_json(self, magic_0, magic_1):\n expected = {\n 'key_1': [1, 2, 3, 4, 5],\n 'key_2': ['a', 'b', 'c', 'd', 'e']\n }\n result = helpers.read_json(r\"path\")\n self.assertEqual(expected, result)", "def _get_raw_key(args, key_field_name):\n flag_key = getattr(args, key_field_name, None)\n if flag_key is not None:\n return flag_key\n return _read_key_store_file().get(key_field_name)", "def test_aws_service_api_keypair_get(self):\n pass", "def key_to_obj_type(self, key):\n\t\tif key.endswith('ids'):\n\t\t\tkey = key[0:-1]\n\t\tif key == 'order_id' or key == 'user_id':\n\t\t\treturn key[0:-2]\n\t\telif key == 'partner_id' or key == 'demand_partner_id':\n\t\t\treturn 'account'\n\t\telif key == 'openx_buyer_id':\n\t\t\treturn 'buyer'\n\t\telse:\n\t\t\treturn key[0:-3]", "def _get_pe_key(self, pathstr):\n path = _path.Path.from_str(pathstr)\n return path.elems()[-1].key", "def _get_raw_key(self, key_id):", "def test_search_key() -> None:\n # assert that having a wrong key at root level\n # in the json will raise an error\n key = \"toto\"\n d = {\"toto\": {\"a\": \"b\"}, \"c\": \"d\"}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n # Search when the key is in a deeper nested level\n key = \"nested_key\"\n d = {\"en\": {\"level1\": {\"level2\": {\"nested_key\": \"value\"}}}}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n return", "def test_two_keys():\n test = [{'key1': {'key2': 'val1'}}, ['key1', 'key2']]\n assert fetch_data_by_keys(*test).unwrap() == 'val1'", "def test_decorator_keyobj(testchannel, keycls):\n keyobj = keycls()\n\n @testchannel(keyobj=keyobj)\n async def one():\n \"\"\"one\"\"\"\n\n getfunc = (keyobj.get if isinstance(keyobj, MutableMapping)\n else partial(getattr, keyobj))\n assert getfunc('key', None) == 0", "def test_key_type(self):\n self.failureResultOf(self.producer.send_messages(\"topic\", key=\"key\", msgs=[b\"msg\"]), TypeError)", "def test_get_path_valid(self, audio_collection, key_idx, file_idx):\n # TODO: the hard-coded parametrization is gross.\n keys = list(audio_collection.audio_stores.keys())\n keys.sort()\n store_name = keys[key_idx]\n assert 
os.path.exists(audio_collection.get_path(\n store_name, file_idx))", "def _get_key(key_or_id, key_cls):\n return (\n key_cls.from_string(key_or_id)\n if isinstance(key_or_id, str)\n else key_or_id\n )", "def test_get_key_digest_with_only_ns_and_set(self):\n with pytest.raises(TypeError) as typeError:\n self.as_connection.get_key_digest(\"test\", \"set\")", "def test_keys(app, client):\n response = client.get(\"/keys\")\n assert response.status_code == 200\n assert len(response.json[\"keys\"]) > 0", "def __getitem__(self, key):\n return self.types[key]", "def __guess_key_type(self):\n if self.__key:\n if isinstance(self.__key, types.IntType) or \\\n isinstance(self.__key, types.LongType) or \\\n isinstance(self.__key, types.FloatType):\n return NUMBER_KEY_TYPE\n elif isinstance(self.__key, types.StringTypes):\n return STRING_KEY_TYPE\n\n return UNKNOWN_KEY_TYPE", "def key_from_path(db_table, value):\r\n if isinstance(value, (int, long)):\r\n ValidateInteger(value, 'id')\r\n return Key.from_path(db_table, value)", "def _get_key_path(self, key):\n if not isinstance(key, str):\n raise TypeError(\"pickle keys must be strings\")\n path = abspath(join(self._path, key + \".pkl\"))\n if not path.startswith(abspath(self._path)):\n raise OSError(joins(\"invalid path to pickle file:\", path))\n return path", "def test_pathlib_obj(self):\n \"\"\"\n We do this because pygame functions internally use pg_EncodeString\n to decode the filenames passed to them. So if we test that here, we\n can safely assume that all those functions do not have any issues\n with pathlib objects\n \"\"\"\n encoded = encode_string(pathlib.PurePath(\"foo\"), \"utf-8\")\n self.assertEqual(encoded, b\"foo\")\n\n encoded = encode_string(pathlib.Path(\"baz\"))\n self.assertEqual(encoded, b\"baz\")", "def test_create_api_key(self):\n pass", "def test_create_node_from_key(self):\n created_node = self.test_graph.create_node_from_key(\n 'package.class$nested')\n self.assertEqual(created_node.package, 'package')\n self.assertEqual(created_node.class_name, 'class')\n self.assertEqual(created_node.name, 'package.class')", "def test_string_key_stored():\n\tbackup_and_restore(\n\t\tlambda context: put_keys(lib.SET, STRING_KEYS, \"foobar\", True),\n\t\tNone,\n\t\tlambda context: check_keys(lib.SET, STRING_KEYS, \"foobar\", True)\n\t)", "def create_key ():", "def PathType(path_str):\n orig = path_str\n path_str = os.path.expanduser(path_str) # Expand user path if necessary\n path_str = os.path.abspath(path_str)\n\n if os.path.exists(path_str):\n return path_str\n else:\n raise argparse.ArgumentError(message='\"{}\" is not a valid path'.format(orig))", "def path_type(cls, path):\n if os.path.isdir(path):\n return 'package'\n else:\n return 'object'", "def first_part_is(self, key):\n if self.path_is_string:\n return self.path.startswith(str(key) + '.')\n if not self.path:\n return not bool(key)\n if self.path_type is list:\n return self.path[0] == key\n if self.path_type is Path:\n return self.path.first_part_is(key)\n return self.joined().startswith(str(key) + '.')", "def drive_type():", "def test_split_nested_class_from_key_no_nested(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertIsNone(part2)", "def test_getKey_keyexists(self):\n filename = self.mktemp()\n with open(filename, 'wb') as fh:\n fh.write(SEKRIT_KEY)\n fh.flush()\n\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! 
type=%r\" % type(key))\n self.assertEqual(SEKRIT_KEY, key,\n \"\"\"The example key and the one read from file differ!\n key (in hex): %s\n SEKRIT_KEY (in hex): %s\"\"\"\n % (key.encode('hex'), SEKRIT_KEY.encode('hex')))", "def key_type(self):\n return self._key_type", "def test_get_path_returns_none_for_bad_key(\n self, audio_store_and_expected_files, key):\n audio_store = audio_store_and_expected_files[0]\n assert audio_store.get_path(key) is None", "def _get_by_type(self, key, operation, create, type_, default, return_default=True, decode=False):\n key = self._encode(key)\n if self.type(key) in [type_, b'none']:\n if create:\n val = self.redis.setdefault(key, default)\n if decode:\n val = self._decode(val)\n return val\n else:\n val = self.redis.get(key, default if return_default else None)\n if decode:\n val = self._decode(val)\n return val\n\n raise TypeError(\"{} requires a {}\".format(operation, type_))", "def _GetKeyString(self):", "def _GetKeyString(self):", "def testStorePath(self):\n store_path = dicom_path.FromString(tdpu.STORE_PATH_STR)\n self._AssertStoreAttributes(store_path)\n self.assertIsNone(store_path.study_uid)\n self.assertIsNone(store_path.series_uid)\n self.assertIsNone(store_path.instance_uid)\n self.assertEqual(store_path.type, dicom_path.Type.STORE)\n self.assertEqual(store_path.dicomweb_path_str, tdpu.DICOMWEB_PATH_STR)\n self.assertEqual(str(store_path), tdpu.STORE_PATH_STR)\n self.assertEqual(str(store_path.GetStorePath()), tdpu.STORE_PATH_STR)", "def _split_key(cls, logical_key):\n if isinstance(logical_key, str):\n path = logical_key.split('/')\n elif isinstance(logical_key, (tuple, list)):\n path = logical_key\n else:\n raise TypeError('Invalid logical_key: %r' % logical_key)\n return path", "def lookup(self, key):", "def kind(self):\n return self.__key.kind()", "def test_anchortype_nokey(testchannel):\n key = testchannel.add(list)\n for i in range(5):\n testchannel.add(list)\n testchannel.remove(key)\n\n with pytest.raises(KeyError) as err:\n testchannel.anchortype(key)\n\n assert err.value.args == (key, )", "def get_keypair_keypath ( aws_account_type ) :\n return '/keypairs/' + aws_account_type + '/'", "def test_key_kind(self):\r\n parent = ParentKind.objects.create(pk=1)\r\n child = ChildKind.objects.create(\r\n pk=2, parent=parent, parents=[parent.pk])\r\n self.assertEqual(child.parent.pk, parent.pk)\r\n self.assertEqual(child.parents[0], parent.pk)\r\n\r\n from google.appengine.api.datastore import Get\r\n from google.appengine.api.datastore_types import Key\r\n parent_key = Key.from_path(parent._meta.db_table, 1)\r\n child_key = Key.from_path(child._meta.db_table, 2)\r\n parent_entity = Get(parent_key)\r\n child_entity = Get(child_key)\r\n parent_column = child._meta.get_field('parent').column\r\n parents_column = child._meta.get_field('parents').column\r\n self.assertEqual(child_entity[parent_column], parent_key)\r\n self.assertEqual(child_entity[parents_column][0], parent_key)", "def get_prop_type(value, key=None):\n if isinstance(key, str):\n # Encode the key as utf-8\n key = key.encode('utf-8', errors='replace')\n\n # Deal with the value\n if isinstance(value, bool):\n tname = 'bool'\n\n elif isinstance(value, int):\n tname = 'float'\n value = float(value)\n\n elif isinstance(value, float):\n tname = 'float'\n\n elif isinstance(value, str):\n tname = 'string'\n value = value.encode('utf-8', errors='replace')\n\n elif isinstance(value, dict):\n tname = 'object'\n\n else:\n tname = 'string'\n value = str(value)\n \n #If key is a byte value, decode it 
to string\n try:\n key = key.decode('utf-8')\n except AttributeError:\n pass\n\n return tname, value, key", "def test_key(score1, score2, measure=0, part=0):\n\n\tdiff = ScoreDiff(score1, score2, path)\n return diff.have_same_key_signature(measure, part)", "def test_fpath():\n\n assert fpath(None, 'data.json') == 'data.json'\n assert fpath('/path/', 'data.json') == '/path/data.json'\n assert fpath(Path('/path/'), 'data.json') == '/path/data.json'", "def test_map_missing_key_encountered():\n with pytest.raises(KeyError):\n Map().read_key(10, b\"\")" ]
[ "0.7844626", "0.75669205", "0.74641645", "0.7052028", "0.65783304", "0.65595055", "0.65575176", "0.62750286", "0.59552884", "0.59359103", "0.5933874", "0.59200144", "0.58311874", "0.5820171", "0.5779001", "0.5731968", "0.5724418", "0.5718408", "0.5717397", "0.5695019", "0.5671227", "0.56208533", "0.56131315", "0.56086844", "0.5582928", "0.5576313", "0.55444163", "0.55284685", "0.55279166", "0.5527288", "0.5517636", "0.5506267", "0.55031824", "0.54286253", "0.54270625", "0.54123306", "0.5410068", "0.5402076", "0.5390512", "0.5384795", "0.5375642", "0.5371566", "0.53462195", "0.5341597", "0.53208506", "0.5319843", "0.5307899", "0.53065157", "0.529529", "0.5291112", "0.528146", "0.52714694", "0.5267981", "0.52607596", "0.52588487", "0.52584815", "0.5257979", "0.5257445", "0.5254677", "0.5249839", "0.524678", "0.5240708", "0.52357006", "0.5233072", "0.52250975", "0.522381", "0.52227575", "0.5212757", "0.52016836", "0.5188688", "0.5188286", "0.51856583", "0.5184611", "0.5179906", "0.51788706", "0.51634014", "0.51585644", "0.51514363", "0.514859", "0.5141749", "0.514064", "0.5132606", "0.5128319", "0.51273876", "0.5121558", "0.5121373", "0.5116623", "0.5110159", "0.5110159", "0.5099787", "0.5096068", "0.5096044", "0.5093612", "0.5089727", "0.5087363", "0.50837904", "0.507529", "0.50723505", "0.5072094", "0.507047" ]
0.84244055
0
Test get_type_for_key_path with key path of one level deep
def test_get_type_for_key_path_depth_one_level(test_schema): assert ( get_type_for_key_path(test_schema, "EmploymentInformation.OriginalHireDate") == "string" )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_type_for_key_path_multi_level(test_schema):\n assert (\n get_type_for_key_path(test_schema, \"EmploymentInformation.Beneficiary.Name\")\n == \"string\"\n )", "def test_get_type_for_key_path_simple_path(test_schema):\n assert get_type_for_key_path(test_schema, \"Age\") == \"integer\"", "def test_get_type_for_key_path_invalid_key_path(test_schema):\n assert get_type_for_key_path(test_schema, \"foo.bar\") == None", "def GetKeyByPath(self, key_path):", "def type(path):", "def GetSubkeyByPath(self, key_path):", "def test_search_key() -> None:\n # assert that having a wrong key at root level\n # in the json will raise an error\n key = \"toto\"\n d = {\"toto\": {\"a\": \"b\"}, \"c\": \"d\"}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n # Search when the key is in a deeper nested level\n key = \"nested_key\"\n d = {\"en\": {\"level1\": {\"level2\": {\"nested_key\": \"value\"}}}}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n return", "def test_utils_get_dict_value_from_path_should_return_given_value(path, value):\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n assert ralph_utils.get_dict_value_from_path(dictionary, path) == value", "def _create_path(root, dict_type, path):\n for sub_path in path:\n if not isinstance(root.get(sub_path, None), dict):\n root[sub_path] = dict_type()\n\n root = root[sub_path]\n\n return root", "def test_split_nested_class_from_key_no_nested(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertIsNone(part2)", "def test_split_nested_class_from_key(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class$nested')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertEqual(part2, 'nested')", "def __getitem__(self, key):\n path = self.path\n if self.path_is_string:\n path = [path]\n return path[key]", "def _generic_test(self, pathstr, expected):\n self.assertEqual(self._get_pe_key(pathstr), expected)", "def _validate_key(sample, path):\n mapping_tmp = sample\n for key in path:\n try:\n mapping_tmp = mapping_tmp[key]\n except KeyError:\n return False\n except TypeError:\n return False\n return True", "def test_type_mapping(registry, item_type):\n with mappings_use_nested(False):\n mapping = type_mapping(registry[TYPES], item_type)\n assert mapping\n assert 'properties' in mapping\n if item_type == 'TestingLinkTargetElasticSearch':\n assert mapping['properties']['reverse_es'].get('type', 'object') != 'nested' # should not occur here\n\n # check calculated properties on objects/arrays of objects are mapped correctly\n if item_type == 'TestingCalculatedProperties':\n assert mapping['properties']['nested']['properties']['key']['type'] == 'text'\n assert mapping['properties']['nested']['properties']['value']['type'] == 'text'\n assert mapping['properties']['nested']['properties']['keyvalue']['type'] == 'text'\n assert mapping['properties']['nested2']['properties']['key']['type'] == 'text'\n assert mapping['properties']['nested2']['properties']['value']['type'] == 'text'\n assert mapping['properties']['nested2']['properties']['keyvalue']['type'] == 'text'", "def path_lookup(data_obj, xj_path, create_dict_path=False):\n\n if not xj_path or xj_path == '.':\n return data_obj, True\n\n res = list(split(xj_path, '.', maxsplit=1))\n top_key = res[0]\n leftover = res[1] if len(res) > 1 else None\n if top_key == '*':\n return _full_sub_array(data_obj, leftover, create_dict_path)\n elif 
top_key.startswith('@'):\n return _single_array_element(data_obj, leftover, top_key,\n create_dict_path)\n else:\n val_type, top_key = _clean_key_type(top_key)\n top_key = unescape(top_key)\n if top_key in data_obj:\n value = data_obj[top_key]\n if val_type is not None and not isinstance(value, val_type):\n raise XJPathError(\n 'Key %s expects type \"%s\", but found value type is \"%s\"' %\n (top_key, val_type.__name__, type(value).__name__))\n if leftover:\n return path_lookup(value, leftover, create_dict_path)\n else:\n return value, True\n else:\n if val_type is not None:\n if not isinstance(data_obj, dict):\n raise XJPathError('Accessed object must be a dict type '\n 'for the key: \"%s\"' % top_key)\n if create_dict_path:\n data_obj[top_key] = val_type()\n else:\n return None, False\n if leftover:\n return path_lookup(data_obj[top_key], leftover,\n create_dict_path)\n else:\n return data_obj[top_key], True\n return None, False", "def test_type_mapping_nested(registry):\n with mappings_use_nested(True):\n mapping = type_mapping(registry[TYPES], 'TestingLinkTargetElasticSearch')\n assert mapping\n assert 'properties' in mapping\n # if type is defined on this field, it should beg object, NOT nested since it is not enabled on this field\n assert mapping['properties']['reverse_es'].get('type', 'object') == 'object'", "def get_key_recursive(key, config):\n if not isinstance(key, list):\n key = key.split(\"/\") # subdict indexing split using slash\n assert key[0] in config, f\"missing key '{key[0]}' in metadata dictionary: {config}\"\n val = config[key[0]]\n if isinstance(val, (dict, collections.OrderedDict)):\n assert len(key) > 1, \"missing keys to index metadata subdictionaries\"\n return get_key_recursive(key[1:], val)\n return int(val)", "def key_type(self) -> global___Type:", "def _is_generic_key(key):\n for prefix in [\n \"graph_rewriter_config\",\n \"model\",\n \"train_input_config\",\n \"train_config\",\n \"eval_config\"]:\n if key.startswith(prefix + \".\"):\n return True\n return False", "def test_set_with_deep_key_path_with_string():\n deep_key_path = 'deep.key.path'\n test_value = 'deep key path value'\n\n config.set(deep_key_path, test_value)\n assert isinstance(config.get('deep'), dict)\n assert config.get(deep_key_path) == test_value", "def test_traversal__path_type_view_name(path, resource_type, view_name):\n from pyramid.traversal import traverse\n root_resource = root_resource_factory()\n t = traverse(root_resource, path)\n assert isinstance(t['context'], resource_type)\n assert t['view_name'] == view_name", "def test_create_node_from_key(self):\n created_node = self.test_graph.create_node_from_key(\n 'package.class$nested')\n self.assertEqual(created_node.package, 'package')\n self.assertEqual(created_node.class_name, 'class')\n self.assertEqual(created_node.name, 'package.class')", "def get_by_dot_path(dictionary: Dict, key_path: str) -> Any:\n return get_by_list_of_keys(dictionary, key_path.split(\".\"))", "def _get_by_path(tree, keys):\n return reduce(getitem, keys, tree)", "def _get_by_path(tree, keys):\n return reduce(getitem, keys, tree)", "def get_data(self,key=''):\n path = key.split('.')\n itm = self._root \n for ik,k in enumerate(path):\n child_found = False\n try: \n itm = itm[k]\n child_found = True\n except:\n try: \n itm = itm[int(k)]\n child_found = True\n except:\n longer_key = k\n for kk in path[ik+1:]:\n longer_key += '.'\n try: \n itm = itm[longer_key]\n child_found = True\n except: \n pass\n longer_key += kk\n try: \n itm = itm[longer_key]\n child_found 
= True\n except: \n pass\n if not child_found:\n raise KeyError(key)\n return itm", "def pathlookup(obj_or_path_tuple, depth=None, include_origin=True):", "def test_split_nested_class_from_key_numeric(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class$1')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertEqual(part2, '1')", "def test_two_keys():\n test = [{'key1': {'key2': 'val1'}}, ['key1', 'key2']]\n assert fetch_data_by_keys(*test).unwrap() == 'val1'", "def test_add_keys_multiple_times(self):\n path = _path.Path.from_str(\"RootOper.Foo(*)\")\n with self.assertRaisesRegex(\n ValueError, \"Path element already has key information\"):\n path(4)", "def get(self, key, key_type=None):\n pass", "def test_utils_set_dict_value_from_path_updating_fields():\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n ralph_utils.set_dict_value_from_path(dictionary, [\"foo\", \"bar\"], \"baz\")\n assert dictionary == {\"foo\": {\"bar\": \"baz\"}}", "def test_access_nested_map_exception(self, nested_map, path):\n with self.assertRaises(KeyError) as error:\n access_nested_map(nested_map, path)\n self.assertEqual(error.exception.args[0], path[-1])", "def test_access_nested_map(self, nested_map, path, result):\n self.assertEqual(access_nested_map(nested_map, path), result)", "def get_type_from_path(path):\n return path.split('.')[-1]", "def strict_path_lookup(data_obj, xj_path, force_type=None):\n\n value, exists = path_lookup(data_obj, xj_path)\n if exists:\n if force_type is not None:\n if not isinstance(value, force_type):\n raise XJPathError('Found value is a wrong type',\n (xj_path, force_type))\n return value\n else:\n raise XJPathError('Path does not exist', (xj_path,))", "def test_set_with_deep_key_path_with_list():\n deep_key_path = ('second', 'deep', 'key', 'path')\n test_value = 'second deep key path value'\n\n config.set(deep_key_path, test_value)\n assert isinstance(config.get('second'), dict)\n assert config.get(deep_key_path) == test_value", "def test_utils_set_dict_value_from_path_creating_new_fields():\n dictionary = {}\n ralph_utils.set_dict_value_from_path(dictionary, [\"foo\", \"bar\"], \"baz\")\n assert dictionary == {\"foo\": {\"bar\": \"baz\"}}", "def get_path(self, key):\n return get_path(self, key)", "def _verify_key_exists(self, key, stack_path=[]):\r\n error_msg = (\r\n \"Could not find the {key_type} key '{key}' in: {stack_path}. 
\"\r\n \"Found {keys_found} instead.\"\r\n )\r\n try:\r\n dk = stack_path[0]\r\n fk = stack_path[1]\r\n xk = stack_path[2]\r\n yk = stack_path[3]\r\n vk = stack_path[4]\r\n except:\r\n pass\r\n try:\r\n if len(stack_path) == 0:\r\n if key not in self:\r\n key_type, keys_found = 'data', self.keys()\r\n stack_path = 'stack'\r\n raise ValueError\r\n elif len(stack_path) == 1:\r\n if key not in self[dk]:\r\n key_type, keys_found = 'filter', self[dk].keys()\r\n stack_path = 'stack[{dk}]'.format(\r\n dk=dk)\r\n raise ValueError\r\n elif len(stack_path) == 2:\r\n if key not in self[dk][fk]:\r\n key_type, keys_found = 'x', self[dk][fk].keys()\r\n stack_path = 'stack[{dk}][{fk}]'.format(\r\n dk=dk, fk=fk)\r\n raise ValueError\r\n elif len(stack_path) == 3:\r\n if key not in self[dk][fk][xk]:\r\n key_type, keys_found = 'y', self[dk][fk][xk].keys()\r\n stack_path = 'stack[{dk}][{fk}][{xk}]'.format(\r\n dk=dk, fk=fk, xk=xk)\r\n raise ValueError\r\n elif len(stack_path) == 4:\r\n if key not in self[dk][fk][xk][yk]:\r\n key_type, keys_found = 'view', self[dk][fk][xk][yk].keys()\r\n stack_path = 'stack[{dk}][{fk}][{xk}][{yk}]'.format(\r\n dk=dk, fk=fk, xk=xk, yk=yk)\r\n raise ValueError\r\n except ValueError:\r\n print error_msg.format(\r\n key_type=key_type,\r\n key=key,\r\n stack_path=stack_path,\r\n keys_found=keys_found\r\n )", "def _s_first_magic(scope, key, _t):\n err = None\n try:\n cur = scope[key]\n except KeyError as e:\n err = PathAccessError(e, Path(_t), 0) # always only one level depth, hence 0\n if err:\n raise err\n return cur", "def _load_key(client, entity_type, entity_id=None, parent_key=None):\n\n key = None\n if entity_id:\n key = client.key(entity_type, entity_id, parent=parent_key)\n else:\n # this will generate an ID\n key = client.key(entity_type)\n return key", "def testPath(self):\n self.cache._GetKeyPath.return_value = '/foo/bar'\n\n ref = cache.CacheReference(self.cache, 'key')\n self.assertEqual(ref.path, '/foo/bar')\n\n self.cache._GetKeyPath.assert_called_once_with('key')", "def get_by_path(data: Dict[str, T], path: Sequence[str]) -> T:\n return reduce(operator.getitem, path, data)", "def check_and_resolve_path(key, parameter):\n if 'paths' in key:\n return [resolve_relative_path(p) for p in parameter]\n if 'path' in key:\n return resolve_relative_path(parameter)\n return parameter", "def test_get_transaction_types_key(self):\n pass", "def access_path(data: dict or any, path: list[str]) -> any:\n if path:\n first = path[0]\n rest = path[1:]\n return access_path(data[first], rest)\n return data", "def path_type(cls, path):\n if os.path.isdir(path):\n return 'package'\n else:\n return 'object'", "def test_get_contact_person_types_key(self):\n pass", "def test_get_parent_type_name(self):\n pass", "def _replace_path_with_type_symbol(cost_trees):\n cost_trees_ = {}\n tmp_dict = {}\n for k, v in cost_trees.items():\n if k == 'subcosts':\n for kk, vv in v.items():\n type_symbol = vv.pop('_type_symbol')\n d = {type_symbol: vv}\n if tmp_dict.get(k) is not None:\n tmp_dict[k].update(d)\n else:\n tmp_dict[k] = d\n elif type(v) == dict:\n for kk, vv in v.items():\n type_symbol = vv.pop('_type_symbol')\n d = {type_symbol: _replace_path_with_type_symbol(vv)}\n if cost_trees_.get(k) is not None:\n cost_trees_[k].update(d)\n else:\n cost_trees_[k] = d\n else:\n tmp_dict[k] = v\n cost_trees_.update(tmp_dict)\n return cost_trees_", "def get_safe(dict_instance, keypath, default=None):\n try:\n obj = dict_instance\n keylist = keypath if type(keypath) is list else keypath.split('.')\n for key 
in keylist:\n obj = obj[key]\n return obj\n except Exception, ex:\n return default", "def read_value(\n key_path: str,\n data: dict,\n data_type: type,\n mandatory=True\n) -> any:\n\n # build the path. we expect a ``key_path`` that looks like this:\n # \"key1.key2.key3\" -> [\"key1\", \"key2\", \"key3\"]\n segments = key_path.split(\".\")\n\n # segments should always have at least one element that exists in the\n # dictionary that is provided via ``data``.\n if data is None or len(segments) == 0 or segments[0] not in data:\n if mandatory:\n raise ValueError(f\"provided key {key_path} is invalid for {data}\")\n\n return None\n\n # handle the current key. this could be any key in the hierarchy\n key = segments[0]\n value = data[key]\n\n # first we need to check for it to be not None if it is a mandatory value.\n # it is ok to return None if the value is not mandatory\n if value is None:\n if mandatory:\n raise ValueError(f\"required property {key} was not set\")\n\n return None\n\n # if there are more children, we need to return the contents of these\n # instead of the current value\n if len(segments) > 1:\n\n child_key = \".\".join(segments[1:])\n\n # handle lists separately\n if isinstance(value, list):\n return [read_value(child_key, i, data_type, mandatory)\n for i in value]\n\n # single items we can just return\n return read_value(child_key, value, data_type, mandatory)\n\n # this is the last element in the hierarchy and we need to convert it to\n # the expected data_type. Handle list separately\n if isinstance(value, list):\n return [__convert_value(key, i, data_type) for i in value]\n\n return __convert_value(key, value, data_type)", "def first_part_is(self, key):\n if self.path_is_string:\n return self.path.startswith(str(key) + '.')\n if not self.path:\n return not bool(key)\n if self.path_type is list:\n return self.path[0] == key\n if self.path_type is Path:\n return self.path.first_part_is(key)\n return self.joined().startswith(str(key) + '.')", "def json_full_path(base_path, key):\n if base_path is None or base_path == \"\":\n return key\n else:\n return f'{base_path}.{key}'", "def _get(self, key):\n current_storage_dict = self._storage\n sub_keys = key.split('.')\n i = 1\n sub_keys_count = len(sub_keys)\n for sub_key in sub_keys:\n if i < sub_keys_count:\n if sub_key in current_storage_dict:\n current_storage_dict = current_storage_dict[sub_key]\n else:\n return\n\n else:\n if sub_key in current_storage_dict:\n return current_storage_dict[sub_key]\n else:\n return\n\n i += 1", "def test_get_types(self):\n pass", "def _split_key(cls, logical_key):\n if isinstance(logical_key, str):\n path = logical_key.split('/')\n elif isinstance(logical_key, (tuple, list)):\n path = logical_key\n else:\n raise TypeError('Invalid logical_key: %r' % logical_key)\n return path", "def test_read_json(self, magic_0, magic_1):\n expected = {\n 'key_1': [1, 2, 3, 4, 5],\n 'key_2': ['a', 'b', 'c', 'd', 'e']\n }\n result = helpers.read_json(r\"path\")\n self.assertEqual(expected, result)", "def test_lookup_from(self):\n for metaprefix, key, normalize, expected in [\n (\"obofoundry\", \"GO\", False, \"go\"),\n (\"obofoundry\", \"go\", False, None),\n (\"obofoundry\", \"go\", True, \"go\"),\n ]:\n with self.subTest(meteprefix=metaprefix, key=key, norm=normalize):\n self.assertEqual(\n expected, self.manager.lookup_from(metaprefix, key, normalize=normalize)\n )", "def nested_get(\n d: t.Dict, *path: t.Tuple[str, str], raise_on_missing: bool = True\n) -> t.Optional[t.Any]:\n for name, key in path:\n d = 
d.get(key) # type: ignore\n if d is None:\n if raise_on_missing:\n name = \"table\" if name == \"this\" else name\n raise ValueError(f\"Unknown {name}: {key}\")\n return None\n\n return d", "def setKeyPath(*args, **kwargs)->List[AnyStr]:\n pass", "def path_in_dictionary(self, dictionary, path):\n if path:\n key = path.split('.')[0]\n if key in dictionary and dictionary[key]:\n key_exists = self.path_in_dictionary(dictionary[key], '.'.join(path.split('.')[1:]))\n else:\n key_exists = False\n else:\n key_exists = True\n return key_exists", "def load_key(self, type, keyid):\n pass", "def traverse(path, default=_RAISE_KEYERROR):", "def test_get_node_type_name(self):\n pass", "def go_recursive_on_type_level_atomic(level, n1, n2, level_type, key):\n val = None\n if key in level:\n next_level = level[key]\n if level_type == 'node':\n gen_function_1 = n1['graph'][father(n1)['id']][n1['id']]['generating_function']\n gen_function_2 = n2['graph'][father(n2)['id']][n2['id']]['generating_function']\n next_key = (gen_function_1, gen_function_2)\n next_level_type = 'edge'\n next_n1 = father(n1)\n next_n2 = father(n2)\n else:\n next_key = (n1['type'], n2['type'])\n next_level_type = 'node'\n next_n1 = n1\n next_n2 = n2\n\n val = go_recursive_on_type_level_atomic(next_level, next_n1, next_n2, next_level_type, next_key)\n\n if val is None:\n try:\n val = level['text']\n except KeyError:\n try:\n val = level['all']['text']\n except KeyError:\n pass\n\n return val", "def extract_backing_type(value: dict) -> str:\n return next(iter(value.keys()))", "def getKeyPath(self, keyPath):\n parent = self\n parts = keyPath.split(\".\")\n for part in parts[:-1]:\n child = parent.get(part, None)\n if child is None:\n return None\n parent = child\n return parent.get(parts[-1], None)", "def test_utils_get_dict_value_from_path_should_return_none_when_value_does_not_exists(\n path,\n):\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n assert ralph_utils.get_dict_value_from_path(dictionary, path) is None", "def testComparable(self):\n path_spec = tsk_path_spec.TSKPathSpec(\n location=u'/test', parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n data_stream=u'test', location=u'/test', parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, data stream: test, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n inode=1, parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, inode: 1',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n location=u'/test', inode=1, parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, inode: 1, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)", "def get_by_path(root, path):\n \n sub_data = root\n for key in path:\n sub_data = sub_data[key]\n \n return sub_data", "def get_deep(tree, path):\n for key in path[:-1]:\n tree = tree.get(key, {})\n return tree.get(path[-1])", "def lookup(self, key):", "def test_key_kind(self):\r\n parent = ParentKind.objects.create(pk=1)\r\n child 
= ChildKind.objects.create(\r\n pk=2, parent=parent, parents=[parent.pk])\r\n self.assertEqual(child.parent.pk, parent.pk)\r\n self.assertEqual(child.parents[0], parent.pk)\r\n\r\n from google.appengine.api.datastore import Get\r\n from google.appengine.api.datastore_types import Key\r\n parent_key = Key.from_path(parent._meta.db_table, 1)\r\n child_key = Key.from_path(child._meta.db_table, 2)\r\n parent_entity = Get(parent_key)\r\n child_entity = Get(child_key)\r\n parent_column = child._meta.get_field('parent').column\r\n parents_column = child._meta.get_field('parents').column\r\n self.assertEqual(child_entity[parent_column], parent_key)\r\n self.assertEqual(child_entity[parents_column][0], parent_key)", "def test_named_link_hierarchy_data_type_inc(self):\n link_spec = LinkSpec(doc='This Bar', target_type='Bar', name='bar_link')\n parent_spec = GroupSpec(\n doc='Something to hold a Bar',\n name='bar_bucket',\n links=[link_spec]\n )\n bar_builder = GroupBuilder(\n name='my_bar',\n attributes={\n 'data_type': 'SubBar',\n 'namespace': CORE_NAMESPACE,\n 'object_id': -1\n }\n )\n sub_builder = LinkBuilder(builder=bar_builder, name='bar_link')\n GroupBuilder(name='bar_bucket', links={'my_bar': sub_builder})\n result = self.type_map.get_subspec(parent_spec, sub_builder)\n self.assertIs(result, link_spec)", "def _object_requires_hdf5_recurse(curr: NDict, str_base: str = \"\") -> List[str]:\n keys = curr.keypaths()\n ans = []\n for k in keys:\n data = curr[k]\n if _object_requires_hdf5_single(data):\n ans.append(k)\n return ans", "def dict_find_name(some_dict: Dict[str, Referent], path: List[str]) -> Result:\n if path:\n head, *tail = path\n try:\n return NameContainer.dict_find_name(\n cast(Dict[str, Referent], some_dict[head]),\n tail)\n except KeyError:\n NameContainer.logger.debug(f\"{head!r} not found in {some_dict.keys()}\")\n raise NameContainer.NotFound(path)\n else:\n return cast(Result, some_dict)", "def deep_type(obj, depth = None, max_sample = None, get_type = None):\n return _deep_type(obj, [], 0, depth, max_sample, get_type)", "def test_split_nested_class_from_key_lambda(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class$$Lambda$1')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertEqual(part2, '$Lambda$1')", "def filetype_of(path: Path) -> str:\n\n filetype = \"unsorted\"\n\n if path.suffix == \".json\":\n filetype = \"json\"\n\n elif path.suffix == \".txt\":\n if search(pattern=\"v[0-9][0-9]_[0-9]\", string=path.stem):\n filetype = \"onsets\"\n elif \"subject_info\" in path.stem:\n filetype = \"subject info\"\n\n elif path.suffix == \".nii\":\n if \"_t1_\" in path.stem:\n filetype = \"anat\"\n elif \"_lessvoids_\" in path.stem:\n filetype = \"func\"\n elif \"field_map\" in path.stem:\n filetype = \"fieldmap\"\n\n return filetype", "def path_type(mode: str, docstring: Optional[str] = None, **kwargs) -> type:\n Path._check_mode(mode)\n name = \"Path_\" + mode\n key_name = \"path \" + \"\".join(sorted(mode))\n\n skip_check = get_private_kwargs(kwargs, skip_check=False)\n if skip_check:\n from ._deprecated import path_skip_check_deprecation\n\n path_skip_check_deprecation()\n name += \"_skip_check\"\n key_name += \" skip_check\"\n\n register_key = (key_name, str)\n if register_key in registered_types:\n return registered_types[register_key]\n\n class PathType(Path):\n _expression = name\n _mode = mode\n _skip_check = skip_check\n _type = str\n\n def __init__(self, v, **k):\n super().__init__(v, mode=self._mode, skip_check=self._skip_check, 
**k)\n\n restricted_type = type(name, (PathType,), {\"__doc__\": docstring})\n add_type(restricted_type, register_key, type_check=_is_path_type)\n\n return restricted_type", "def actual_key(self, key):\n key_list = []\n if key.scope == Scope.children:\n key_list.append('children')\n elif key.scope == Scope.parent:\n key_list.append('parent')\n else:\n key_list.append([\"usage\", \"definition\", \"type\", \"all\"][key.scope.block])\n\n if key.block_scope_id is not None:\n key_list.append(key.block_scope_id)\n if key.student_id:\n key_list.append(key.student_id)\n return \".\".join(key_list)", "def test_find_path_bi():\n assert True", "def pathfor( name, **matchdict ) :", "def _get_dict_model(cls, key, model, spec):\n try:\n return model[key]\n except KeyError:\n raise ObjectNotFoundError(path=spec[\"full_path\"])", "def deepget(self, key):\n if DEBUG: print(repr(self))\n if '.' in key:\n top, rest = key.split('.', 1)\n #if DEBUG: print(top, rest)\n return self[top].deepget(rest)\n else:\n return self[key]", "def get_directory(self, key):\n raise NotImplementedError", "def test_nested_dict(self):\n nested = self.TEI.nested_dict(exclude=[\"tei:note\"])\n self.assertEqual(nested[\"1\"][\"pr\"][\"1\"], \"Spero me secutum in libellis meis tale temperamen-\",\n \"Check that dictionary path is well done\")\n self.assertEqual(nested[\"1\"][\"12\"][\"1\"], \"Itur ad Herculeas gelidi qua Tiburis arces \",\n \"Check that dictionary path works on more than one passage\")\n self.assertEqual(nested[\"2\"][\"pr\"][\"1\"], \"'Quid nobis' inquis 'cum epistula? parum enim tibi \",\n \"Check that different fist level works as well\")\n self.assertEqual(nested[\"1\"][\"3\"][\"8\"], \"Ibis ab excusso missus in astra sago. \",\n \"Check that notes are removed \")\n self.assertEqual(\n [list(nested.keys()), list(nested[\"1\"].keys())[:3], list(nested[\"2\"][\"pr\"].keys())[:3]],\n [[\"1\", \"2\"], [\"pr\", \"1\", \"2\"], [\"sa\", \"1\", \"2\"]],\n \"Ensure that text keeps its order\")", "def _get_nested_value(dct, key_path):\n key = key_path[0]\n if not isinstance(dct, dict):\n raise errors.AnsibleFilterError(\n f\"stringfilter: looking for key '{key}' \"\n f\"but list item is not dict: {pformat(dct)}\"\n )\n if key not in dct:\n raise errors.AnsibleFilterError(\n f\"stringfilter: key is '{key}' \"\n f\"but it was not found in dict: {pformat(dct)}\"\n )\n value = dct[key]\n if len(key_path) > 1:\n return _get_nested_value(value, key_path[1:])\n else:\n return value", "def test_fpath():\n\n assert fpath(None, 'data.json') == 'data.json'\n assert fpath('/path/', 'data.json') == '/path/data.json'\n assert fpath(Path('/path/'), 'data.json') == '/path/data.json'", "def test_no_path():\n test = [{'key': 'val'}, []]\n t_result = fetch_data_by_keys(*test)\n assert not is_successful(t_result)\n assert 'path list empty' in str(t_result.failure())", "def getkey(attrstr, paths=None, prompt=True, promptpass=False):\n paths = paths or DEFAULT_PATHS\n for path in paths:\n filepath = os.path.expanduser(path)\n if not os.path.exists(filepath):\n continue\n with open(filepath, 'r') as handle:\n value = rget(json.load(handle), attrstr)\n if value is None:\n continue\n if isinstance(value, dict):\n raise Exception(f'Ambiguous key: {attrstr}')\n if isinstance(value, list):\n return value\n if not isinstance(value, str):\n return value\n if not value.startswith('b64:'):\n return value\n return b64decode(value[4:]).decode('utf8')\n promptfunc = getpass if promptpass else input\n if prompt:\n return promptfunc(f'Enter {attrstr}: ')\n 
pathstr = '\\n' + '\\n'.join(paths)\n raise Exception(f'Key not found: {attrstr}{pathstr}')", "def resolve_type(type_path, builder):\n namespaces = get_parent_namespaces(builder)\n for i in reversed(range(len(namespaces) + 1)):\n full_type = \".\".join(namespaces[0:i] + [ type_path ])\n if full_type in field_types:\n return full_type\n\n raise RuntimeError(\"Cannot resolve field type.\")", "def substitute_type_keys(type, new_keys):\n type_of_type = type['type']\n if type_of_type == TYPES.ARRAY.value:\n substitute_array_keys(type, new_keys)\n elif type_of_type == TYPES.FUNCTION.value:\n substitute_function_type_keys(type, new_keys)\n elif type_of_type == TYPES.POINTER.value:\n substitute_pointer_keys(type, new_keys)\n elif type_of_type == TYPES.QUALIFIER.value:\n substitute_qualifier_keys(type, new_keys)\n elif (type_of_type == TYPES.STRUCT.value or\n type_of_type == TYPES.UNION.value):\n substitute_composite_type_members_keys(type, new_keys)\n elif type_of_type == TYPES.TYPEDEF.value:\n substitute_typedefed_type_keys(type, new_keys)", "def _get_from_nest(nest, path):\n if not path or not nest:\n return nest\n return _get_from_nest(nest.get(path[0], None), path[1:])", "def extract_type_path(object_type):\n return \"%s.%s\" % (object_type.__module__, object_type.__name__)", "def __find_key_in_level(node, key):\n for child in node.children:\n if child.key == key:\n return child\n\n return False", "def test_set_with_shallow_path():\n shallow_key_path = 'shallow_key_path'\n test_value = 'shallow key path value'\n\n config.set(shallow_key_path, test_value)\n assert config.get(shallow_key_path) == test_value" ]
[ "0.81315565", "0.7790559", "0.73348916", "0.6510662", "0.63362354", "0.6123883", "0.5945233", "0.59033793", "0.5871133", "0.58505154", "0.58181745", "0.579614", "0.5716585", "0.56624275", "0.5648696", "0.56418836", "0.5635143", "0.5609441", "0.5574678", "0.55583984", "0.55554205", "0.55295515", "0.5523099", "0.5496569", "0.54866785", "0.54866785", "0.54539424", "0.5452325", "0.54487824", "0.54365486", "0.5436271", "0.53854746", "0.5370167", "0.53698003", "0.5368395", "0.5354351", "0.5353677", "0.5328586", "0.53189695", "0.5310416", "0.52939737", "0.5289721", "0.5279099", "0.5276762", "0.52613425", "0.5240764", "0.5230072", "0.52287537", "0.52195", "0.5206179", "0.52015895", "0.51989776", "0.519775", "0.516864", "0.51640517", "0.5157337", "0.51561767", "0.5117561", "0.51136965", "0.5100349", "0.50939995", "0.50818825", "0.50785863", "0.50610226", "0.50586385", "0.50560516", "0.50433284", "0.503717", "0.50337434", "0.501626", "0.5011702", "0.5010484", "0.5009609", "0.49919876", "0.49909085", "0.4989197", "0.49876255", "0.49563256", "0.49557772", "0.49524686", "0.4944144", "0.49391714", "0.4933396", "0.4925285", "0.4917438", "0.49124956", "0.49103996", "0.49063465", "0.49056986", "0.49000707", "0.48785123", "0.48784432", "0.4877471", "0.4877", "0.48702887", "0.48595998", "0.4855701", "0.48534736", "0.48457915", "0.48407567" ]
0.78562295
1
Test get_type_for_key_path with multi level key path
def test_get_type_for_key_path_multi_level(test_schema): assert ( get_type_for_key_path(test_schema, "EmploymentInformation.Beneficiary.Name") == "string" )
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_type_for_key_path_simple_path(test_schema):\n assert get_type_for_key_path(test_schema, \"Age\") == \"integer\"", "def test_get_type_for_key_path_depth_one_level(test_schema):\n assert (\n get_type_for_key_path(test_schema, \"EmploymentInformation.OriginalHireDate\")\n == \"string\"\n )", "def test_get_type_for_key_path_invalid_key_path(test_schema):\n assert get_type_for_key_path(test_schema, \"foo.bar\") == None", "def GetKeyByPath(self, key_path):", "def type(path):", "def GetSubkeyByPath(self, key_path):", "def test_add_keys_multiple_times(self):\n path = _path.Path.from_str(\"RootOper.Foo(*)\")\n with self.assertRaisesRegex(\n ValueError, \"Path element already has key information\"):\n path(4)", "def _generic_test(self, pathstr, expected):\n self.assertEqual(self._get_pe_key(pathstr), expected)", "def _validate_key(sample, path):\n mapping_tmp = sample\n for key in path:\n try:\n mapping_tmp = mapping_tmp[key]\n except KeyError:\n return False\n except TypeError:\n return False\n return True", "def _create_path(root, dict_type, path):\n for sub_path in path:\n if not isinstance(root.get(sub_path, None), dict):\n root[sub_path] = dict_type()\n\n root = root[sub_path]\n\n return root", "def __getitem__(self, key):\n path = self.path\n if self.path_is_string:\n path = [path]\n return path[key]", "def test_search_key() -> None:\n # assert that having a wrong key at root level\n # in the json will raise an error\n key = \"toto\"\n d = {\"toto\": {\"a\": \"b\"}, \"c\": \"d\"}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n # Search when the key is in a deeper nested level\n key = \"nested_key\"\n d = {\"en\": {\"level1\": {\"level2\": {\"nested_key\": \"value\"}}}}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n return", "def _is_generic_key(key):\n for prefix in [\n \"graph_rewriter_config\",\n \"model\",\n \"train_input_config\",\n \"train_config\",\n \"eval_config\"]:\n if key.startswith(prefix + \".\"):\n return True\n return False", "def test_two_keys():\n test = [{'key1': {'key2': 'val1'}}, ['key1', 'key2']]\n assert fetch_data_by_keys(*test).unwrap() == 'val1'", "def test_type_mapping(registry, item_type):\n with mappings_use_nested(False):\n mapping = type_mapping(registry[TYPES], item_type)\n assert mapping\n assert 'properties' in mapping\n if item_type == 'TestingLinkTargetElasticSearch':\n assert mapping['properties']['reverse_es'].get('type', 'object') != 'nested' # should not occur here\n\n # check calculated properties on objects/arrays of objects are mapped correctly\n if item_type == 'TestingCalculatedProperties':\n assert mapping['properties']['nested']['properties']['key']['type'] == 'text'\n assert mapping['properties']['nested']['properties']['value']['type'] == 'text'\n assert mapping['properties']['nested']['properties']['keyvalue']['type'] == 'text'\n assert mapping['properties']['nested2']['properties']['key']['type'] == 'text'\n assert mapping['properties']['nested2']['properties']['value']['type'] == 'text'\n assert mapping['properties']['nested2']['properties']['keyvalue']['type'] == 'text'", "def setKeyPath(*args, **kwargs)->List[AnyStr]:\n pass", "def test_utils_get_dict_value_from_path_should_return_given_value(path, value):\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n assert ralph_utils.get_dict_value_from_path(dictionary, path) == value", "def key_type(self) -> global___Type:", "def test_split_nested_class_from_key(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 
'pkg.name.class$nested')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertEqual(part2, 'nested')", "def test_split_nested_class_from_key_no_nested(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertIsNone(part2)", "def _get_by_path(tree, keys):\n return reduce(getitem, keys, tree)", "def _get_by_path(tree, keys):\n return reduce(getitem, keys, tree)", "def test_get_transaction_types_key(self):\n pass", "def test_type_mapping_nested(registry):\n with mappings_use_nested(True):\n mapping = type_mapping(registry[TYPES], 'TestingLinkTargetElasticSearch')\n assert mapping\n assert 'properties' in mapping\n # if type is defined on this field, it should beg object, NOT nested since it is not enabled on this field\n assert mapping['properties']['reverse_es'].get('type', 'object') == 'object'", "def test_traversal__path_type_view_name(path, resource_type, view_name):\n from pyramid.traversal import traverse\n root_resource = root_resource_factory()\n t = traverse(root_resource, path)\n assert isinstance(t['context'], resource_type)\n assert t['view_name'] == view_name", "def get(self, key, key_type=None):\n pass", "def path_lookup(data_obj, xj_path, create_dict_path=False):\n\n if not xj_path or xj_path == '.':\n return data_obj, True\n\n res = list(split(xj_path, '.', maxsplit=1))\n top_key = res[0]\n leftover = res[1] if len(res) > 1 else None\n if top_key == '*':\n return _full_sub_array(data_obj, leftover, create_dict_path)\n elif top_key.startswith('@'):\n return _single_array_element(data_obj, leftover, top_key,\n create_dict_path)\n else:\n val_type, top_key = _clean_key_type(top_key)\n top_key = unescape(top_key)\n if top_key in data_obj:\n value = data_obj[top_key]\n if val_type is not None and not isinstance(value, val_type):\n raise XJPathError(\n 'Key %s expects type \"%s\", but found value type is \"%s\"' %\n (top_key, val_type.__name__, type(value).__name__))\n if leftover:\n return path_lookup(value, leftover, create_dict_path)\n else:\n return value, True\n else:\n if val_type is not None:\n if not isinstance(data_obj, dict):\n raise XJPathError('Accessed object must be a dict type '\n 'for the key: \"%s\"' % top_key)\n if create_dict_path:\n data_obj[top_key] = val_type()\n else:\n return None, False\n if leftover:\n return path_lookup(data_obj[top_key], leftover,\n create_dict_path)\n else:\n return data_obj[top_key], True\n return None, False", "def get_data(self,key=''):\n path = key.split('.')\n itm = self._root \n for ik,k in enumerate(path):\n child_found = False\n try: \n itm = itm[k]\n child_found = True\n except:\n try: \n itm = itm[int(k)]\n child_found = True\n except:\n longer_key = k\n for kk in path[ik+1:]:\n longer_key += '.'\n try: \n itm = itm[longer_key]\n child_found = True\n except: \n pass\n longer_key += kk\n try: \n itm = itm[longer_key]\n child_found = True\n except: \n pass\n if not child_found:\n raise KeyError(key)\n return itm", "def test_get_contact_person_types_key(self):\n pass", "def test_get_types(self):\n pass", "def test_utils_set_dict_value_from_path_creating_new_fields():\n dictionary = {}\n ralph_utils.set_dict_value_from_path(dictionary, [\"foo\", \"bar\"], \"baz\")\n assert dictionary == {\"foo\": {\"bar\": \"baz\"}}", "def test_utils_set_dict_value_from_path_updating_fields():\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n ralph_utils.set_dict_value_from_path(dictionary, [\"foo\", \"bar\"], \"baz\")\n assert dictionary == {\"foo\": 
{\"bar\": \"baz\"}}", "def get_key_recursive(key, config):\n if not isinstance(key, list):\n key = key.split(\"/\") # subdict indexing split using slash\n assert key[0] in config, f\"missing key '{key[0]}' in metadata dictionary: {config}\"\n val = config[key[0]]\n if isinstance(val, (dict, collections.OrderedDict)):\n assert len(key) > 1, \"missing keys to index metadata subdictionaries\"\n return get_key_recursive(key[1:], val)\n return int(val)", "def test_split_nested_class_from_key_numeric(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class$1')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertEqual(part2, '1')", "def get_by_dot_path(dictionary: Dict, key_path: str) -> Any:\n return get_by_list_of_keys(dictionary, key_path.split(\".\"))", "def test_set_with_deep_key_path_with_list():\n deep_key_path = ('second', 'deep', 'key', 'path')\n test_value = 'second deep key path value'\n\n config.set(deep_key_path, test_value)\n assert isinstance(config.get('second'), dict)\n assert config.get(deep_key_path) == test_value", "def test_create_node_from_key(self):\n created_node = self.test_graph.create_node_from_key(\n 'package.class$nested')\n self.assertEqual(created_node.package, 'package')\n self.assertEqual(created_node.class_name, 'class')\n self.assertEqual(created_node.name, 'package.class')", "def _split_key(cls, logical_key):\n if isinstance(logical_key, str):\n path = logical_key.split('/')\n elif isinstance(logical_key, (tuple, list)):\n path = logical_key\n else:\n raise TypeError('Invalid logical_key: %r' % logical_key)\n return path", "def _load_key(client, entity_type, entity_id=None, parent_key=None):\n\n key = None\n if entity_id:\n key = client.key(entity_type, entity_id, parent=parent_key)\n else:\n # this will generate an ID\n key = client.key(entity_type)\n return key", "def get_by_path(data: Dict[str, T], path: Sequence[str]) -> T:\n return reduce(operator.getitem, path, data)", "def test_set_with_deep_key_path_with_string():\n deep_key_path = 'deep.key.path'\n test_value = 'deep key path value'\n\n config.set(deep_key_path, test_value)\n assert isinstance(config.get('deep'), dict)\n assert config.get(deep_key_path) == test_value", "def test_access_nested_map(self, nested_map, path, result):\n self.assertEqual(access_nested_map(nested_map, path), result)", "def test_read_json(self, magic_0, magic_1):\n expected = {\n 'key_1': [1, 2, 3, 4, 5],\n 'key_2': ['a', 'b', 'c', 'd', 'e']\n }\n result = helpers.read_json(r\"path\")\n self.assertEqual(expected, result)", "def access_path(data: dict or any, path: list[str]) -> any:\n if path:\n first = path[0]\n rest = path[1:]\n return access_path(data[first], rest)\n return data", "def _verify_key_exists(self, key, stack_path=[]):\r\n error_msg = (\r\n \"Could not find the {key_type} key '{key}' in: {stack_path}. 
\"\r\n \"Found {keys_found} instead.\"\r\n )\r\n try:\r\n dk = stack_path[0]\r\n fk = stack_path[1]\r\n xk = stack_path[2]\r\n yk = stack_path[3]\r\n vk = stack_path[4]\r\n except:\r\n pass\r\n try:\r\n if len(stack_path) == 0:\r\n if key not in self:\r\n key_type, keys_found = 'data', self.keys()\r\n stack_path = 'stack'\r\n raise ValueError\r\n elif len(stack_path) == 1:\r\n if key not in self[dk]:\r\n key_type, keys_found = 'filter', self[dk].keys()\r\n stack_path = 'stack[{dk}]'.format(\r\n dk=dk)\r\n raise ValueError\r\n elif len(stack_path) == 2:\r\n if key not in self[dk][fk]:\r\n key_type, keys_found = 'x', self[dk][fk].keys()\r\n stack_path = 'stack[{dk}][{fk}]'.format(\r\n dk=dk, fk=fk)\r\n raise ValueError\r\n elif len(stack_path) == 3:\r\n if key not in self[dk][fk][xk]:\r\n key_type, keys_found = 'y', self[dk][fk][xk].keys()\r\n stack_path = 'stack[{dk}][{fk}][{xk}]'.format(\r\n dk=dk, fk=fk, xk=xk)\r\n raise ValueError\r\n elif len(stack_path) == 4:\r\n if key not in self[dk][fk][xk][yk]:\r\n key_type, keys_found = 'view', self[dk][fk][xk][yk].keys()\r\n stack_path = 'stack[{dk}][{fk}][{xk}][{yk}]'.format(\r\n dk=dk, fk=fk, xk=xk, yk=yk)\r\n raise ValueError\r\n except ValueError:\r\n print error_msg.format(\r\n key_type=key_type,\r\n key=key,\r\n stack_path=stack_path,\r\n keys_found=keys_found\r\n )", "def load_key(self, type, keyid):\n pass", "def test_access_nested_map_exception(self, nested_map, path):\n with self.assertRaises(KeyError) as error:\n access_nested_map(nested_map, path)\n self.assertEqual(error.exception.args[0], path[-1])", "def test_lookup_from(self):\n for metaprefix, key, normalize, expected in [\n (\"obofoundry\", \"GO\", False, \"go\"),\n (\"obofoundry\", \"go\", False, None),\n (\"obofoundry\", \"go\", True, \"go\"),\n ]:\n with self.subTest(meteprefix=metaprefix, key=key, norm=normalize):\n self.assertEqual(\n expected, self.manager.lookup_from(metaprefix, key, normalize=normalize)\n )", "def substitute_type_keys(type, new_keys):\n type_of_type = type['type']\n if type_of_type == TYPES.ARRAY.value:\n substitute_array_keys(type, new_keys)\n elif type_of_type == TYPES.FUNCTION.value:\n substitute_function_type_keys(type, new_keys)\n elif type_of_type == TYPES.POINTER.value:\n substitute_pointer_keys(type, new_keys)\n elif type_of_type == TYPES.QUALIFIER.value:\n substitute_qualifier_keys(type, new_keys)\n elif (type_of_type == TYPES.STRUCT.value or\n type_of_type == TYPES.UNION.value):\n substitute_composite_type_members_keys(type, new_keys)\n elif type_of_type == TYPES.TYPEDEF.value:\n substitute_typedefed_type_keys(type, new_keys)", "def check_path_hyperparameters(self, hyperparameters):\n\n new_list = {}\n mypath = self.get_directory()\n hyperparameters_keys = hyperparameters.keys()\n\n for hyperparameter in hyperparameters_keys:\n mypath = mypath + '/'\n primitive_file_name = hyperparameter + '.json'\n\n if(mypath in hyperparameter and os.path.exists(primitive_file_name)):\n new_list[hyperparameter] = hyperparameters[hyperparameter]\n elif(os.path.exists(mypath + hyperparameter + \".json\")):\n new_list[mypath + hyperparameter] = hyperparameters[hyperparameter]\n if new_list == {}:\n raise ValueError(list(hyperparameters_keys), 'is not found in MLprimitives.')\n return new_list", "def testPath(self):\n self.cache._GetKeyPath.return_value = '/foo/bar'\n\n ref = cache.CacheReference(self.cache, 'key')\n self.assertEqual(ref.path, '/foo/bar')\n\n self.cache._GetKeyPath.assert_called_once_with('key')", "def get_by_path(root, path):\n \n sub_data = 
root\n for key in path:\n sub_data = sub_data[key]\n \n return sub_data", "def get_safe(dict_instance, keypath, default=None):\n try:\n obj = dict_instance\n keylist = keypath if type(keypath) is list else keypath.split('.')\n for key in keylist:\n obj = obj[key]\n return obj\n except Exception, ex:\n return default", "def path_type(cls, path):\n if os.path.isdir(path):\n return 'package'\n else:\n return 'object'", "def check_and_resolve_path(key, parameter):\n if 'paths' in key:\n return [resolve_relative_path(p) for p in parameter]\n if 'path' in key:\n return resolve_relative_path(parameter)\n return parameter", "def get_type_from_path(path):\n return path.split('.')[-1]", "def _get(self, key):\n current_storage_dict = self._storage\n sub_keys = key.split('.')\n i = 1\n sub_keys_count = len(sub_keys)\n for sub_key in sub_keys:\n if i < sub_keys_count:\n if sub_key in current_storage_dict:\n current_storage_dict = current_storage_dict[sub_key]\n else:\n return\n\n else:\n if sub_key in current_storage_dict:\n return current_storage_dict[sub_key]\n else:\n return\n\n i += 1", "def testComparable(self):\n path_spec = tsk_path_spec.TSKPathSpec(\n location=u'/test', parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n data_stream=u'test', location=u'/test', parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, data stream: test, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n inode=1, parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, inode: 1',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)\n\n path_spec = tsk_path_spec.TSKPathSpec(\n location=u'/test', inode=1, parent=self._path_spec)\n\n self.assertIsNotNone(path_spec)\n\n expected_comparable = u'\\n'.join([\n u'type: TEST',\n u'type: TSK, inode: 1, location: /test',\n u''])\n\n self.assertEqual(path_spec.comparable, expected_comparable)", "def read_value(\n key_path: str,\n data: dict,\n data_type: type,\n mandatory=True\n) -> any:\n\n # build the path. we expect a ``key_path`` that looks like this:\n # \"key1.key2.key3\" -> [\"key1\", \"key2\", \"key3\"]\n segments = key_path.split(\".\")\n\n # segments should always have at least one element that exists in the\n # dictionary that is provided via ``data``.\n if data is None or len(segments) == 0 or segments[0] not in data:\n if mandatory:\n raise ValueError(f\"provided key {key_path} is invalid for {data}\")\n\n return None\n\n # handle the current key. 
this could be any key in the hierarchy\n key = segments[0]\n value = data[key]\n\n # first we need to check for it to be not None if it is a mandatory value.\n # it is ok to return None if the value is not mandatory\n if value is None:\n if mandatory:\n raise ValueError(f\"required property {key} was not set\")\n\n return None\n\n # if there are more children, we need to return the contents of these\n # instead of the current value\n if len(segments) > 1:\n\n child_key = \".\".join(segments[1:])\n\n # handle lists separately\n if isinstance(value, list):\n return [read_value(child_key, i, data_type, mandatory)\n for i in value]\n\n # single items we can just return\n return read_value(child_key, value, data_type, mandatory)\n\n # this is the last element in the hierarchy and we need to convert it to\n # the expected data_type. Handle list separately\n if isinstance(value, list):\n return [__convert_value(key, i, data_type) for i in value]\n\n return __convert_value(key, value, data_type)", "def pathlookup(obj_or_path_tuple, depth=None, include_origin=True):", "def test_key_kind(self):\r\n parent = ParentKind.objects.create(pk=1)\r\n child = ChildKind.objects.create(\r\n pk=2, parent=parent, parents=[parent.pk])\r\n self.assertEqual(child.parent.pk, parent.pk)\r\n self.assertEqual(child.parents[0], parent.pk)\r\n\r\n from google.appengine.api.datastore import Get\r\n from google.appengine.api.datastore_types import Key\r\n parent_key = Key.from_path(parent._meta.db_table, 1)\r\n child_key = Key.from_path(child._meta.db_table, 2)\r\n parent_entity = Get(parent_key)\r\n child_entity = Get(child_key)\r\n parent_column = child._meta.get_field('parent').column\r\n parents_column = child._meta.get_field('parents').column\r\n self.assertEqual(child_entity[parent_column], parent_key)\r\n self.assertEqual(child_entity[parents_column][0], parent_key)", "def _replace_path_with_type_symbol(cost_trees):\n cost_trees_ = {}\n tmp_dict = {}\n for k, v in cost_trees.items():\n if k == 'subcosts':\n for kk, vv in v.items():\n type_symbol = vv.pop('_type_symbol')\n d = {type_symbol: vv}\n if tmp_dict.get(k) is not None:\n tmp_dict[k].update(d)\n else:\n tmp_dict[k] = d\n elif type(v) == dict:\n for kk, vv in v.items():\n type_symbol = vv.pop('_type_symbol')\n d = {type_symbol: _replace_path_with_type_symbol(vv)}\n if cost_trees_.get(k) is not None:\n cost_trees_[k].update(d)\n else:\n cost_trees_[k] = d\n else:\n tmp_dict[k] = v\n cost_trees_.update(tmp_dict)\n return cost_trees_", "def filetype_of(path: Path) -> str:\n\n filetype = \"unsorted\"\n\n if path.suffix == \".json\":\n filetype = \"json\"\n\n elif path.suffix == \".txt\":\n if search(pattern=\"v[0-9][0-9]_[0-9]\", string=path.stem):\n filetype = \"onsets\"\n elif \"subject_info\" in path.stem:\n filetype = \"subject info\"\n\n elif path.suffix == \".nii\":\n if \"_t1_\" in path.stem:\n filetype = \"anat\"\n elif \"_lessvoids_\" in path.stem:\n filetype = \"func\"\n elif \"field_map\" in path.stem:\n filetype = \"fieldmap\"\n\n return filetype", "def test_no_path():\n test = [{'key': 'val'}, []]\n t_result = fetch_data_by_keys(*test)\n assert not is_successful(t_result)\n assert 'path list empty' in str(t_result.failure())", "def get_path(self, key):\n return get_path(self, key)", "def first_part_is(self, key):\n if self.path_is_string:\n return self.path.startswith(str(key) + '.')\n if not self.path:\n return not bool(key)\n if self.path_type is list:\n return self.path[0] == key\n if self.path_type is Path:\n return self.path.first_part_is(key)\n 
return self.joined().startswith(str(key) + '.')", "def getkey(attrstr, paths=None, prompt=True, promptpass=False):\n paths = paths or DEFAULT_PATHS\n for path in paths:\n filepath = os.path.expanduser(path)\n if not os.path.exists(filepath):\n continue\n with open(filepath, 'r') as handle:\n value = rget(json.load(handle), attrstr)\n if value is None:\n continue\n if isinstance(value, dict):\n raise Exception(f'Ambiguous key: {attrstr}')\n if isinstance(value, list):\n return value\n if not isinstance(value, str):\n return value\n if not value.startswith('b64:'):\n return value\n return b64decode(value[4:]).decode('utf8')\n promptfunc = getpass if promptpass else input\n if prompt:\n return promptfunc(f'Enter {attrstr}: ')\n pathstr = '\\n' + '\\n'.join(paths)\n raise Exception(f'Key not found: {attrstr}{pathstr}')", "def strict_path_lookup(data_obj, xj_path, force_type=None):\n\n value, exists = path_lookup(data_obj, xj_path)\n if exists:\n if force_type is not None:\n if not isinstance(value, force_type):\n raise XJPathError('Found value is a wrong type',\n (xj_path, force_type))\n return value\n else:\n raise XJPathError('Path does not exist', (xj_path,))", "def testSplit(self):\r\n data={\r\n # 1\r\n 'emptyPath':\r\n ['',{'fileBase':'',\r\n 'fileExt':None,\r\n 'dirs':[]}],\r\n\r\n # 2\r\n 'fileBaseOnly':\r\n ['fileBase',{'fileBase':'fileBase',\r\n 'fileExt':None,\r\n 'dirs':[]}],\r\n \r\n # 3\r\n 'fileExtOnly':\r\n ['.ext',{'fileBase':'',\r\n 'fileExt':'ext',\r\n 'dirs':[]}],\r\n\r\n # 4\r\n 'fileBaseEmptyFileExt':\r\n ['fileBase.',{'fileBase':'fileBase',\r\n 'fileExt':'',\r\n 'dirs':[]}],\r\n\r\n # 5\r\n 'fullFileName':\r\n ['fileBase.ext',{'fileBase':'fileBase',\r\n 'fileExt':'ext',\r\n 'dirs':[]}],\r\n\r\n # 6\r\n 'singleDir':\r\n ['dir/',{'fileBase':'',\r\n 'fileExt':None,\r\n 'dirs':['dir']}],\r\n\r\n # 7\r\n 'twoDirs':\r\n ['dir1/dir2/',{'fileBase':'',\r\n 'fileExt':None,\r\n 'dirs':['dir1','dir2']}],\r\n\r\n # 8\r\n 'absolutePathTwoDirsFullFileName':\r\n ['/dir1/dir2/fileBase.ext',{'fileBase':'fileBase',\r\n 'fileExt':'ext',\r\n 'dirs':['','dir1','dir2']}],\r\n\r\n # 9\r\n 'dirWithAPeriod':\r\n ['/dir.dirExt/fileBase.fileExt',{'fileBase':'fileBase',\r\n 'fileExt':'fileExt',\r\n 'dirs':['','dir.dirExt']}]\r\n }\r\n\r\n for k in data.iterkeys():\r\n s1=ufsi.NativeUnixPath(data[k][0]).split()\r\n s2=data[k][1]\r\n for s2k in s2.iterkeys():\r\n self.assertEquals(s1[s2k],s2[s2k],\r\n '%s: Item %s of dict %r should be %s'\r\n %(k,s2k,s1,s2[s2k]))", "def get_by_list_of_keys(dictionary: Dict, key_path: List[Any]) -> Dict:\n if len(key_path) == 1:\n return dictionary[key_path[0]]\n else:\n return get_by_list_of_keys(dictionary[key_path[0]], key_path[1:])", "def test_get_parent_type_name(self):\n pass", "def GetSubkeys(self):", "def json_full_path(base_path, key):\n if base_path is None or base_path == \"\":\n return key\n else:\n return f'{base_path}.{key}'", "def path_in_dictionary(self, dictionary, path):\n if path:\n key = path.split('.')[0]\n if key in dictionary and dictionary[key]:\n key_exists = self.path_in_dictionary(dictionary[key], '.'.join(path.split('.')[1:]))\n else:\n key_exists = False\n else:\n key_exists = True\n return key_exists", "def key_to_obj_type(self, key):\n\t\tif key.endswith('ids'):\n\t\t\tkey = key[0:-1]\n\t\tif key == 'order_id' or key == 'user_id':\n\t\t\treturn key[0:-2]\n\t\telif key == 'partner_id' or key == 'demand_partner_id':\n\t\t\treturn 'account'\n\t\telif key == 'openx_buyer_id':\n\t\t\treturn 'buyer'\n\t\telse:\n\t\t\treturn key[0:-3]", "def 
path_type(mode: str, docstring: Optional[str] = None, **kwargs) -> type:\n Path._check_mode(mode)\n name = \"Path_\" + mode\n key_name = \"path \" + \"\".join(sorted(mode))\n\n skip_check = get_private_kwargs(kwargs, skip_check=False)\n if skip_check:\n from ._deprecated import path_skip_check_deprecation\n\n path_skip_check_deprecation()\n name += \"_skip_check\"\n key_name += \" skip_check\"\n\n register_key = (key_name, str)\n if register_key in registered_types:\n return registered_types[register_key]\n\n class PathType(Path):\n _expression = name\n _mode = mode\n _skip_check = skip_check\n _type = str\n\n def __init__(self, v, **k):\n super().__init__(v, mode=self._mode, skip_check=self._skip_check, **k)\n\n restricted_type = type(name, (PathType,), {\"__doc__\": docstring})\n add_type(restricted_type, register_key, type_check=_is_path_type)\n\n return restricted_type", "def _object_requires_hdf5_recurse(curr: NDict, str_base: str = \"\") -> List[str]:\n keys = curr.keypaths()\n ans = []\n for k in keys:\n data = curr[k]\n if _object_requires_hdf5_single(data):\n ans.append(k)\n return ans", "def test_named_link_hierarchy_data_type_inc(self):\n link_spec = LinkSpec(doc='This Bar', target_type='Bar', name='bar_link')\n parent_spec = GroupSpec(\n doc='Something to hold a Bar',\n name='bar_bucket',\n links=[link_spec]\n )\n bar_builder = GroupBuilder(\n name='my_bar',\n attributes={\n 'data_type': 'SubBar',\n 'namespace': CORE_NAMESPACE,\n 'object_id': -1\n }\n )\n sub_builder = LinkBuilder(builder=bar_builder, name='bar_link')\n GroupBuilder(name='bar_bucket', links={'my_bar': sub_builder})\n result = self.type_map.get_subspec(parent_spec, sub_builder)\n self.assertIs(result, link_spec)", "def _check_keys(self, dict, filetype):\n self.filetype = filetype\n for key in dict:\n if isinstance(dict[key], scipy.io.matlab.mio5_params.mat_struct):\n dict[key] = self._todict(dict[key], self.filetype)\n return dict", "def pathfor( name, **matchdict ) :", "def nested_get(\n d: t.Dict, *path: t.Tuple[str, str], raise_on_missing: bool = True\n) -> t.Optional[t.Any]:\n for name, key in path:\n d = d.get(key) # type: ignore\n if d is None:\n if raise_on_missing:\n name = \"table\" if name == \"this\" else name\n raise ValueError(f\"Unknown {name}: {key}\")\n return None\n\n return d", "def test_get_node_type_name(self):\n pass", "def lookup(self, key):", "def test_find_inputs(self):\n codeanswer = find_inputs()\n keys = ['./indir-twofile5', './indir-good2', './indir-deep/deepdir', './indir-good1']\n for key in codeanswer:\n self.assertEqual(isinstance(codeanswer[key][1], Job), True)", "def __getitem__(self, key):\n return self.types[key]", "def make_asset_key(self, asset_type, path):\r\n raise NotImplementedError()", "def _GetFileMappingsByPath(self, safe_key_path):\n candidate_mappings = []\n for mapping in self._REGISTRY_FILE_MAPPINGS_NT:\n if safe_key_path.startswith(mapping.key_path_prefix):\n candidate_mappings.append(mapping)\n\n # Sort the candidate mappings by longest (most specific) match first.\n candidate_mappings.sort(\n key=lambda mapping: len(mapping.key_path_prefix), reverse=True)\n for mapping in candidate_mappings:\n yield mapping", "def readKey(self, keyPath):\n\t\ttry:", "def getKeyPath(self, keyPath):\n parent = self\n parts = keyPath.split(\".\")\n for part in parts[:-1]:\n child = parent.get(part, None)\n if child is None:\n return None\n parent = child\n return parent.get(parts[-1], None)", "def test_get(schema, schemas, key, expected_value):\n returned_artifacts = 
artifacts.model.get(schemas, schema)\n\n assert getattr(returned_artifacts, key) == expected_value", "def load_key():", "def traverse(path, default=_RAISE_KEYERROR):", "def test_key_for_dict_of_cacheable_objects(self):\n Meat.get_tastier_option({self.chicken: 'Tasty'}, {self.celery: 'Terrible'})\n expected_cache_key = 'tests.Meat.get_tastier_option;' \\\n ',,9ff36157b4df732256fe3b151cbf8a6bdcc22969d4d6ceaad588bccbbd5c942f,Tasty,' \\\n ',,8a332387e40497a972a0ab2099659b49b99be0d00130158f9cb92ecc93ca5b18,Terrible;'\n self.assertExpectedKeyInCache(expected_cache_key)", "def test_nested_dict(self):\n nested = self.TEI.nested_dict(exclude=[\"tei:note\"])\n self.assertEqual(nested[\"1\"][\"pr\"][\"1\"], \"Spero me secutum in libellis meis tale temperamen-\",\n \"Check that dictionary path is well done\")\n self.assertEqual(nested[\"1\"][\"12\"][\"1\"], \"Itur ad Herculeas gelidi qua Tiburis arces \",\n \"Check that dictionary path works on more than one passage\")\n self.assertEqual(nested[\"2\"][\"pr\"][\"1\"], \"'Quid nobis' inquis 'cum epistula? parum enim tibi \",\n \"Check that different fist level works as well\")\n self.assertEqual(nested[\"1\"][\"3\"][\"8\"], \"Ibis ab excusso missus in astra sago. \",\n \"Check that notes are removed \")\n self.assertEqual(\n [list(nested.keys()), list(nested[\"1\"].keys())[:3], list(nested[\"2\"][\"pr\"].keys())[:3]],\n [[\"1\", \"2\"], [\"pr\", \"1\", \"2\"], [\"sa\", \"1\", \"2\"]],\n \"Ensure that text keeps its order\")", "def _check_key_type(cls, key: Any) -> K:\n if not isinstance(key, cls.keytype):\n raise KeyError(\n f\"{cls!r} accepts only keys of type {cls.keytype!r}, \"\n f\"got {type(key)!r}\"\n )\n return cast(K, key)", "def test_metadata_inherit(self):\r\n\r\n print(\"Starting import\")\r\n course = self.get_course('toy')\r\n\r\n def check_for_key(key, node, value):\r\n \"recursive check for presence of key\"\r\n print(\"Checking {0}\".format(node.location.to_deprecated_string()))\r\n self.assertEqual(getattr(node, key), value)\r\n for c in node.get_children():\r\n check_for_key(key, c, value)\r\n\r\n check_for_key('graceperiod', course, course.graceperiod)", "def test_split_nested_class_from_key_lambda(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class$$Lambda$1')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertEqual(part2, '$Lambda$1')", "def actual_key(self, key):\n key_list = []\n if key.scope == Scope.children:\n key_list.append('children')\n elif key.scope == Scope.parent:\n key_list.append('parent')\n else:\n key_list.append([\"usage\", \"definition\", \"type\", \"all\"][key.scope.block])\n\n if key.block_scope_id is not None:\n key_list.append(key.block_scope_id)\n if key.student_id:\n key_list.append(key.student_id)\n return \".\".join(key_list)", "def test_get_resourcetypes_extract(self):\n self.assertEqual(\"nodes\", util.get_resourcetypes(\"/resource/nodes\"))\n self.assertEqual(\"dests\", util.get_resourcetypes(\"http://localhost:8080/resource/dests/\"))\n self.assertEqual(\"flows\", util.get_resourcetypes(\"/resource/flows/ABCDEF\"))\n self.assertEqual(\"nodes\", util.get_resourcetypes(\"http://localhost:8080/resource/nodes/ABCDEF/\"))", "def test_find_relation_types(self):\n pass" ]
[ "0.78702086", "0.77100694", "0.718522", "0.6605864", "0.6282174", "0.62626797", "0.5969291", "0.59365714", "0.59263676", "0.5847719", "0.5822397", "0.5740006", "0.5724306", "0.57241476", "0.56666523", "0.56429887", "0.563662", "0.5624624", "0.56129014", "0.55840045", "0.5535988", "0.5535988", "0.5510817", "0.54844755", "0.54772043", "0.54347163", "0.5424431", "0.5419458", "0.5401134", "0.5395989", "0.5384956", "0.53775936", "0.5374581", "0.5374116", "0.5368823", "0.5367547", "0.53337747", "0.5318994", "0.52839875", "0.5275865", "0.5275373", "0.5274747", "0.52630746", "0.52574223", "0.5253341", "0.5250902", "0.5247588", "0.51766974", "0.51567656", "0.5150099", "0.5146196", "0.5132796", "0.5132721", "0.51248777", "0.5112654", "0.51036435", "0.51008964", "0.50818783", "0.5067008", "0.5049075", "0.5048364", "0.50377166", "0.50360596", "0.50238526", "0.50101596", "0.50075006", "0.49972978", "0.4994478", "0.49906158", "0.498628", "0.49752116", "0.49649626", "0.496481", "0.49609765", "0.49590868", "0.49489257", "0.49449006", "0.49387914", "0.4932674", "0.49264348", "0.4925214", "0.49102938", "0.48880458", "0.4880046", "0.48760092", "0.48695347", "0.48655844", "0.48640892", "0.4839949", "0.4835665", "0.4835087", "0.48310304", "0.4824518", "0.48202547", "0.48179275", "0.48135352", "0.48127222", "0.48104903", "0.4803711", "0.4802593" ]
0.8355806
0
Test get_type_for_key_path with invalid key path
def test_get_type_for_key_path_invalid_key_path(test_schema): assert get_type_for_key_path(test_schema, "foo.bar") == None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_get_type_for_key_path_simple_path(test_schema):\n assert get_type_for_key_path(test_schema, \"Age\") == \"integer\"", "def test_get_type_for_key_path_multi_level(test_schema):\n assert (\n get_type_for_key_path(test_schema, \"EmploymentInformation.Beneficiary.Name\")\n == \"string\"\n )", "def test_get_type_for_key_path_depth_one_level(test_schema):\n assert (\n get_type_for_key_path(test_schema, \"EmploymentInformation.OriginalHireDate\")\n == \"string\"\n )", "def _generic_test(self, pathstr, expected):\n self.assertEqual(self._get_pe_key(pathstr), expected)", "def _validate_key(sample, path):\n mapping_tmp = sample\n for key in path:\n try:\n mapping_tmp = mapping_tmp[key]\n except KeyError:\n return False\n except TypeError:\n return False\n return True", "def test_map_missing_key_encountered():\n with pytest.raises(KeyError):\n Map().read_key(10, b\"\")", "def test_keys_failure(self):\n storage = Storage()\n storage._keys_dict = {'1': 'one',\n 'abc': '1'}\n self.assertRaises(StoragePatternError, storage.keys, 'ab[cd')", "def GetKeyByPath(self, key_path):", "def _check_key(self, key):\n raise NotImplementedError", "def test_get_invalid_key(test_file):\n md = OSXMetaData(test_file.name)\n with pytest.raises(KeyError):\n md[\"invalid_key\"]", "def test_getKey_nokey(self):\n filename = os.path.join(os.getcwd(), 'sekrit')\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! type=%r\" % type(key))", "def test_validate_with_invalid_key_format_type(self):\n key_format_type = \"invalid\"\n kwargs = {'key_format_type': key_format_type}\n\n self.assertRaisesRegex(\n TypeError, \"invalid key format type\", Digest, **kwargs)", "def test_handle_key_error():\n\n @handle_key_error\n def get_item(key):\n data = {\"A\": 1, \"B\": 2}\n return data[key]\n\n value = get_item(\"A\")\n assert value == 1\n\n with pytest.raises(InvalidParameter) as exc:\n get_item(\"C\")\n\n assert \"C\" in str(exc.value)", "def testFromStringTypeError(self):\n for path_type in dicom_path.Type:\n if path_type != dicom_path.Type.STORE:\n self.assertRaises(ValueError, dicom_path.FromString,\n tdpu.STORE_PATH_STR, path_type)\n if path_type != dicom_path.Type.STUDY:\n self.assertRaises(ValueError, dicom_path.FromString,\n tdpu.STUDY_PATH_STR, path_type)\n if path_type != dicom_path.Type.SERIES:\n self.assertRaises(ValueError, dicom_path.FromString,\n tdpu.SERIES_PATH_STR, path_type)\n if path_type != dicom_path.Type.INSTANCE:\n self.assertRaises(ValueError, dicom_path.FromString,\n tdpu.INSTANCE_PATH_STR, path_type)", "def test_get_unhappy_paths():\n with pytest.raises(TypeError):\n ContractHandler.get(\"foo name\")\n\n with pytest.raises(TypeError):\n ContractHandler.get(\"foo name\", \"foo address\")\n\n with pytest.raises(InvalidAddress):\n ContractHandler.get(\"DataTokenTemplate\", \"foo address\")", "def test_value_error_for_computing_missing_type():\n with pytest.raises(ValueError):\n compute_type(\"missing_type\", {})", "def test_add_keys_multiple_times(self):\n path = _path.Path.from_str(\"RootOper.Foo(*)\")\n with self.assertRaisesRegex(\n ValueError, \"Path element already has key information\"):\n path(4)", "def test_get_invalid_key(self):\n pairs = {'library': '~/home/documents/dms',\n 'key': 'value',\n }\n exceptionKeys = ['Hello', 'spam']\n try:\n tempconfig = tempfile.NamedTemporaryFile(\n suffix=\".yaml\", delete=False)\n tempconfig.write('ham: eggs'.encode('UTF-8'))\n tempconfig.close()\n config = easydms.config.Config(tempconfig.name)\n\n for key, value 
in pairs.items():\n self.assertEqual(config.getKey(key, value), value)\n\n for key in exceptionKeys:\n with self.assertRaises(easydms.config.ErrorConfigKeyNotFound):\n config.getRequiredKey(key)\n finally:\n os.remove(tempconfig.name)", "def test_get_path_returns_none_for_bad_key(\n self, audio_store_and_expected_files, key):\n audio_store = audio_store_and_expected_files[0]\n assert audio_store.get_path(key) is None", "def test_valid_key(self):\n f = lws.valid_data_key\n assert f('string', int, r'string') is False\n assert f('string', str, r'test') is False\n assert f(123, int, '123') is False\n assert f(123.00, float, '123') is False\n assert f('123', str, r'[0-9]*') is True", "def test_bad_valuetype():\n test = [{'key': {'key1': 'val'}}, ['key']]\n t_result = fetch_data_by_keys(*test)\n assert not is_successful(t_result)\n assert 'Bad data found' in str(t_result.failure())", "def _check_key_type(cls, key: Any) -> K:\n if not isinstance(key, cls.keytype):\n raise KeyError(\n f\"{cls!r} accepts only keys of type {cls.keytype!r}, \"\n f\"got {type(key)!r}\"\n )\n return cast(K, key)", "def type(path):", "def _validate_type(self, key, type_):\n if type_ is None:\n type_ = \"\"\n \n if not isinstance(type_, (str, unicode)):\n raise TypeError(\"FileLink.type should be a str or unicode, \"\n \"not %s\" % type_.__class__.__name__)\n \n return type_", "def test_search_key() -> None:\n # assert that having a wrong key at root level\n # in the json will raise an error\n key = \"toto\"\n d = {\"toto\": {\"a\": \"b\"}, \"c\": \"d\"}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n # Search when the key is in a deeper nested level\n key = \"nested_key\"\n d = {\"en\": {\"level1\": {\"level2\": {\"nested_key\": \"value\"}}}}\n\n with pytest.raises(Exception):\n Translator.search_key(d, key)\n\n return", "def _is_generic_key(key):\n for prefix in [\n \"graph_rewriter_config\",\n \"model\",\n \"train_input_config\",\n \"train_config\",\n \"eval_config\"]:\n if key.startswith(prefix + \".\"):\n return True\n return False", "def test_get_storage_invalid_suffix(self):\r\n self.assertRaises(KeyError, self.profile.get_storage, ('testing.json,'))", "def test_getKey_tmpfile(self):\n filename = self.mktemp()\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! 
type=%r\" % type(key))", "def test_utils_get_dict_value_from_path_should_return_none_when_value_does_not_exists(\n path,\n):\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n assert ralph_utils.get_dict_value_from_path(dictionary, path) is None", "def key_type(self) -> global___Type:", "def test_getObjectByKey_raises_KeyError(self):\n try:\n self.tile_bucket.getObjectByKey('foo_key')\n except KeyError:\n return\n assert(False)", "def test_config_key_error():\n c = core.Config()\n\n with pytest.raises(KeyError):\n c['doesNotExist']", "def test_get_key_digest_with_only_ns(self):\n with pytest.raises(TypeError) as typeError:\n self.as_connection.get_key_digest(\"test\")", "def test_invalid_path(self):\n self.assertRaises(argparse.ArgumentTypeError, generic.check_path, 'foo')", "def get(self, key, key_type=None):\n pass", "def test_get_key_digest_with_invalid_ns_type(self, ns):\n\n with pytest.raises(TypeError):\n self.as_connection.get_key_digest(ns, \"demo\",\n \"get_digest_key\")", "def _validate_path(self, key, path):\n if path is None:\n raise TypeError(\"FileLink.path can not be None\")\n \n if not isinstance(path, (str, unicode)):\n raise TypeError(\"FileLink.path should be a str or unicode, \"\n \"not %s\" % path.__class__.__name__)\n return path", "def test_no_path():\n test = [{'key': 'val'}, []]\n t_result = fetch_data_by_keys(*test)\n assert not is_successful(t_result)\n assert 'path list empty' in str(t_result.failure())", "def test_wrong_key(self):\n with pytest.raises(FinderInsideProException) as ex:\n FinderInsidePro(\"\")\n assert FinderInsideProException.EXCEPTION_TEXT_KEY_NOT_SET in str(ex)\n assert ex.value.extype == FinderInsideProException.TYPE_KEY_IS_WRONG\n\n with pytest.raises(FinderInsideProException) as ex:\n FinderInsidePro('aaa')\n assert FinderInsideProException.EXCEPTION_TEXT_WRONG_KEY in str(ex)\n assert ex.value.extype == FinderInsideProException.TYPE_KEY_IS_WRONG", "def test_etype__invalid(self):\n\n for etype in (\"SyntaxError\", self):\n self.assertRaises(TypeError, encode_file_path, \"test\", etype)", "def test_anchortype_nokey(testchannel):\n key = testchannel.add(list)\n for i in range(5):\n testchannel.add(list)\n testchannel.remove(key)\n\n with pytest.raises(KeyError) as err:\n testchannel.anchortype(key)\n\n assert err.value.args == (key, )", "def _verify_key_exists(self, key, stack_path=[]):\r\n error_msg = (\r\n \"Could not find the {key_type} key '{key}' in: {stack_path}. 
\"\r\n \"Found {keys_found} instead.\"\r\n )\r\n try:\r\n dk = stack_path[0]\r\n fk = stack_path[1]\r\n xk = stack_path[2]\r\n yk = stack_path[3]\r\n vk = stack_path[4]\r\n except:\r\n pass\r\n try:\r\n if len(stack_path) == 0:\r\n if key not in self:\r\n key_type, keys_found = 'data', self.keys()\r\n stack_path = 'stack'\r\n raise ValueError\r\n elif len(stack_path) == 1:\r\n if key not in self[dk]:\r\n key_type, keys_found = 'filter', self[dk].keys()\r\n stack_path = 'stack[{dk}]'.format(\r\n dk=dk)\r\n raise ValueError\r\n elif len(stack_path) == 2:\r\n if key not in self[dk][fk]:\r\n key_type, keys_found = 'x', self[dk][fk].keys()\r\n stack_path = 'stack[{dk}][{fk}]'.format(\r\n dk=dk, fk=fk)\r\n raise ValueError\r\n elif len(stack_path) == 3:\r\n if key not in self[dk][fk][xk]:\r\n key_type, keys_found = 'y', self[dk][fk][xk].keys()\r\n stack_path = 'stack[{dk}][{fk}][{xk}]'.format(\r\n dk=dk, fk=fk, xk=xk)\r\n raise ValueError\r\n elif len(stack_path) == 4:\r\n if key not in self[dk][fk][xk][yk]:\r\n key_type, keys_found = 'view', self[dk][fk][xk][yk].keys()\r\n stack_path = 'stack[{dk}][{fk}][{xk}][{yk}]'.format(\r\n dk=dk, fk=fk, xk=xk, yk=yk)\r\n raise ValueError\r\n except ValueError:\r\n print error_msg.format(\r\n key_type=key_type,\r\n key=key,\r\n stack_path=stack_path,\r\n keys_found=keys_found\r\n )", "def test_entities__Entity__getField__1(entity):\n with pytest.raises(KeyError):\n entity.getField('asdf')", "def test_read_type_error():\n filename = {}\n with pytest.raises(TypeError):\n read_file(filename)", "def test_with_nonexisting_attr(create_file_with_text):\n test_class = KeyValueStorage(create_file_with_text)\n with pytest.raises(ValueError, match=\"No such key\"):\n test_class[\"wrong_attribute\"]", "def test_get_transaction_types_key(self):\n pass", "def test_utils_get_dict_value_from_path_should_return_given_value(path, value):\n dictionary = {\"foo\": {\"bar\": \"bar_value\"}}\n assert ralph_utils.get_dict_value_from_path(dictionary, path) == value", "def load_key(self, type, keyid):\n pass", "def test_get_single_different(single_bucket): # pylint: disable=redefined-outer-name\n with pytest.raises(KeyError):\n single_bucket.get(\"key 2\")", "def test_get_path_valid(self, audio_collection, key_idx, file_idx):\n # TODO: the hard-coded parametrization is gross.\n keys = list(audio_collection.audio_stores.keys())\n keys.sort()\n store_name = keys[key_idx]\n assert os.path.exists(audio_collection.get_path(\n store_name, file_idx))", "def test_generate_key(self): \n k = Key().generate()\n self.assertRegex(k, \"[a-zA-Z0-9+\\/]+={0,2}\")", "def test_getter_key_error(self):\n root = netapp_api.NaElement('root')\n self.mock_object(root, 'get_child_by_name', return_value=None)\n self.mock_object(root, 'has_attr', return_value=None)\n\n self.assertRaises(KeyError,\n netapp_api.NaElement.__getitem__,\n root, '123')", "def check_type( string_key ) : \r\n\r\n if type( string_key ) != type( '' ) : \r\n\r\n # raise self.__class__( \"'%s': EGI wants the key to be four _characters_ (not %s) !\" % (type(string_key), ) ) \r\n raise Eggog( \"'%s': EGI wants the key to be four _characters_ (not %s) !\" % (type(string_key), ) ) \r\n \r\n else :\r\n \r\n return True", "def test_incompatible_option_type(key, value):\n wrong_types = {int, str, list, bool} - {type(value)}\n for wrong_type in wrong_types:\n test_value = wrong_type()\n with pytest.raises(InputError):\n _check_input_config({key: test_value})", "def mock_from_string(*args, **kwargs):\n raise InvalidKeyError('foo', 'bar')", "def 
test_traversal_invalid_string(traversal_test_trie):\n with pytest.raises(KeyError):\n gen = traversal_test_trie.traversal('invalid')\n next(gen)", "def test_invalid_file_path(self):\n # Test with an invalid file path\n\n #setup\n filepath = \".data/kano/test.txt\"\n expected_result = {\n \"type\": \"\",\n \"city\": \"\",\n \"state\": \"\",\n \"coordinates\": [\"\", \"\"],\n '': {}\n }\n\n #result\n assert extractor.get_metadata_from_filepath(filepath) == expected_result", "def test_set_invalid_key(test_file):\n md = OSXMetaData(test_file.name)\n with pytest.raises(KeyError):\n md[\"invalid_key\"] = \"value\"", "def test_missingKey(self):\n self.assertIsNone(self.users.key(\"mystery domain\", \"mystery user\"))", "def test_get_key_digest_with_only_ns_and_set(self):\n with pytest.raises(TypeError) as typeError:\n self.as_connection.get_key_digest(\"test\", \"set\")", "def readKey(self, keyPath):\n\t\ttry:", "def test_get_contact_person_types_key(self):\n pass", "def testBadKeyToToken(self):\n key = 5\n self.assertRaises(ValueError, dataToToken, key, data='hey')", "def test_keyerror(self):\n try:\n self.db['foo']\n except KeyError, e:\n assert \"no key 'foo' in database <SequenceFileDB\" in str(e), str(e)", "def test_load_unsupported_type(self):\n expected = {\n \"name\": \"Kevin\",\n \"age\": 21,\n \"pet\": {\n \"name\": \"Trippy Jack\",\n \"age\": 20762,\n \"__type__\": \"hyperdimensional.hamster\"\n }\n }\n with open('tests/unsupported_type.json', 'r') as json_file:\n self.assertEqual(expected, morejson.load(json_file))", "def test_get_key_digest_with_no_parameter(self):\n with pytest.raises(TypeError) as typeError:\n self.as_connection.get_key_digest()\n\n assert \"argument 'ns' (pos 1)\" in str(\n typeError.value)", "def test_getitem_error(self):\n with self.assertRaises(KeyError):\n self.tester['Not_a_Sample']", "def test_getitem_error(self):\n with self.assertRaises(KeyError):\n self.tester['Not_a_Sample']", "def test_access_nested_map_exception(self, nested_map, path):\n with self.assertRaises(KeyError) as error:\n access_nested_map(nested_map, path)\n self.assertEqual(error.exception.args[0], path[-1])", "def key_type(self):\n raise exceptions.NotImplementedError()", "def test_toomanykeys(self):\n self.assertRaises(recordparser.KeyListMismatchError,\n recordparser.getfields, \"1234567890\", \"10s\", (\"key1\", \"key2\"))", "def test_is_valid_annotation_key_invalid_input():\n # test length violations\n assert not is_valid_annotation_key(key=None) # Too short\n assert not is_valid_annotation_key(key=\"\") # Too short\n assert not is_valid_annotation_key(key=f\"{'p' * 254}/n\") # prefix too long\n assert not is_valid_annotation_key(key=\"/n\") # prefix too short\n assert not is_valid_annotation_key(key=\"p/\") # name too short\n assert not is_valid_annotation_key(key=\"a\" * 254) # name too long\n assert not is_valid_annotation_key(key=f\"d/{'b'*64}\") # name too long\n # test first character violations (not alphanum)\n assert not is_valid_annotation_key(key=\"-a\")\n assert not is_valid_annotation_key(key=\".b\")\n assert not is_valid_annotation_key(key=\" c\")\n # test last character violations (not alphanum)\n assert not is_valid_annotation_key(key=\"a-\")\n assert not is_valid_annotation_key(key=\"b.\")\n assert not is_valid_annotation_key(key=\"c \")\n assert not is_valid_annotation_key(key=\"sw33T#\")\n # test middle characters violations\n assert not is_valid_annotation_key(key=\"a$$a\")\n assert not is_valid_annotation_key(key=\"b b\")", "def _check_key(key): # type: (str) -> 
None\n if not key:\n raise ValueError('Key must not be empty.')\n if '.' in key:\n raise ValueError('Key must not contain dots.')", "def test_getKey_keyexists(self):\n filename = self.mktemp()\n with open(filename, 'wb') as fh:\n fh.write(SEKRIT_KEY)\n fh.flush()\n\n key = crypto.getKey(filename)\n self.failUnlessIsInstance(key, basestring,\n \"key isn't a string! type=%r\" % type(key))\n self.assertEqual(SEKRIT_KEY, key,\n \"\"\"The example key and the one read from file differ!\n key (in hex): %s\n SEKRIT_KEY (in hex): %s\"\"\"\n % (key.encode('hex'), SEKRIT_KEY.encode('hex')))", "def test_key_not_found(self):\n self.expect_datatore_lookup('SomeBlobKey', False)\n self.mox.ReplayAll()\n self.assertResponse('404 %s' % httplib.responses[404], [], '', self.app,\n self._environ)", "def test_key_type(self):\n self.failureResultOf(self.producer.send_messages(\"topic\", key=\"key\", msgs=[b\"msg\"]), TypeError)", "def test_get_types(self):\n pass", "def test_is_valid_label_key_invalid_input():\n # test length violations\n assert not is_valid_label_key(key=None) # Too short\n assert not is_valid_label_key(key=\"\") # Too short\n assert not is_valid_label_key(key=f\"{'p' * 254}/n\") # prefix too long\n assert not is_valid_label_key(key=\"/n\") # prefix too short\n assert not is_valid_label_key(key=\"p/\") # name too short\n assert not is_valid_label_key(key=\"a\" * 254) # name too long\n assert not is_valid_label_key(key=f\"d/{'b'*64}\") # name too long\n # test first character violations (not alphanum)\n assert not is_valid_label_key(key=\"-a\")\n assert not is_valid_label_key(key=\".b\")\n assert not is_valid_label_key(key=\" c\")\n # test last character violations (not alphanum)\n assert not is_valid_label_key(key=\"a-\")\n assert not is_valid_label_key(key=\"b.\")\n assert not is_valid_label_key(key=\"c \")\n assert not is_valid_label_key(key=\"sw33T#\")\n # test middle characters violations\n assert not is_valid_label_key(key=\"a$$a\")\n assert not is_valid_label_key(key=\"b b\")", "def test_is_valid_label_key_valid_input():\n # test valid label keys\n assert is_valid_label_key(key=\"l0l\")\n assert is_valid_label_key(key=\"l0L\")\n assert is_valid_label_key(key=\"L-l\")\n assert is_valid_label_key(key=\"L.L\")\n assert is_valid_label_key(key=\"4-you\")\n assert is_valid_label_key(key=\"you.2\")\n assert is_valid_label_key(key=\"p/n\")\n assert is_valid_label_key(key=\"prefix/you.2\")\n assert is_valid_label_key(key=\"how.sad/to-see\")\n assert is_valid_label_key(key=f\"{'d'*253}/{'n'*63}\")", "def test_invalid_pathname(self):\n self.assertFalse(Util.is_pathname_valid(''))", "def _raiseIfWebsafeKeyNotValid(websafeKey, kind):\n # Check that websafeKey is not None\n if not websafeKey:\n raise endpoints.BadRequestException(\n \"Websafe key not provided for '%s'\" % kind)\n # Try to decode the websafe key into a real key\n try:\n key = ndb.Key(urlsafe=websafeKey)\n except:\n raise endpoints.BadRequestException(\n \"Websafe key provided for '%s' could not be decoded: %s\" %\n (kind, websafeKey))\n # Ensure that the key is of the desired kind\n if key.kind() != kind:\n raise endpoints.BadRequestException(\n \"Websafe key is not of the '%s' kind: %s\" % (kind, websafeKey))\n # If all is well, return the key\n return key", "def test_get_factory_invalid(self):\n order_processor = OrderProcessor()\n self.assertRaises(KeyError,\n order_processor.get_factory('AppleRepublic'))", "def load_key():", "def test_is_valid_annotation_key_valid_input():\n # test valid label keys\n assert 
is_valid_annotation_key(key=\"l0l\")\n assert is_valid_annotation_key(key=\"l0L\")\n assert is_valid_annotation_key(key=\"L-l\")\n assert is_valid_annotation_key(key=\"L.L\")\n assert is_valid_annotation_key(key=\"4-you\")\n assert is_valid_annotation_key(key=\"you.2\")\n assert is_valid_annotation_key(key=\"p/n\")\n assert is_valid_annotation_key(key=\"prefix/you.2\")\n assert is_valid_annotation_key(key=\"how.sad/to-see\")\n assert is_valid_annotation_key(key=f\"{'d'*253}/{'n'*63}\")", "def test_set_key_filename_missing(self):\n command_line = self._MENU + [self._KEYNAME, \"--keyfile-path\", \"/bogus\"]\n self.check_error(StratisCliKeyfileNotFoundError, command_line, _ERROR)", "def testKeyInfoTooShort(self):\n key = 5\n self.assertRaises(ValueError, dataToToken, key, data='x', keyInfo='xx')", "def test_empty_key_raises_error(empty_viewset):\n viewset = empty_viewset\n system1 = viewset.model.add_software_system(name=\"sys1\")\n\n with pytest.raises(ValueError, match=\"A key must be specified\"):\n viewset.create_container_view(\n key=\"\", description=\"container\", software_system=system1\n )", "def keyfunc(invalid: utils.TraceTuple) -> Path:\n traces_path = invalid.traces_path\n assert isinstance(traces_path, Path)\n return traces_path", "def test_nokey(testchannel, methodname):\n key = 42\n\n with pytest.raises(KeyError) as err:\n getattr(testchannel, methodname)(key)\n\n assert err.value.args == (key, )", "def test_missing_entry_raises_KeyError(self):\n with self.assertRaises(KeyError):\n self.phonebook.lookup(\"missing\")", "def test_other_user_kvs_get_failure(self):\r\n with self.assertRaises(AssertionError):\r\n self.kvs.get(self.other_key_factory(self.existing_field_name))", "def test_no_key_raises_error(empty_viewset):\n viewset = empty_viewset\n system1 = viewset.model.add_software_system(name=\"sys1\")\n\n with pytest.raises(ValueError, match=\"A key must be specified\"):\n viewset.create_container_view(description=\"container\", software_system=system1)", "def test_split_nested_class_from_key_no_nested(self):\n part1, part2 = class_dependency.split_nested_class_from_key(\n 'pkg.name.class')\n self.assertEqual(part1, 'pkg.name.class')\n self.assertIsNone(part2)", "def __guess_key_type(self):\n if self.__key:\n if isinstance(self.__key, types.IntType) or \\\n isinstance(self.__key, types.LongType) or \\\n isinstance(self.__key, types.FloatType):\n return NUMBER_KEY_TYPE\n elif isinstance(self.__key, types.StringTypes):\n return STRING_KEY_TYPE\n\n return UNKNOWN_KEY_TYPE", "def PathType(path_str):\n orig = path_str\n path_str = os.path.expanduser(path_str) # Expand user path if necessary\n path_str = os.path.abspath(path_str)\n\n if os.path.exists(path_str):\n return path_str\n else:\n raise argparse.ArgumentError(message='\"{}\" is not a valid path'.format(orig))", "def isValidKey(key):\n return True", "def _check_transform_key(key: Hashable) -> None:\n _test_hashable = hash(key) # The only 'real' way to make sure is hashable\n # if not isinstance(key, Hashable):\n # raise TypeError((type(key), \"transformation lookup key is not hashable\"))", "def require(obj, key, required_type=None):\n if key not in obj:\n raise KeyError(f'{key} not found')\n if required_type is not None and not isinstance(obj[key], required_type):\n raise TypeError(f'{key} is not a {required_type}')\n return obj[key]", "def test_getitem_error(self):\n with self.assertRaises(KeyError):\n self.tester['Not_a_Category']", "def test_getitem_error(self):\n with self.assertRaises(KeyError):\n 
self.tester['Not_a_Category']" ]
[ "0.7941828", "0.74922115", "0.73346525", "0.67035246", "0.6578848", "0.64337885", "0.64240384", "0.64203644", "0.6280829", "0.6259001", "0.6171811", "0.61548215", "0.61433345", "0.61050516", "0.6074959", "0.60654056", "0.6059521", "0.6050425", "0.60475475", "0.6045783", "0.6044268", "0.5995072", "0.59939396", "0.599052", "0.59665674", "0.5964877", "0.5953391", "0.5947495", "0.5922025", "0.59154105", "0.5907641", "0.58941394", "0.58801323", "0.5868779", "0.5864099", "0.58634746", "0.5832747", "0.58219427", "0.5777885", "0.5777759", "0.5758337", "0.57419306", "0.5733731", "0.5722773", "0.57216364", "0.572154", "0.57107085", "0.5709984", "0.5704288", "0.5652223", "0.5651598", "0.56497836", "0.56419295", "0.5634819", "0.5625074", "0.5605991", "0.55896163", "0.558455", "0.5581472", "0.55778253", "0.5573247", "0.5571643", "0.5569409", "0.5559334", "0.55575114", "0.55511737", "0.55510986", "0.55510986", "0.554882", "0.5529197", "0.5527225", "0.5516942", "0.5511008", "0.55023706", "0.5502051", "0.5497536", "0.5460683", "0.5455039", "0.5450571", "0.5448415", "0.5446354", "0.54457337", "0.5441881", "0.5435754", "0.54259706", "0.54156345", "0.54095984", "0.5408202", "0.54073834", "0.5397145", "0.5391338", "0.5382965", "0.5381182", "0.5379561", "0.5373194", "0.53727716", "0.5370323", "0.5370313", "0.5369551", "0.5369551" ]
0.8730529
0
Create a mesh of points to plot in
def make_meshgrid(x, y, h=.02): x_min, x_max = x.min() - 1, x.max() + 1 y_min, y_max = y.min() - 1, y.max() + 1 xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h)) return xx, yy
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def to_mesh(\n self,\n lims_x: array_like = (-1, 1),\n lims_y: array_like = (-1, 1),\n ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:\n a, b, c, d = self.cartesian()\n x_center, y_center = self.point[:2]\n\n values_x = x_center + lims_x\n values_y = y_center + lims_y\n\n X, Y = np.meshgrid(values_x, values_y)\n\n if c != 0:\n Z = -(a * X + b * Y + d) / c\n\n elif b != 0:\n Z = -(a * X + c * Y + d) / b\n X, Y, Z = X, Z, Y\n\n else:\n Z = -(b * X + c * Y + d) / a\n X, Y, Z = Z, X, Y\n\n return X, Y, Z", "def plotMesh(verts,tris):\n x = verts[:,0]\n y = verts[:,1]\n\n plt.figure()\n plt.gca().set_aspect('equal')\n plt.triplot(x, y, tris, 'k-')\n plt.title('Unstructured Mesh')\n plt.xlabel('distance (m)')\n plt.ylabel('distance (m)')", "def xy_mesh(nx, ny, x_min=0, x_max=1, y_min=0, y_max=1):\n\n\tx = np.linspace(x_min, x_max, nx)\n\ty = np.linspace(y_min, y_max, ny)\n\txv, yv = np.meshgrid(x, y)\n\t\n\treturn xv, yv", "def build_mesh(self):\n vertices = []\n indices = []\n step = 10\n istep = (pi * 2) / float(step)\n for i in range(step):\n x = 350 + cos(istep * i) * 100\n y = 350 + sin(istep * i) * 100\n vertices.extend([x, y, 0, 0])\n indices.append(i)\n return Mesh(vertices=vertices, indices=indices)", "def draw_stl_from_mesh(m):\n plt.ion()\n # Create a new plot\n figure = plt.figure()\n axes = mplot3d.Axes3D(figure)\n\n # Render the cube faces\n #for m in meshes:\n axes.add_collection3d(mplot3d.art3d.Poly3DCollection(m.vectors))\n\n # Auto scale to the mesh size\n scale = m.points.flatten(-1)\n axes.auto_scale_xyz(scale, scale, scale)", "def uniform_mesh(n, x_0=0.0, x_1=1.0):\n\n assert n>0\n\n\n points = x_0 + (x_1 - x_0)*numpy.arange(n+1,dtype=numpy.float)/n\n boundary = {(0, 0): 'left', (n-1, 1): 'right'}\n\n return points, boundary", "def create( vertices, faces, colour=(0.6,0.6,0.6) ):\r\n \r\n mesh = GLUtils.GLMesh()\r\n \r\n for vertex in vertices:\r\n mesh.addVertex( PyUtils.toPoint3d(vertex) )\r\n \r\n for face in faces:\r\n poly = GLUtils.GLIndexedPoly()\r\n for index in face:\r\n poly.addVertexIndex( index )\r\n mesh.addPoly(poly)\r\n \r\n try:\r\n mesh.setColour( *colour )\r\n except TypeError:\r\n mesh.setColour( *(colour + (1,)) )\r\n\r\n mesh.computeNormals()\r\n\r\n return mesh", "def _create_meshgrid(self):\n x = np.linspace(self.limits[0], self.limits[1], self.resolution)\n y = np.linspace(self.limits[2], self.limits[3], self.resolution)\n X, Y = np.meshgrid(x, y)\n return X, Y", "def createMesh(width, height):\r\n mesh = [(x,y) for x in range(0, width+1) for y in range(0,height+1)]\r\n return mesh", "def mesh_generation(coordinates):\n # Get the minimum and maximum for the latitudes\n min_latitude = np.min(coordinates[:, 0])\n max_latitude = np.max(coordinates[:, 0])\n # Get the minimum and maximum for the longitudes\n min_longitude = np.min(coordinates[:, 1])\n max_longitude = np.max(coordinates[:, 1])\n # Get the number of provided coordinates\n size = int(np.min([1e5, np.max([5e4, len(coordinates)])]))\n # Create an array of uniform-random points as a mesh\n mesh_1 = np.random.uniform(min_latitude, max_latitude, size)\n mesh_2 = np.random.uniform(min_longitude, max_longitude, size)\n mesh = np.vstack((mesh_1.flatten(), mesh_2.flatten())).T\n # Return the evenly-spaced mesh for the coordinates\n return mesh", "def new_mesh_set(self, all_meshes):\n if isinstance(all_meshes, Mesh):\n mesh_tp = []\n mesh_tp.append(all_meshes)\n all_meshes = mesh_tp\n\n if not isinstance(all_meshes, list):\n raise TypeError(\"Please send a list of mesh to update_mesh\")\n 
self.all_meshes = all_meshes\n\n # Remove previous actors from the scene\n for actor in self.mesh_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.mesh_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtkPoints()\n for i, mesh in enumerate(self.all_meshes):\n if mesh.time.size != 1:\n raise IndexError(\"Mesh should be from one frame only\")\n\n points = vtkPoints()\n for j in range(mesh.channel.size):\n # points.InsertNextPoint([0, 0, 0])\n points.InsertNextPoint(mesh.data[:3, j, 0].tolist())\n\n # Create an array for each triangle\n draw_patch = not mesh.automatic_triangles and not self.force_wireframe\n if draw_patch:\n poly_type = vtkPolygon\n n_ids = 3\n color = self.patch_color[i]\n else:\n poly_type = vtkPolyLine\n n_ids = 4\n color = self.mesh_color\n cells = vtkCellArray()\n\n # Create the polygons\n for j in range(mesh.triangles.shape[1]):\n poly = poly_type()\n poly.GetPointIds().SetNumberOfIds(n_ids) # make a tri\n for k in range(len(mesh.triangles[:, j])):\n poly.GetPointIds().SetId(k, mesh.triangles[k, j])\n if not draw_patch:\n poly.GetPointIds().SetId(3, mesh.triangles[0, j]) # Close the triangle\n cells.InsertNextCell(poly)\n\n poly_data = vtkPolyData()\n poly_data.SetPoints(points)\n if draw_patch:\n poly_data.SetPolys(cells)\n else:\n poly_data.SetLines(cells)\n\n mapper = vtkPolyDataMapper()\n mapper.SetInputData(poly_data)\n\n # Create an actor\n self.mesh_actors.append(vtkActor())\n self.mesh_actors[i].SetMapper(mapper)\n self.mesh_actors[i].GetProperty().SetColor(color)\n self.mesh_actors[i].GetProperty().SetOpacity(self.mesh_opacity)\n\n self.parent_window.ren.AddActor(self.mesh_actors[i])\n\n # Update marker position\n self.update_mesh(self.all_meshes)", "def mesh(self, centered=True):\n xx = np.linspace(0, self.Lx, self.Nx, endpoint=False)\n yy = np.linspace(0, self.Ly, self.Ny, endpoint=False)\n\n if centered:\n xx += self.hx/2\n yy += self.hy/2\n\n return np.meshgrid(xx, yy, indexing=\"ij\")", "def makemesh_regular(data,vecs,grid):\n\tdata = beyonder(data,vecs,growsize=0.1)\n\txypts = np.array([[i,j] for i in np.linspace(0,vecs[0],grid[0].astype(int)) \n\t\tfor j in np.linspace(0,vecs[1],grid[1].astype(int))])\n\tinterp = scipy.interpolate.LinearNDInterpolator(data[:,0:2],data[:,2],fill_value=0.0)\n\tbilinear_pts = np.array([[i[0],i[1],interp(i[0],i[1])] for i in xypts])\n\tresult = scipy.interpolate.griddata(bilinear_pts[:,0:2],bilinear_pts[:,2],bilinear_pts[:,0:2],\n\t\tmethod='cubic')\n\t#---observed that griddata returns points where we cycle through the points in the following\n\t#---...order:x0,y0),(x0,y1),...(x0,yn),(x1,y0),... 
and so on, suggesting that the following \n\t#---...reshape command (which reshape function claims to use the \"C\" programming language convention\n\t#---...for reshaping objects by default, which convention has the last index changing \"fastest\")\n\txyz_pts = np.array([[bilinear_pts[i,0],bilinear_pts[i,1],result[i]] for i in range(len(result))])\n\treturn np.reshape(xyz_pts[:,2],grid.astype(int))", "def test_plot_mesh(self):\n plt.close('all')\n\n #\n # Initialize\n #\n fig, ax = plt.subplots(3,3)\n plot = Plot()\n #\n # Define mesh\n # \n mesh = Mesh.newmesh(grid_size=(2,2))\n mesh.refine() \n mesh.root_node().children[1,1].mark(1)\n mesh.refine(1)\n \n # Plot simple mesh\n ax[0,0] = plot.mesh(ax[0,0], mesh)\n \n #\n # Flag a few cells\n # \n mesh.unmark(nodes=True)\n mesh.root_node().children[0,0].mark(2)\n mesh.root_node().children[1,0].mark(1)\n mesh.root_node().children[1,1].children['SW'].mark(3)\n mesh.root_node().children[1,1].children['NE'].mark(3)\n \n # Color flagged cells\n ax[0,1] = plot.mesh(ax[0,1], mesh, color_marked=[1,2,3], nested=True)\n \n # Plot vertex numbers\n ax[0,2] = plot.mesh(ax[0,2], mesh, vertex_numbers=True)\n \n # Plot edge numbers\n ax[1,0] = plot.mesh(ax[1,0], mesh, edge_numbers=True)\n \n # Plot cell numbers nested off\n mesh.refine(2)\n ax[1,1] = plot.mesh(ax[1,1], mesh, cell_numbers=True)\n \n # Plot cell numbers nested on\n ax[1,2] = plot.mesh(ax[1,2], mesh, cell_numbers=True, nested=True)\n\n # Plot dofs\n element = QuadFE(2,'Q1')\n ax[2,0] = plot.mesh(ax[2,0], mesh, element=element, dofs=True)\n \n # Assign dofs in a nested way\n ax[2,1] = plot.mesh(ax[2,1], mesh, element=element, dofs=True, \\\n nested=True)\n \n # Display only dofs of flagged nodes \n ax[2,2] = plot.mesh(ax[2,2], mesh, element=element, dofs=True, \\\n node_flag=3, nested=True, show_axis=True)", "def mesh(self):\n return numpy.meshgrid(*self.edges, indexing='ij')", "def add_to_plot(self, m=None, **kwargs):\n\n # Create empty lists of coordinates\n xx = []\n yy = []\n # Read each element coordinates and add values that will lead to projected values\n # that will be discarded\n #\n # Previous solution: we add a NaN to avoid plotting lines joining 2 elements\n for j in range(0, self.nelements):\n xx.extend((self.xnode[self.i1[j]], self.xnode[self.i2[j]],\n self.xnode[self.i3[j]], self.xnode[self.i1[j]], np.nan))\n yy.extend((self.ynode[self.i1[j]], self.ynode[self.i2[j]],\n self.ynode[self.i3[j]], self.ynode[self.i1[j]], np.nan))\n\n if m is None:\n logger.debug(\"No projection defined\")\n logger.debug('Adding finite-element mesh to plot')\n meshplot = plt.plot(xx, yy, **kwargs)\n\n logger.debug('Setting limits to axes')\n # ax.set_xlim(self.xnode.min(), self.xnode.max())\n # ax.set_ylim(self.ynode.min(), self.ynode.max())\n else:\n logger.debug(\"Applying projection to coordinates\")\n logger.debug('Adding finite-element mesh to map')\n\n # Apply projection\n # (to avoid warnings if we did it through the plot)\n xx, yy = m(xx, yy)\n\n # Mask large values\n xx = np.ma.masked_greater(xx, 1e+10, copy=True)\n yy = np.ma.masked_greater(yy, 1e+10, copy=True)\n\n meshplot = m.plot(xx, yy, latlon=False, **kwargs)\n\n \"\"\"\n logger.debug('Setting limits to axes')\n m.ax.set_xlim(np.nanmin(xx), np.nanmax(xx))\n m.ax.set_ylim(np.nanmin(yy), np.nanmax(yy))\n \"\"\"\n return meshplot", "def box_mesh(point1=Point(0,0,0), point2=Point(2,1,1),\n numptsX=8, numptsY=4, numptsZ=4):\n mesh = BoxMesh(point1, point2, numptsX, numptsY, numptsZ)\n print_mesh_stats(mesh)\n\n return mesh", "def 
make_meshgrid(x_min,x_max,y_min,y_max, h=.02):\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def make_meshgrid(x_min,x_max,y_min,y_max, h=.02):\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def make_meshgrid(x, y, h=.02):\r\n x_min, x_max = x.min() - 1, x.max() + 1\r\n y_min, y_max = y.min() - 1, y.max() + 1\r\n xx, yy = np.meshgrid(np.arange(x_min,x_max,h),np.arange(y_min,y_max,h))\r\n return xx, yy", "def make_meshgrid(x, y, h=.02):\r\n x_min, x_max = x.min() - 1, x.max() + 1\r\n y_min, y_max = y.min() - 1, y.max() + 1\r\n xx, yy = np.meshgrid(np.arange(x_min,x_max,h),np.arange(y_min,y_max,h))\r\n return xx, yy", "def make_meshgrid(x, y, h = 5):\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def buildGrid(self, plot=False):\r\n\r\n print(\"Constructing grid\")\r\n # print(\"Grid dims\", self.ne, self.nn, self.nz)\r\n # print(\"Num points\", 2*(self.ne+1)*(self.nn+1)*3, len(self.coords))\r\n\r\n # number of edges\r\n self.ndx = self.ne + 1\r\n self.ndy = self.nn + 1\r\n self.ndz = self.nz + 1\r\n\r\n # extract the triplets\r\n self.points = {}\r\n self.points[\"e\"] = self.coords[0::3]\r\n self.points[\"n\"] = self.coords[1::3]\r\n self.points[\"z\"] = self.coords[2::3]\r\n\r\n print('points e')\r\n print(self.points[\"e\"])\r\n\r\n # Here are the coordinates\r\n self.X0 = np.reshape(self.points[\"e\"][0::2] , (self.ndx,self.ndy), order=\"F\")\r\n self.Y0 = np.reshape(self.points[\"n\"][0::2] , (self.ndx,self.ndy), order=\"F\")\r\n self.Z0 = np.reshape(self.points[\"z\"][0::2] , (self.ndx,self.ndy), order=\"F\")\r\n\r\n self.X1 = np.reshape(self.points[\"e\"][1::2] , (self.ndx,self.ndy), order=\"F\")\r\n self.Y1 = np.reshape(self.points[\"n\"][1::2] , (self.ndx,self.ndy), order=\"F\")\r\n self.Z1 = np.reshape(self.points[\"z\"][1::2] , (self.ndx,self.ndy), order=\"F\")\r\n #\r\n # # visualize\r\n # if plot:\r\n # print(\"plotting\")\r\n # fig = plt.figure()\r\n # ax = fig.add_subplot(111, projection='3d')\r\n # ax.plot_wireframe(f2m*self.X0, f2m*self.Y0, f2m*self.Z0, rstride=1, cstride=1)\r\n # ax.plot_wireframe(f2m*self.X1, f2m*self.Y1, f2m*self.Z1, rstride=1, cstride=1)\r\n # plt.show()\r", "def Draw1D(mesh, coefs, keep=False, n_p=2, figsize=(20,4)):\n if n_p <= 2:\n n_p = 2\n \n eps = 1e-6 \n \n x_v = [p[0] for p in mesh.ngmesh.Points()]\n x_s = []\n f_s = {}\n\n miny = 1e99\n for f, name in coefs:\n f_s[name] = []\n \n x_s.append(nan)\n for f,name in coefs:\n f_s[name].append(nan)\n \n for el in mesh.ngmesh.Elements1D():\n left = mesh.ngmesh.Points()[el.points[0]][0]\n right = mesh.ngmesh.Points()[el.points[1]][0]\n for l in range(n_p):\n y = left + eps + (l / (n_p-1)) * (right - eps -left) \n x_s.append(y)\n for f,name in coefs:\n ff = f(mesh(y))\n miny = min(miny,ff)\n f_s[name].append(ff)\n \n x_s.append(nan)\n for f,name in coefs:\n f_s[name].append(nan)\n\n \n # plt.clf()\n # display.display(plt.gcf())\n plt.figure(figsize=figsize)\n for f,name in coefs:\n plt.plot(x_s,f_s[name],label=name)\n plt.plot(x_v,[miny for v in x_v],'|',label='vertices')\n plt.xlabel(\"x\")\n plt.legend()\n plt.show()\n if keep:\n display.clear_output(wait=True)", "def mplot_mesh(meshtriang: df.Mesh) -> Tuple[plt.Figure, Any]:\n fig, ax = plt.subplots(1)\n ax.triplot(meshtriang, 'ko-', lw=1)\n return fig, ax", "def create_meshgrid(x, y, h=0.015):\n x_min, x_max = 
x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def getXYZ(self, layout=None):\n def mesh2d(xlim, ylim):\n xx = [[xlim[0],xlim[1]],\n [xlim[0],xlim[1]]]\n yy = [[ylim[0],ylim[0]],\n [ylim[1],ylim[1]]]\n return xx,yy\n def calc(x, y, z, normal, offset):\n unknown = normal.dot(offset) - normal.vec[0]*x - normal.vec[1]*y - normal.vec[2]*z\n return unknown\n \n xlim = [-1,1]\n ylim = [-1,1]\n zlim = [-1,1]\n n = self.normal\n off = self.offset\n \n if n.vec[2] == 0:\n if n.vec[1] == 0:\n if n.vec[0] == 0:\n raise ValueError(\"Normal vector is zero vector.\")\n else:\n #cannot generate z or y but can x, try generating x for yz mesh\n yy, zz = mesh2d(ylim, zlim)\n xx = [[None,None],[None,None]]\n for i in [0,1]:\n for j in [0,1]:\n xx[i][j] = calc(0, yy[i][j], zz[i][j], n, off)\n else:\n #cannot generate z but can y, try generating y for xz mesh\n xx, zz = mesh2d(xlim, zlim)\n yy = [[None,None],[None,None]]\n for i in [0,1]:\n for j in [0,1]:\n yy[i][j] = calc(xx[i][j], 0, zz[i][j], n, off)\n else:\n #try generating z\n xx, yy = mesh2d(xlim, ylim)\n zz = [[None,None],[None,None]]\n for i in [0,1]:\n for j in [0,1]:\n zz[i][j] = calc(xx[i][j], yy[i][j], 0, n, off)\n return xx, yy, zz", "def cmesh(self):\n return numpy.meshgrid(*self.centers, indexing='ij')", "def make_meshgrid(x, y,h=0.02):\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def to_mesh(self, n_angles: int = 30) -> Tuple[np.ndarray, np.ndarray, np.ndarray]:\n angles_a = np.linspace(0, np.pi, n_angles)\n angles_b = np.linspace(0, 2 * np.pi, n_angles)\n\n sin_angles_a = np.sin(angles_a)\n cos_angles_a = np.cos(angles_a)\n\n sin_angles_b = np.sin(angles_b)\n cos_angles_b = np.cos(angles_b)\n\n X = self.point[0] + self.radius * np.outer(sin_angles_a, sin_angles_b)\n Y = self.point[1] + self.radius * np.outer(sin_angles_a, cos_angles_b)\n Z = self.point[2] + self.radius * np.outer(cos_angles_a, np.ones_like(angles_b))\n\n return X, Y, Z", "def make_meshgrid(x, y, h=.02):\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def make_meshgrid(x, y, h=.02):\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def make_meshgrid(x, y, h=.02):\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def make_meshgrid(x, y, h=.02):\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def create_points(self):\n v1 = 0.0\n v2 = 0.5\n v3 = 0.25\n v4 = 0.2 # only used for hexgrid\n\n points = []\n\n points.append((v1, v1, v1)) # 0\n points.append((v2, v1, v1)) # 1\n points.append((v2, v2, v1)) # 2\n points.append((v1, v2, v1)) # 3\n\n points.append((v1, v1, v2)) # 4\n points.append((v2, v1, v2)) # 5\n points.append((v2, v2, v2)) # 6\n points.append((v1, v2, v2)) # 7\n\n points.append((v3, v1, v1)) # 8\n points.append((v2, v3, v1)) # 9\n points.append((v3, v2, v1)) # 10\n points.append((v1, 
v3, v1)) # 11\n\n points.append((v1, v1, v3)) # 12\n points.append((v2, v1, v3)) # 13\n points.append((v2, v2, v3)) # 14\n points.append((v1, v2, v3)) # 15\n\n points.append((v3, v1, v2)) # 16\n points.append((v2, v3, v2)) # 17\n points.append((v3, v2, v2)) # 18\n points.append((v1, v3, v2)) # 19\n\n points.append((v4, v1, v1)) # 20\n points.append((v1, v4, v1)) # 21\n points.append((v1, v1, v4)) # 22\n\n return points", "def plot_mesh(corners):\r\n triangle = tri.Triangulation(corners[:, 0], corners[:, 1])\r\n\r\n refiner = tri.UniformTriRefiner(triangle)\r\n trimesh = refiner.refine_triangulation(subdiv=4)\r\n \r\n plt.figure(figsize=(6, 4))\r\n for i, mesh in enumerate((triangle, trimesh)):\r\n plt.subplot(1, 2, i+1)\r\n plt.triplot(mesh)\r\n plt.axis('off')\r\n plt.axis('equal')", "def DisplayMesh():\r\n \r\n # Load Surface Mesh Data and generate normals\r\n VTKString = OpenData('C:/Users/Tim/Documents/University/Year 4/Final Project/FinalYearProjectCode/TEH_Code/InputFiles','muscle_surface.vtk')\r\n header, Vertices, Triangles = CreateMatrixVTK(VTKString)\r\n \r\n fig = plt.figure()\r\n ax1 = fig.add_subplot(111,projection = '3d')\r\n ax1.plot_trisurf(Vertices[:,0],Vertices[:,1],Vertices[:,2],triangles= Triangles[:,1:])\r\n ax1.set_zlabel('z')\r\n ax1.set_ylabel('y')\r\n ax1.set_xlabel('x')\r\n plt.show()", "def createVertexData(self, fv_indices_flattened, points):\n mesh_points = points[fv_indices_flattened]\n data_mesh_points = mesh_points.flatten()\n\n return data_mesh_points", "def plot_surface(self):\n X, Y = np.meshgrid(self.x, self.y)\n fig = plt.figure()\n ax = fig.add_subplot(111, projection='3d')\n ax.plot_surface(X=X, Y=Y, Z=self.z)\n plt.show()", "def create_mesh(self):\n print(\"create_mesh\")\n faces = self.get_faces()\n print(\"num faces: {}\".format(len(faces)))\n\n # TODO: perform face filtering to remove long edges in Z direction\n # filtered_faces = self.get_filtered_faces(faces)\n # print(\"num filtered faces: {}\".format(len(filtered_faces)))\n\n vertices = self.xyz_points.T\n\n # handle texture mappings\n vertex_index_to_texture = []\n for j in range(0, self.height):\n for i in range(0, self.width):\n # vertex_index = (j * self.width) + ij\n w = i / self.width\n h = (self.height - j - 1) / self.height\n vertex_index_to_texture.append(\n (w, h)\n )\n\n # Create material.\n # TODO: make the string/filename randomly generated and unique\n file0 = open(os.path.join(self.args.path, \"triangle_mesh.obj.mtl\"), \"w\") # write mode\n file0.write(\"newmtl material_0\\n\")\n # Save image here.\n cv2.imwrite(os.path.join(self.args.path, \"triangle_mesh.png\"), self.bgr)\n file0.write(\"map_Kd triangle_mesh.png\\n\")\n file0.close()\n\n # https://en.wikipedia.org/wiki/Wavefront_.obj_file\n # https://github.com/mmatl/pyrender/blob/master/examples/models/fuze.obj\n obj_path = os.path.join(self.args.path, \"triangle_mesh.obj\")\n file1 = open(obj_path, \"w\") # write mode\n file1.write(\"mtllib ./triangle_mesh.obj.mtl\\n\")\n for vertex in vertices:\n x, y, z = vertex\n file1.write(\"v {} {} {}\\n\".format(x, y, z))\n file1.write(\"usemtl material_0\\n\")\n for w, h in vertex_index_to_texture:\n file1.write(\"vt {} {}\\n\".format(w, h))\n for face in faces:\n a, b, c = face\n a += 1\n b += 1\n c += 1\n file1.write(\"f {}/{} {}/{} {}/{}\\n\".format(\n a, a, b, b, c, c\n )\n )\n file1.close()\n\n # Load the trimesh from OBJ file.\n trimesh_mesh = trimesh.load(obj_path)\n # trimesh_mesh.show()\n\n mesh = pyrender.Mesh.from_trimesh(trimesh_mesh, smooth=False)\n self.scene = 
pyrender.Scene(ambient_light=[3.0, 3.0, 3.0])\n\n camera = pyrender.IntrinsicsCamera(\n self.focal_length, self.focal_length, self.width / 2, self.height / 2\n )\n self.camera_pose = np.array([\n [1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0],\n ])\n # https://pyrender.readthedocs.io/en/latest/examples/cameras.html#creating-cameras\n # https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.transform.Rotation.html\n r = R.from_rotvec(np.array([0, np.pi, 0]))\n r = R.from_rotvec(np.array([0.0, 0, np.pi])) * r\n matrix = r.as_matrix()\n self.camera_pose[:3, :3] = matrix\n\n light = pyrender.PointLight(\n color=[1.0, 1.0, 1.0],\n intensity=0.0\n )\n\n self.nm = pyrender.Node(mesh=mesh, matrix=np.eye(4))\n self.nl = pyrender.Node(light=light, matrix=np.eye(4))\n self.nc = pyrender.Node(camera=camera, matrix=np.eye(4))\n self.scene.add_node(self.nm)\n self.scene.add_node(self.nl)\n self.scene.add_node(self.nc)\n\n # Set the pose and show the image.\n temppose = self.extrinsics @ self.camera_pose\n self.scene.set_pose(self.nl, pose=temppose)\n self.scene.set_pose(self.nc, pose=temppose)\n pyrender.Viewer(self.scene, use_raymond_lighting=True,\n viewport_size=(self.width, self.height))", "def _mesh(self):\n from scipy.spatial import Delaunay\n points = self.cluster.get_positions()\n delaunay = Delaunay(points)\n simplices = self._filter_max_dist_in_element(delaunay.simplices)\n delaunay.simplices = simplices\n return delaunay", "def plotSurface(X):\n from mpl_toolkits.mplot3d import Axes3D\n from mpl_toolkits.mplot3d import proj3d\n f=plt.figure()\n ax=f.add_subplot(111,projection='3d')\n xi=np.arange(10,14,0.05)\n yi=np.arange(12,16,0.05)\n z = matplotlib.mlab.griddata(X[:,0], X[:,1], X[:,2], xi, yi, interp='nn')\n x, y = np.meshgrid(xi, yi)\n ax.plot_surface(x, y, z)\n return f", "def meshgrid(self):\n vecs = self.coord_vecs\n return np.meshgrid(*vecs, indexing='ij')", "def create_point_cloud(self):\n pixels = []\n colors = []\n my_pixels = []\n for j in range(self.height):\n for i in range(self.width):\n depth = self.depth[j, i]\n pixels.append(\n [i * depth, j * depth, depth]\n )\n my_pixels.append(\n [i, j, 1]\n )\n # make rgb with flip()\n colors.append(np.flip(self.bgr[j, i, :]))\n # colors.append(self.bgr[j, i, :])\n self.my_pixels = my_pixels\n pixels = np.array(pixels)\n\n # project pixels to camera space\n self.xyz_points = self.intrinsics_inv @ np.transpose(pixels)\n self.color_points = colors\n\n # now add 1s to the points for homogenous coordinates\n num_points = self.get_num_xyz_points()\n ones = np.ones((1, num_points))\n self.xyzw_points = np.concatenate((self.xyz_points, ones), axis=0)\n\n self.scene = None\n self.camera_pose = None\n self.nm = None\n self.nl = None\n self.nc = None\n self.create_mesh()", "def create_mesh(name):\n return bpy.data.meshes.new(name)", "def plot(mesh):\n from scipy.spatial import delaunay_plot_2d\n fig = delaunay_plot_2d(SimpleMesh(mesh))\n ax = fig.gca()\n ax.set_aspect(\"equal\")\n return fig, ax", "def make_meshgrid(x, y, h=0.02):\n space = 0.3\n x_min, x_max = x.min() - space, x.max() + space\n y_min, y_max = y.min() - space, y.max() + space\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy", "def rectangle_mesh(point1=Point(0,0), point2=Point(2,1), numptsX=10, numptsY=5):\n\n mesh = RectangleMesh(point1, point2, numptsX, numptsY )\n print_mesh_stats(mesh)\n \n return mesh", "def create_single_triangle_mesh():\n vertices = np.array(\n ((0, 0, 
0), (1, 0, 0), (0, 1, 0)), dtype=np.float32)\n faces = np.array(((0, 1, 2),), dtype=np.int32)\n return vertices, faces", "def mlab_plt_cube(xmin, xmax, ymin, ymax, zmin, zmax):\n faces = cube_faces(xmin, xmax, ymin, ymax, zmin, zmax)\n for grid in faces:\n x, y, z = grid\n mlab.mesh(x, y, z, opacity=0.1, color=(0.1, 0.2, 0.3))", "def draw(self, color=None):\n color = color or self.color\n vertices = [list(vertex) for vertex in self.shape.vertices]\n faces = self.shape.faces\n mesh = compas_ghpython.draw_mesh(vertices,\n faces,\n color=color)\n return mesh", "def Parallelepiped(self,lower_left_rear_point=(0,0,0), upper_right_front_point=(2,4,10),\n nx=2, ny=4, nz=10, element_type=\"hex\"):\n\n if self.elements is not None and self.points is not None:\n self.__reset__()\n\n if element_type != \"tet\" and element_type != \"hex\":\n raise ValueError(\"Can only generate parallelepiped mesh using tetrahedrals or hexahedrals\")\n\n if (lower_left_rear_point[0] > upper_right_front_point[0]) or \\\n (lower_left_rear_point[1] > upper_right_front_point[1]) or \\\n (lower_left_rear_point[2] > upper_right_front_point[2]):\n raise ValueError(\"Incorrect coordinate for lower left rear and upper right front vertices\")\n\n nx, ny, nz = int(nx), int(ny), int(nz)\n if nx <= 0 or ny <= 0 or nz <= 0:\n raise ValueError(\"Number of discretisation cannot be zero or negative: nx={} ny={} nz={}\".format(nx,ny,nz))\n\n\n x=np.linspace(lower_left_rear_point[0],upper_right_front_point[0],nx+1)\n y=np.linspace(lower_left_rear_point[1],upper_right_front_point[1],ny+1)\n z=np.linspace(lower_left_rear_point[2],upper_right_front_point[2],nz+1)\n\n Y,X,Z = np.meshgrid(y,x,z)\n coordinates = np.dstack((X.T.flatten(),Y.T.flatten(),Z.T.flatten()))[0,:,:]\n\n self.element_type = \"hex\"\n self.nelem = int(nx*ny*nz)\n elements = np.zeros((self.nelem,8),dtype=np.int64)\n\n dum_0 = np.arange((nx+1)*ny)\n dum_1 = np.array([(nx+1)*i+nx for i in range(ny)])\n dum_2 = np.delete(dum_0,dum_1)\n col0 = np.array([dum_2+i*(nx+1)*(ny+1) for i in range(nz)]).flatten()\n\n elements[:,0] = col0\n elements[:,1] = col0 + 1\n elements[:,2] = col0 + nx + 2\n elements[:,3] = col0 + nx + 1\n elements[:,4] = col0 + (nx + 1) * (ny + 1)\n elements[:,5] = col0 + (nx + 1) * (ny + 1) + 1\n elements[:,6] = col0 + (nx + 1) * (ny + 1) + nx + 2\n elements[:,7] = col0 + (nx + 1) * (ny + 1) + nx + 1\n\n self.elements = elements\n self.points = coordinates\n self.nnode = self.points.shape[0]\n\n self.GetBoundaryFacesHex()\n self.GetBoundaryEdgesHex()\n\n if element_type == \"tet\":\n sys.stdout = open(os.devnull, \"w\")\n self.ConvertHexesToTets()\n sys.stdout = sys.__stdout__", "def trimesh_from_point_cloud(cloud):\n points = np.asarray(cloud)\n hull = scipy.spatial.ConvexHull(points)\n hull = scipy.spatial.ConvexHull(points[hull.vertices])\n ru.transforms.counterclockwise_hull(hull)\n vertices = hull.points\n faces = hull.simplices\n return vertices, faces", "def __init__(self, points, verbose=False):\n assert(type(points) == np.ndarray)\n assert(points.dtype==int)\n assert(points.shape[1] == 3)\n assert(points.shape[0]>1)\n\n # Make points unique to avoid duplicate vertices:\n self.points = np.unique(points, axis=0)\n self.verbose = verbose\n self.g = self.__generate()", "def PlotMeshNumbering(self, figure=None, show_plot=True):\n\n self.__do_essential_memebers_exist__()\n\n import matplotlib.pyplot as plt\n import matplotlib as mpl\n\n if self.element_type == \"tri\":\n\n if figure is None:\n figure = plt.figure()\n 
plt.triplot(self.points[:,0],self.points[:,1], self.elements[:,:3])\n plt.tricontourf(self.points[:,0], self.points[:,1], self.elements[:,:3], np.ones(self.points.shape[0]), 100,alpha=0.3)\n\n for i in range(0,self.elements.shape[0]):\n coord = self.points[self.elements[i,:],:]\n x_avg = np.sum(coord[:,0])/self.elements.shape[1]\n y_avg = np.sum(coord[:,1])/self.elements.shape[1]\n plt.text(x_avg,y_avg,str(i),backgroundcolor='#F88379',ha='center')\n\n for i in range(0,self.points.shape[0]):\n plt.text(self.points[i,0],self.points[i,1],str(i),backgroundcolor='#0087BD',ha='center')\n\n plt.axis('equal')\n if show_plot:\n plt.show()\n\n elif self.element_type == \"quad\":\n\n if figure is None:\n figure = plt.figure()\n point_radius = 3.\n\n C = self.InferPolynomialDegree() - 1\n\n edge_elements = self.GetElementsEdgeNumberingQuad()\n reference_edges = NodeArrangementQuad(C)[0]\n reference_edges = np.concatenate((reference_edges,reference_edges[:,1,None]),axis=1)\n reference_edges = np.delete(reference_edges,1,1)\n\n self.GetEdgesQuad()\n x_edges = np.zeros((C+2,self.all_edges.shape[0]))\n y_edges = np.zeros((C+2,self.all_edges.shape[0]))\n\n BasesOneD = np.eye(2,2)\n for iedge in range(self.all_edges.shape[0]):\n ielem = edge_elements[iedge,0]\n edge = self.elements[ielem,reference_edges[edge_elements[iedge,1],:]]\n x_edges[:,iedge], y_edges[:,iedge] = self.points[edge,:].T\n\n\n plt.plot(x_edges,y_edges,'-k')\n\n for i in range(self.elements.shape[0]):\n coord = self.points[self.elements[i,:],:]\n x_avg = np.sum(coord[:,0])/self.elements.shape[1]\n y_avg = np.sum(coord[:,1])/self.elements.shape[1]\n plt.text(x_avg,y_avg,str(i),backgroundcolor='#F88379',ha='center')\n\n for i in range(0,self.points.shape[0]):\n plt.text(self.points[i,0],self.points[i,1],str(i),backgroundcolor='#0087BD',ha='center')\n\n plt.axis('equal')\n if show_plot:\n plt.show()\n\n elif self.element_type == \"tet\" or self.element_type == \"hex\":\n\n import matplotlib as mpl\n import os\n os.environ['ETS_TOOLKIT'] = 'qt4'\n from mayavi import mlab\n\n if figure is None:\n figure = mlab.figure(bgcolor=(1,1,1),fgcolor=(1,1,1),size=(800,600))\n view = mlab.view()\n figure.scene.disable_render = True\n\n color = mpl.colors.hex2color('#F88379')\n\n linewidth = 3.\n # trimesh_h = mlab.triangular_mesh(self.points[:,0],\n # self.points[:,1], self.points[:,2], self.faces[:,:3],\n # line_width=linewidth,tube_radius=linewidth,color=(0,0.6,0.4),\n # representation='wireframe') # representation='surface'\n\n # # CHANGE LIGHTING OPTION\n # trimesh_h.actor.property.interpolation = 'phong'\n # trimesh_h.actor.property.specular = 0.1\n # trimesh_h.actor.property.specular_power = 5\n\n # PLOTTING EDGES\n from Florence.PostProcessing import PostProcess\n tmesh = PostProcess(3,3).Tessellate(self, np.zeros_like(self.points), interpolation_degree=0,\n plot_points=True, plot_edges=True, plot_surfaces=False)\n\n x_edges = tmesh.x_edges\n y_edges = tmesh.y_edges\n z_edges = tmesh.z_edges\n connections = tmesh.connections\n\n src = mlab.pipeline.scalar_scatter(x_edges.T.copy().flatten(), y_edges.T.copy().flatten(), z_edges.T.copy().flatten())\n src.mlab_source.dataset.lines = connections\n h_edges = mlab.pipeline.surface(src, color = (0,0.6,0.4), line_width=linewidth)\n # AVOID WARNINGS\n # lines = mlab.pipeline.stripper(src)\n # h_edges = mlab.pipeline.surface(lines, color = (0,0.6,0.4), line_width=linewidth)\n\n # ELEMENT NUMBERING\n # for i in range(0,self.elements.shape[0]):\n # coord = self.points[self.elements[i,:],:]\n # x_avg = 
np.sum(coord[:,0])/self.elements.shape[1]\n # y_avg = np.sum(coord[:,1])/self.elements.shape[1]\n # z_avg = np.sum(coord[:,2])/self.elements.shape[1]\n\n # # mlab.text3d(x_avg,y_avg,z_avg,str(i),color=color)\n # mlab.text3d(x_avg,y_avg,z_avg,str(i),color=(0,0,0.),scale=2)\n\n # POINT NUMBERING\n for i in range(self.elements.shape[0]):\n for j in range(self.elements.shape[1]):\n text_obj = mlab.text3d(self.points[self.elements[i,j],0],\n self.points[self.elements[i,j],1],self.points[self.elements[i,j],2],str(self.elements[i,j]),\n color=(0,0,0.),scale=0.05)\n\n\n figure.scene.disable_render = False\n\n if show_plot:\n # mlab.view(*view)\n mlab.show()", "def create_uniform_mesh(a=0, b=100, n_elem=4):\n pts = arange(a, b, float(b-a)/(n_elem))\n pts = list(pts) + [b]\n assert len(pts) == n_elem + 1\n return array(pts)", "def setup_kpoints(self):\n kpoints_mesh = KpointsData()\n kpoints_mesh.set_cell_from_structure(self.inputs.structure)\n kpoints_mesh.set_kpoints_mesh_from_density(\n distance=self.ctx.protocol['kpoints_mesh_density'],\n offset=self.ctx.protocol['kpoints_mesh_offset']\n )\n\n self.ctx.kpoints_mesh = kpoints_mesh", "def generate_mesh(\n poly_coords: np.ndarray,\n hole_coords: Optional[List[np.ndarray]] = None,\n min_points: Optional[int] = None,\n max_edge_length: Optional[float] = None,\n convex_hull: bool = False,\n boundary: Optional[np.ndarray] = None,\n preserve_boundary: bool = False,\n min_angle: float = 32.5,\n **kwargs,\n) -> Tuple[np.ndarray, np.ndarray]:\n poly_coords = ensure_unique(poly_coords)\n if hole_coords is None:\n hole_coords = []\n hole_coords = [ensure_unique(coords) for coords in hole_coords]\n # Facets is a shape (m, 2) array of edge indices.\n # coords[facets] is a shape (m, 2, 2) array of edge coordinates:\n # [(x0, y0), (x1, y1)]\n coords = np.concatenate([poly_coords] + hole_coords, axis=0)\n xmin = coords[:, 0].min()\n dx = np.ptp(coords[:, 0])\n ymin = coords[:, 1].min()\n dy = np.ptp(coords[:, 1])\n r0 = np.array([[xmin, ymin]]) + np.array([[dx, dy]]) / 2\n # Center the coordinates at (0, 0) to avoid floating point issues.\n coords = coords - r0\n indices = np.arange(len(poly_coords), dtype=int)\n if convex_hull:\n if boundary is not None:\n raise ValueError(\n \"Cannot have both boundary is not None and convex_hull = True.\"\n )\n facets = spatial.ConvexHull(coords).simplices\n else:\n if boundary is not None:\n boundary = list(map(tuple, ensure_unique(boundary - r0)))\n indices = [i for i in indices if tuple(coords[i]) in boundary]\n facets = np.array([indices, np.roll(indices, -1)]).T\n # Create facets for the holes.\n for hole in hole_coords:\n hole_indices = np.arange(\n indices[-1] + 1, indices[-1] + 1 + len(hole), dtype=int\n )\n hole_facets = np.array([hole_indices, np.roll(hole_indices, -1)]).T\n indices = np.concatenate([indices, hole_indices], axis=0)\n facets = np.concatenate([facets, hole_facets], axis=0)\n\n mesh_info = triangle.MeshInfo()\n mesh_info.set_points(coords)\n mesh_info.set_facets(facets)\n if hole_coords:\n # Triangle allows you to set holes by specifying a single point\n # that lies in each hole. 
Here we use the centroid of the hole.\n holes = [\n np.array(Polygon(hole).centroid.coords[0]) - r0.squeeze()\n for hole in hole_coords\n ]\n mesh_info.set_holes(holes)\n\n kwargs = kwargs.copy()\n kwargs[\"allow_boundary_steiner\"] = not preserve_boundary\n if \"min_angle\" not in kwargs:\n kwargs[\"min_angle\"] = min_angle\n\n mesh = triangle.build(mesh_info=mesh_info, **kwargs)\n points = np.array(mesh.points) + r0\n triangles = np.array(mesh.elements)\n if min_points is None and (max_edge_length is None or max_edge_length <= 0):\n return points, triangles\n\n kwargs[\"max_volume\"] = dx * dy / 100\n i = 1\n if min_points is None:\n min_points = 0\n if max_edge_length is None or max_edge_length <= 0:\n max_edge_length = np.inf\n max_length = get_edge_lengths(points, triangles).max()\n while (len(points) < min_points) or (max_length > max_edge_length):\n mesh = triangle.build(mesh_info=mesh_info, **kwargs)\n points = np.array(mesh.points) + r0\n triangles = np.array(mesh.elements)\n edges, is_boundary = get_edges(triangles)\n if preserve_boundary:\n # Only constrain the length of interior edges, i.e. edges not on the boundary.\n edges = edges[~is_boundary]\n edge_lengths = np.linalg.norm(np.diff(points[edges], axis=1), axis=2)\n max_length = edge_lengths.max()\n logger.debug(\n f\"Iteration {i}: Made mesh with {len(points)} points and \"\n f\"{len(triangles)} triangles with maximum interior edge length: \"\n f\"{max_length:.2e}. Target maximum edge length: {max_edge_length:.2e}.\"\n )\n if np.isfinite(max_edge_length):\n kwargs[\"max_volume\"] *= min(0.98, np.sqrt(max_edge_length / max_length))\n else:\n kwargs[\"max_volume\"] *= 0.98\n i += 1\n return points, triangles", "def ppcolormesh_from_meshgrid(ax: Axes, x: np.ndarray, y: np.ndarray,\n z: np.ndarray, **kw) -> AxesImage:\n cmap = kw.get('cmap', cm.viridis)\n\n x = x.astype(float)\n y = y.astype(float)\n z = z.astype(float)\n\n # first check if we need to fill some masked values in\n if np.ma.is_masked(x):\n x = x.filled(np.nan)\n if np.ma.is_masked(y):\n y = y.filled(np.nan)\n if np.ma.is_masked(z):\n z = z.filled(np.nan)\n\n # next: try some surgery, if possible\n if np.all(num.is_invalid(x)) or np.all(num.is_invalid(y)):\n return\n if np.any(np.isnan(x)) or np.any(np.isnan(y)):\n x, y = interp_meshgrid_2d(x, y)\n if np.any(num.is_invalid(x)) or np.any(num.is_invalid(y)):\n x, y, z = num.crop2d(x, y, z)\n\n # next, check if the resulting grids are even still plotable\n for g in x, y, z:\n if g.size == 0:\n return\n elif len(g.shape) < 2:\n return\n\n # special case: if we have a single line, a pcolor-type plot won't work.\n elif min(g.shape) < 2:\n im = ax.scatter(x, y, c=z)\n return im\n\n # and finally: the meshgrid we have describes coordinates, but for plotting\n # with pcolormesh we need vertices.\n try:\n x = centers2edges_2d(x)\n y = centers2edges_2d(y)\n except:\n return\n\n im = ax.pcolormesh(x, y, z, cmap=cmap, **kw)\n ax.set_xlim(x.min(), x.max())\n ax.set_ylim(y.min(), y.max())\n return im", "def plot_2D_edp(self, xmin=-100, xmax=100, zmin=-100, zmax=100, N=201):\n rho_xz = []\n xgrid = np.linspace(xmin, xmax, num=N)\n zgrid = np.linspace(zmin, zmax, num=N)\n for x in xgrid:\n for z in zgrid:\n tmp = self.phase * self.F * np.cos(self.qx*x+self.qz*z)\n rho_xz.append([x, z, tmp.sum(axis=0)])\n rho_xz = np.array(rho_xz, float) \n X, Y, Z= rho_xz[:,0], rho_xz[:,1], rho_xz[:,2]\n #Y = rho_xz[:,1]\n #Z = rho_xz[:,2]\n X.shape = (N, N)\n Y.shape = (N, N)\n Z.shape = (N, N)\n plt.figure()\n plt.contourf(X, Y, Z)", "def 
plot_fenics_mesh(mesh, new_fig=True):\n if(new_fig):\n plt.figure()\n\n plot(mesh)\n #plt.title(\"FEniCS mesh\")\n plt.show(block=False)\n\n pass", "def square_mesh(N):\n xs,ys = np.meshgrid(np.linspace(0,1,N),np.linspace(0,1,N))\n xs = xs.flatten(1)\n ys = ys.flatten(1)\n _,_,t,_ = triang.delaunay(xs,ys)\n p = np.vstack((xs,ys)).T\n\n return Trimesh(p,t)", "def create_square_triangle_mesh():\n vertices = np.array(\n ((0, 0, 0), (1, 0, 0), (0, 1, 0), (1, 1, 0), (0.5, 0.5, 0)),\n dtype=np.float32)\n faces = np.array(\n ((0, 1, 4), (1, 3, 4), (3, 2, 4), (2, 0, 4)), dtype=np.int32)\n return vertices, faces", "def MakeCoordinates3D(self):\n\n self.points = np.concatenate((self.points, np.zeros((self.points.shape[0],1)) ), axis=1)\n self.points = np.ascontiguousarray(self.points)", "def defineCircleLayout(self):\n # Define a 2-D array representing the position of each mesh point\n self.xPoints = self.frange(0,self.R,self.h)\n self.yPoints = self.frange(0,self.R,self.h)\n\n # Position of internal mesh points\n internal_xyCoord = [(i,j) for i in self.xPoints for j in self.yPoints if (i - self.R)**2 + (j - self.R)**2 < self.R^2] \n\n # Define the dictionary containing internal points\n for k in internal_xyCoord:\n x = k[0]\n y = k[1]\n xLabel = xPoints.index(x)\n yLabel = yPoints.index(y)\n self.internalPoints[(xLabel,yLabel)] = meshPoint(type = 'internal',x = x, y = y, xLabel = xLabel, yLabel = yLabel) \n\n # Position of the boundary points\n # Find the intersection of each mesh line with the circle\n # For a given vertical mesh line: \n # y = R - sqrt(R^2 - (x-R)^2) & y = R + sqrt(R^2 - (x-R)^2)\n # For a given horizontal mesh line: \n # x = R - sqrt(R^2 - (y-R)^2) & x = R + sqrt(R^2 - (y-R)^2)\n boundary_xyCoord = [(0,self.R),(self.R,0),(self.R,2*self.R),(2*self.R,self.R)] + [(x,self.R - math.sqrt(self.R**2 - (x-self.R)**2)) for x in self.xPoints[1:len(self.xPoints)-1]] + [(x,self.R - math.sqrt(self.R**2 + (x-self.R)**2)) for x in self.xPoints[1:len(self.xPoints)-1]] + [(self.R - math.sqrt(self.R**2 - (y-self.R)**2),y) for y in self.yPoints[1:len(yPoints)-1]] + [(self.R + math.sqrt(self.R**2 - (y-self.R)**2),y) for y in self.yPoints[1:len(yPoints)-1]] \n\n # Define the dictionary containing boundary points\n for k in boundary_xyCoord:\n x = k[0]\n y = k[1]\n [xLabel,yLabel] = self.findLabel(x,y)\n self.boundaryPoints[(xLabel,yLabel)] = meshPoint(type = 'boundary',x = x, y = y, xLabel = xLabel, yLabel = yLabel) \n \n # Now that we have assigned the labels we can define fE, fW, fN and fS\n self.fCalc()", "def meshgrid(x, y, row_major=True):\n # type: (int, int, bool)->Tensor\n a = torch.arange(0, x)\n b = torch.arange(0, y)\n xx = a.repeat(y).view(-1, 1).float()\n yy = b.view(-1, 1).repeat(1, x).view(-1, 1).float()\n return torch.cat([xx, yy], 1) if row_major else torch.cat([yy, xx], 1)", "def generate(pts):\n cmds.polyCreateFacet(name=\"shirt\", p=points)\n cmds.polyTriangulate()\n cmds.polySubdivideFacet(dv=SUBDIVISIONS)\n cmds.polyTriangulate()", "def meshgrid(x,y):\n x = asarray(x)\n y = asarray(y)\n numRows, numCols = len(y), len(x) # yes, reversed\n x = x.reshape(1,numCols)\n X = x.repeat(numRows, axis=0)\n\n y = y.reshape(numRows,1)\n Y = y.repeat(numCols, axis=1)\n return X, Y", "def ProjectToPlane(self):\n\n self.__do_essential_memebers_exist__()\n if self.element_type != \"tri\":\n raise ValueError(\"Project to plane is only applicable to triangles\")\n\n imesh = deepcopy(self)\n coordinates = []\n connectivities = []\n for counter, elem in enumerate(imesh.elements):\n\n 
elementCoordinates = imesh.points[elem,:]\n\n A = elementCoordinates[0,:]\n B = elementCoordinates[1,:]\n C = elementCoordinates[2,:]\n\n X = (B - A); X /= np.linalg.norm(X)\n Z = np.cross(X, C - A); Z /= np.linalg.norm(Z)\n Y = np.cross(Z, X)\n\n # PROJECT THE TRIANGLE TO THIS BASES\n a = [0., 0.]\n b = [np.linalg.norm((B - A)), 0.]\n c = [(C - A).dot(X), (C - A).dot(Y)]\n\n coordinates.append(a)\n coordinates.append(b)\n coordinates.append(c)\n\n elementConnectivity = [3 * counter, 3 * counter + 1, 3 * counter + 2]\n connectivities.append(elementConnectivity)\n\n coordinates = np.array(coordinates)\n connectivities = np.array(connectivities)\n imesh.points = coordinates\n imesh.elements = connectivities\n imesh.nelem = imesh.elements.shape[0]\n imesh.nnode = imesh.points.shape[0]\n\n return imesh", "def MeshPyTri(points,facets,*args,**kwargs):\n info = triangle.MeshInfo()\n info.set_points(points)\n info.set_facets(facets)\n\n return triangle.build(info,*args,**kwargs)", "def vertices(self):\n d = self.space_dimension()\n v = vector(ZZ, d)\n points = []\n for g in self.minimized_generators():\n for i in range(0,d):\n v[i] = g.coefficient(Variable(i))\n v_copy = copy.copy(v)\n v_copy.set_immutable()\n points.append(v_copy)\n return tuple(points)", "def make_xy_grid(samples_x, samples_y=None, radius=1):\n if samples_y is None:\n samples_y = samples_x\n x = e.linspace(-radius, radius, samples_x, dtype=config.precision)\n y = e.linspace(-radius, radius, samples_y, dtype=config.precision)\n xx, yy = e.meshgrid(x, y)\n return xx, yy", "def plot_mesh_function(mesh, f, title=\"\", colormap = \"hot\", edges = False, mybounds = [], myticks = []) :\n if mesh.dimension() == 1 :\n # get the mesh points\n x = mesh_axes(mesh)\n # plot the map\n plt.plot(x, f)\n \n elif mesh.dimension() == 2 :\n\n # Get the mesh axes and then make a grid of them for plotting.\n x, y = mesh_axes(mesh)\n X, Y = np.meshgrid(x, y)\n # Reshape the function\n f = f.reshape(mesh.number_cells_x(), mesh.number_cells_y())\n if edges :\n plt.pcolor(X, Y, f, cmap=colormap, edgecolors='k')\n else :\n plt.pcolor(X, Y, f, cmap=colormap)\n plt.axis(\"scaled\") \n plt.xlabel(\"x [cm]\")\n plt.ylabel(\"y [cm]\")\n if len(myticks) :\n cbar = plt.colorbar(boundaries=mybounds,ticks=myticks)\n else : \n cbar = plt.colorbar()\n else :\n print \"not ready for 3d\"\n return\n plt.title(title)\n # show the plot\n plt.show()", "def smooth_mesh(\n points: np.ndarray, triangles: np.ndarray, iterations: int\n) -> Tuple[np.ndarray, np.ndarray]:\n edges, _ = get_edges(triangles)\n n = points.shape[0]\n shape = (n, 2)\n boundary = boundary_vertices(points, triangles)\n for _ in range(iterations):\n num_neighbors = np.bincount(edges.ravel(), minlength=shape[0])\n new_points = np.zeros(shape)\n vals = points[edges[:, 1]].T\n new_points += np.array(\n [np.bincount(edges[:, 0], val, minlength=n) for val in vals]\n ).T\n vals = points[edges[:, 0]].T\n new_points += np.array(\n [np.bincount(edges[:, 1], val, minlength=n) for val in vals]\n ).T\n new_points /= num_neighbors[:, np.newaxis]\n # reset boundary points\n new_points[boundary] = points[boundary]\n points = new_points\n return points, triangles", "def x_mesh(N,interval):\n (a,b) = interval\n h = (b-a)/N\n xmesh1=[a]\n for i in range(1,N):\n xmesh1.append(a+i*h)\n xmesh1.append(b)\n xmesh2=xmesh1[1:N]\n \n return xmesh1,xmesh2", "def createMesh(objname,Vert,Edges=[],Faces=[]):\n me = bpy.data.meshes.new(objname)\n ob = bpy.data.objects.new(objname,me)\n bpy.context.scene.objects.link(ob)\n \n 
me.from_pydata(Vert,Edges,Faces)\n me.update(calc_edges=True)", "def create_mesh(verts, faces, name):\n thisfunc = thisfile + '->create_mesh()'\n\n verts = np.array(verts)\n\n # Create mesh\n mesh_data = bpy.data.meshes.new(name)\n mesh_data.from_pydata(verts, [], faces)\n mesh_data.update()\n\n logging.info(\"%s: Mesh '%s' created\", thisfunc, name)\n\n return mesh_data", "def tricolormesh(x_vert, y_vert, z=None, vmin=None, vmax=None, edgecolor='none', linewidth=0.2, cmap=None, rasterized=False):\n \n # Set default color map\n if cmap is None:\n cmap = plt.cm.jet\n\n # Some (very limited..) input checks\n if(x_vert.ndim != 2 or y_vert.ndim != 2):\n raise Exception('x_vert and y_vert must have 2 dimensions [cells,edges_of_cell]')\n\n # Clip the z array to take vmin and vmax into account\n if z is not None:\n z_clip = z.copy()\n if(vmin is not None):\n z_clip[z<vmin] = vmin\n if(vmax is not None):\n z_clip[z>vmax] = vmax\n else:\n z_clip = z\n \n # Create iterator with correct format for input in PolyCollection()\n tri = (zip(x_vert[i,:],y_vert[i,:]) for i in range(x_vert[:,0].size))\n\n # Create the polygons\n if z is None:\n col = PolyCollection(tri, cmap=cmap, edgecolors=edgecolor, facecolors='none', linewidths=linewidth, rasterized=rasterized)\n else:\n col = PolyCollection(tri, array=z_clip, cmap=cmap, edgecolors=edgecolor, linewidths=linewidth, rasterized=rasterized)\n\n # Add to axes\n ax = plt.gca() \n ax.add_collection(col)\n\n # Draw second time such that there are really no edges visible (bit hackish...)\n if(edgecolor == 'none'):\n ax.add_collection(col)\n\n # Scale axis to wrap PolyCollection\n ax.autoscale_view() \n\n # Return the polycollection, for drawing a colorbar\n return col", "def render_vertices_3d(self, **kwds):\n return point3d(self.coordinates_of(self.points), **kwds)", "def CreateDummyLowerDimensionalMesh(self):\n\n\n sys.stdout = open(os.devnull, \"w\")\n p = self.InferPolynomialDegree()\n mesh = Mesh()\n if self.element_type == \"tet\":\n mesh.Rectangle(nx=1,ny=1, element_type=\"tri\")\n mesh.GetHighOrderMesh(p=p)\n elif self.element_type == \"hex\":\n mesh.Rectangle(nx=1,ny=1, element_type=\"quad\")\n mesh.GetHighOrderMesh(p=p)\n elif self.element_type == \"tri\" or self.element_type == \"quad\":\n mesh.Line(n=1, p=p)\n elif self.element_type == \"line\":\n mesh.element_type = \"point\"\n mesh.nelem = 1\n mesh.nnode = 1\n mesh.degree = p\n mesh.elements = np.array([[0]])\n mesh.points = np.array([[0.,0.,0.]])\n sys.stdout = sys.__stdout__\n\n return mesh", "def subdivision(mesh):\n\t\n\t\n\t# 1. 
generate new nodes in the centre of quad\n\t# 1/4 o-------o 1/4 o: existing vertices\n\t# | | *: newly-generated vertices\n\t# | * |\n\t# | |\n\t# 1/4 o-------o 1/4\n\n\tnew_coor = mesh.give_nodes().give_coor()\n\t\n\tfor face_index in range(mesh.give_model_inf()[2]): \n\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\tfor vertex_index in range(4):\n\t\t\tmesh.give_faces()\n\t\t\tnode_index = mesh.give_faces().give_node_list(face_index)[vertex_index]\n\n\t\t\tnew_x += 0.25*mesh.give_nodes().give_coor(node_index)[0]\n\t\t\tnew_y += 0.25*mesh.give_nodes().give_coor(node_index)[1]\n\t\t\tnew_z += 0.25*mesh.give_nodes().give_coor(node_index)[2]\n\t\t\t\n\t\tnew_coor.append((new_x, new_y, new_z))\n\t\t\n\t# generating new nodes on the edge\n\t# figure out one edge is shared by how many surfaces\n\tedge_shared_by_faces_list = helper.find_edge_shared_by_which_faces(mesh.give_edges(), mesh.give_faces())\n\t\n\tfor edge_index in range(mesh.give_model_inf()[1]):\n\n\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\n\t# 2. generate new node on boundary edge\n\t# o: existing vertices\n\t# 1/2 o---*---o 1/2 *: newly-generated vertices\n\t# \n\n\t\tnew_coor = mesh.give_nodes().give_coor()\n\t\tif len(edge_shared_by_faces_list[edge_index]) == 1:\t\n\t\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\tfor vertex_index in range(2):\n\t\t\t\tthis_node = mesh.give_edges().give_node(edge_index)[vertex_index]\n\t\t\t\tnew_x += 0.5*mesh.give_nodes().give_coor()[this_node][0]\n\t\t\t\tnew_y += 0.5*mesh.give_nodes().give_coor()[this_node][1]\n\t\t\t\tnew_z += 0.5*mesh.give_nodes().give_coor()[this_node][2]\n\t\t\t\t\n\t\t\tnew_coor.append((new_x, new_y, new_z))\n\t\t\t\t\n\t# 3. generate new node on interior edge\n\t# 1/16 o-------o 1/16 o: existing vertices\n\t# | | *: newly-generated vertices\n\t# 3/8 o---*---o 3/8\n\t# | |\n\t# 1/16 o-------o 1/16\n\n\t\telse:\n\t\t\tnew_x, new_y, new_z = (0., 0., 0.)\n\t\t\tconsidered_node = []\n\t\t\tfor vertex_index in range(2):\n\t\t\t\tthis_node = mesh.give_edges().give_node(edge_index)[vertex_index]\n\t\t\t\tconsidered_node.append(this_node)\n\t\t\t\tnew_x += 3./8.*mesh.give_nodes().give_coor()[this_node][0]\n\t\t\t\tnew_y += 3./8.*mesh.give_nodes().give_coor()[this_node][1]\n\t\t\t\tnew_z += 3./8.*mesh.give_nodes().give_coor()[this_node][2]\n\t\t\t\n\t\t\t# faces contain this node\n\t\t\tpotential_node = []\n\t\t\tfor face_index in edge_shared_by_faces_list[edge_index]:\t\t\n\t\t\t\tfor vertex_index in range(4):\n\t\t\t\t\t\tpotential_node.append(mesh.give_faces().give_node_list(face_index)[vertex_index])\n\t\t\t\n\t\t\touter_node = []\n\t\t\tfor node in potential_node:\n\t\t\t\tif (node not in considered_node) & (node not in outer_node):\n\t\t\t\t\touter_node.append(node)\n\t\t\t\t\t\n\t\t\tfor vertex_index in outer_node:\n\t\t\t\tnew_x += 1./16.*mesh.give_nodes().give_coor()[vertex_index][0]\n\t\t\t\tnew_y += 1./16.*mesh.give_nodes().give_coor()[vertex_index][1]\n\t\t\t\tnew_z += 1./16.*mesh.give_nodes().give_coor()[vertex_index][2]\n\t\t\t\n\t\t\tnew_coor.append((new_x, new_y, new_z))\n\n\t# update the links of edges and surfaces\n\tnew_edge_list = []\n\tnew_face_list = []\n\tfor face_index in range(mesh.give_model_inf()[2]):\n\t\told_node0 = mesh.give_faces().give_node_list(face_index)[0]\n\t\told_node1 = mesh.give_faces().give_node_list(face_index)[1]\n\t\told_node2 = mesh.give_faces().give_node_list(face_index)[2]\n\t\told_node3 = mesh.give_faces().give_node_list(face_index)[3]\n\t\t\n\t\told_edge0 = mesh.give_faces().give_edge_list(face_index)[0]\n\t\told_edge1 = 
mesh.give_faces().give_edge_list(face_index)[1]\n\t\told_edge2 = mesh.give_faces().give_edge_list(face_index)[2]\n\t\told_edge3 = mesh.give_faces().give_edge_list(face_index)[3]\n\t\t\n\t\tnew_node4 = old_edge0 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2] \n\t\tnew_node5 = old_edge1 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\n\t\tnew_node6 = old_edge2 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\n\t\tnew_node7 = old_edge3 + mesh.give_model_inf()[0] + mesh.give_model_inf()[2]\t\n\t\tnew_node8 = mesh.give_model_inf()[0] + face_index\n\t\t\n\t\tif helper.in_list((old_node0, new_node4), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node0, new_node4))\n\t\tif helper.in_list((new_node4, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node4, new_node8))\n\t\tif helper.in_list((new_node8, new_node7), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node8, new_node7))\n\t\tif helper.in_list((new_node7, old_node0), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node7, old_node0))\n\t\tif helper.in_list((new_node4, old_node1), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node4, old_node1))\n\t\tif helper.in_list((old_node1, new_node5), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node1, new_node5))\n\t\tif helper.in_list((new_node5, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node5, new_node8))\n\t\tif helper.in_list((new_node7, old_node3), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node7, old_node3))\n\t\tif helper.in_list((old_node3, new_node6), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node3, new_node6))\n\t\tif helper.in_list((new_node6, new_node8), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node6, new_node8))\n\t\tif helper.in_list((new_node6, old_node2), new_edge_list) == False: \n\t\t\tnew_edge_list.append((new_node6, old_node2))\n\t\tif helper.in_list((old_node2, new_node5), new_edge_list) == False: \n\t\t\tnew_edge_list.append((old_node2, new_node5))\n\t\n\t\tnew_face_list.append((old_node0, new_node4, new_node8, new_node7))\n\t\tnew_face_list.append((new_node4, old_node1, new_node5, new_node8))\n\t\tnew_face_list.append((new_node7, new_node8, new_node6, old_node3))\n\t\tnew_face_list.append((new_node8, new_node5, old_node2, new_node6))\n\t\t\n\tnew_edges = geo.Edge(new_edge_list)\n\t\n\tnew_faces = geo.Face(new_face_list, new_edges)\n\t\t\n\t# update existing nodes\t\n\tfor node_index in range(mesh.give_model_inf()[0]):\n\t\t\n\t\tring1, ring2 = helper.find_neighbour_node(new_edges, new_faces, node_index)\n\t\tvalence = helper.find_valence(node_index, new_faces) \n\t\t#: valence: the number of faces sharing on specific edge\n\n\t# 4. 
update existing corner vertex\n\t# 2/4 @---* 1/4 *: newly-generated vertices\n\t# | | @: existing vertices to be updated\n\t# 1/4 *---* 0 The higher mask values on neighbouring vertices, \n\t# the more likely a square mesh will be refined into a sphere.\n\t \n\t\tif valence == 1:\n\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tprint\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tnew_x += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\tnew_y += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\tnew_z += 1./4.*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\n\t\t\tfor node_in_ring2 in ring2:\n\t\t\t\tnew_x += 0.*mesh.give_nodes().give_coor()[node_in_ring2][0]\n\t\t\t\tnew_y += 0.*mesh.give_nodes().give_coor()[node_in_ring2][1]\n\t\t\t\tnew_z += 0.*mesh.give_nodes().give_coor()[node_in_ring2][2]\n\t\t\t\t\n\t\t\tnew_x += 2./4.*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += 2./4.*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += 2./4.*mesh.give_nodes().give_coor()[node_index][2]\n\n\t# 5. update existing boundary joint vertex\n\t# 3/4\n\t# 1/8 *---*---* 1/8 *: newly-generated vertices\n\t# | | | @: existing vertices to be updated\n\t# 0 *---*---* 0\n\n\t\telif valence == 2:\n\t\t\t\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tif helper.find_valence(node_in_ring1, new_faces) <= 2: \n\t\t\t\t\tnew_x += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\t\tnew_y += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\t\tnew_z += 1./8.*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\t\t\t\t\t\n\t\t\tnew_x += 3./4.*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += 3./4.*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += 3./4.*mesh.give_nodes().give_coor()[node_index][2]\n\t\n\t# 6. update new node on interior edge\n\t# * r/k\n\t# /\\ b/k*\n\t# *__/ \\___ r/k\n\t# \\ \\ /¬¬/ *: newly-generated vertices: \n\t# \\ \\/ / b = 3/2/valence, r = 1/4/valence\n\t# *--@--* b/k\t @: existing vertices to be updated: 1-b-r\t\t\n\t# / /\\ \\\n\t# /__/ \\__\\\n\t# * \\ / * r/k\n\t# \\/\n\t\t\n\t\telse:\n\t\t\tnew_x, new_y, new_z = (0, 0, 0)\n\t\t\tbeta = 3./2./valence\n\t\t\tgamma = 1./4./valence\n\t\t\tfor node_in_ring1 in ring1:\n\t\t\t\tnew_x += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][0]\n\t\t\t\tnew_y += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][1]\n\t\t\t\tnew_z += beta/valence*mesh.give_nodes().give_coor()[node_in_ring1][2]\n\t\t\t\n\t\t\tfor node_in_ring2 in ring2:\n\t\t\t\tnew_x += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][0]\n\t\t\t\tnew_y += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][1]\n\t\t\t\tnew_z += gamma/valence*mesh.give_nodes().give_coor()[node_in_ring2][2]\n\t\t\t\n\t\t\tnew_x += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][0]\n\t\t\tnew_y += (1. - beta - gamma)*mesh.give_nodes().give_coor()[node_index][1]\n\t\t\tnew_z += (1. 
- beta - gamma)*mesh.give_nodes().give_coor()[node_index][2]\n\t\t\n\t\tnew_coor[node_index] = (new_x, new_y, new_z)\n\t\n\tnew_nodes = geo.Node(new_coor)\n\t\n\tmesh.update(new_nodes, new_edges, new_faces)\n\t\n\t# return new_mesh\n\treturn mesh", "def get_regular_points(self, npoints=None, device=\"gpu0\"):\n if not self.npoints == npoints:\n self.mesh = pymesh.generate_icosphere(1, [0, 0, 0], 4) # 2562 vertices\n self.vertex = torch.from_numpy(self.mesh.vertices).to(device).float()\n self.num_vertex = self.vertex.size(0)\n self.vertex = self.vertex.transpose(0,1).contiguous().unsqueeze(0)\n self.npoints = npoints\n\n return Variable(self.vertex.to(device))", "def makegridnd(meshsize, dimension):\n x = np.meshgrid(*[np.linspace(MIN_POINT_PRECISION, 1,meshsize) for d in range(dimension)])\n mesh = np.asarray(x)\n total = np.sum(mesh,axis=0)\n plane_mesh = mesh[:,np.isclose(total,1.0,atol=1e-2)]\n\n return plane_mesh", "def plot_multigroup_flux(mesh, state, edges = False) :\n if mesh.dimension() == 1 :\n # get the mesh points\n x = mesh_axes(mesh)\n # plot the map\n plt.plot(x, f)\n \n elif mesh.dimension() == 2 :\n\n # Get the mesh axes and then make a grid of them for plotting.\n x, y = mesh_axes(mesh)\n X, Y = np.meshgrid(x, y)\n edgec = 'none'\n if edges :\n edgec = 'k'\n plt.pcolor(X, Y, f, cmap=colormap, edgecolors=edgec)\n \n else :\n print \"not ready for 3d\"\n return\n # show the plot\n plt.show()", "def run(self): \n\n # Dictionaries whose keys are labels of the points in a 2-D grid and values\n # are an instance of the class meshPoint holding the informaiton about \n # that mesh point\n self.boundaryPoints = {}\n self.internalPoints = {}\n\n # Rectangle \n if self.layoutType.lower() == 'rectangle': \n # Define the mesh for a rectanglular layout\n self.defineRectangleLayout()\n # Circle \n elif self.layoutType.lower() == 'circle':\n # Define the mesh for a circular layout\n self.defineCircleLayout()\n\n return [self.internalPoints,self.boundaryPoints]", "def plot(self):\n # Find only unmasked data :\n # xyz, sData, sColor, _ = self._select_unmasked()\n xyz, sData, sColor = self.xyz, self.sData, self.sColor\n\n # Render as cloud points :\n self.mesh = visu.Markers(name='Sources')\n self.mesh.set_data(xyz, edge_color=self.edgecolor, face_color=sColor,\n size=sData, scaling=self.scaling,\n edge_width=self.edgewidth, symbol=self.symbol)\n self.mesh.set_gl_state('translucent')", "def make_grid(data=None, xmin=-5, xmax=5, ymin=-5, ymax=5, n_points = 400):\n if data is not None:\n xmin, ymin = np.min(data, axis = 0)\n xmax, ymax = np.max(data, axis = 0)\n\n plt.ylim(ymin, ymax)\n plt.xlim(xmin, xmax)\n\n x, y = np.meshgrid(np.linspace(xmin, xmax, n_points), np.linspace(ymin, ymax, n_points))\n grid = np.c_[x.ravel(), y.ravel()] # grid has n_points ^2 row and 2 columns\n return x, y, grid", "def new_mesh_set(self, all_meshes):\n if isinstance(all_meshes, Mesh):\n mesh_tp = MeshCollection()\n mesh_tp.append(all_meshes)\n all_meshes = mesh_tp\n\n if all_meshes.get_num_frames() is not 1:\n raise IndexError(\"Mesh should be from one frame only\")\n\n if not isinstance(all_meshes, MeshCollection):\n raise TypeError(\"Please send a list of mesh to update_mesh\")\n self.all_meshes = all_meshes\n\n # Remove previous actors from the scene\n for actor in self.mesh_actors:\n self.parent_window.ren.RemoveActor(actor)\n self.mesh_actors = list()\n\n # Create the geometry of a point (the coordinate) points = vtkPoints()\n for (i, mesh) in enumerate(self.all_meshes):\n points = vtkPoints()\n for j in 
range(mesh.get_num_vertex()):\n points.InsertNextPoint([0, 0, 0])\n\n # Create an array for each triangle\n cell = vtkCellArray()\n for j in range(mesh.get_num_triangles()): # For each triangle\n line = vtkPolyLine()\n line.GetPointIds().SetNumberOfIds(4)\n for k in range(len(mesh.triangles[j])): # For each index\n line.GetPointIds().SetId(k, mesh.triangles[j, k])\n line.GetPointIds().SetId(3, mesh.triangles[j, 0]) # Close the triangle\n cell.InsertNextCell(line)\n poly_line = vtkPolyData()\n poly_line.SetPoints(points)\n poly_line.SetLines(cell)\n\n # Create a mapper\n mapper = vtkPolyDataMapper()\n mapper.SetInputData(poly_line)\n\n # Create an actor\n self.mesh_actors.append(vtkActor())\n self.mesh_actors[i].SetMapper(mapper)\n\n self.parent_window.ren.AddActor(self.mesh_actors[i])\n self.parent_window.ren.ResetCamera()\n\n # Update marker position\n self.update_mesh(self.all_meshes)", "def show(self):\n from matplotlib import pyplot as plt\n from mpl_toolkits.mplot3d import Axes3D\n\n fig = plt.figure()\n ax = Axes3D(fig)\n pos = self.cluster.get_positions()\n from itertools import combinations\n for tri in self.mesh.simplices:\n for comb in combinations(tri, 2):\n x1 = pos[comb[0], 0]\n x2 = pos[comb[1], 0]\n y1 = pos[comb[0], 1]\n y2 = pos[comb[1], 1]\n z1 = pos[comb[0], 2]\n z2 = pos[comb[1], 2]\n ax.plot([x1, x2], [y1, y2], zs=[z1, z2], color=\"black\")\n plt.show()", "def mesh_grid(self,width,height):\n # get\n \n x_linspace=tf.linspace(-self.cx_,1-self.cx_,width)\n y_linspace=tf.linspace(-self.cy_,1-self.cy_,height)\n \n# x_cord,y_cord=tf.meshgrid(x_linspace,y_linspace)\n y_cord,x_cord=tf.meshgrid(y_linspace,x_linspace)\n \n \n x_cord=tf.reshape(x_cord,[-1])\n y_cord=tf.reshape(y_cord,[-1])\n \n f_=tf.ones_like(x_cord)\n \n x_=tf.div(x_cord,self.cf)\n y_=tf.div(y_cord,self.cf)\n \n grid=tf.concat([x_,y_,f_],0)\n return grid", "def _vertices(self, point):\n vertex_0, vertex_1, vertex_2 = tuple(\n gs.take(point, indices=self.faces[:, i], axis=-2) for i in range(3)\n )\n if point.ndim == 3 and vertex_0.ndim == 2:\n vertex_0 = gs.expand_dims(vertex_0, axis=0)\n vertex_1 = gs.expand_dims(vertex_1, axis=0)\n vertex_2 = gs.expand_dims(vertex_2, axis=0)\n return vertex_0, vertex_1, vertex_2", "def strang_mesh(filename):\n\n from math import pi\n from anuga.utilities.numerical_tools import anglediff\n\n\n fid = open(filename)\n points = [] # List of x, y coordinates\n triangles = [] # List of vertex ids as listed in the file\n\n for line in fid.readlines():\n fields = line.split()\n if len(fields) == 2:\n # we are reading vertex coordinates\n points.append([float(fields[0]), float(fields[1])])\n elif len(fields) == 3:\n # we are reading triangle point id's (format ae+b)\n triangles.append([int(float(fields[0]))-1,\n int(float(fields[1]))-1,\n int(float(fields[2]))-1])\n else:\n raise Excetion('wrong format in %s' % filename)\n\n elements = [] #Final list of elements\n\n for t in triangles:\n #Get vertex coordinates\n v0 = t[0]\n v1 = t[1]\n v2 = t[2]\n\n x0 = points[v0][0]\n y0 = points[v0][1]\n x1 = points[v1][0]\n y1 = points[v1][1]\n x2 = points[v2][0]\n y2 = points[v2][1]\n\n #Check that points are arranged in counter clock-wise order\n vec0 = [x1-x0, y1-y0]\n vec1 = [x2-x1, y2-y1]\n vec2 = [x0-x2, y0-y2]\n\n a0 = anglediff(vec1, vec0)\n a1 = anglediff(vec2, vec1)\n a2 = anglediff(vec0, vec2)\n\n if a0 < pi and a1 < pi and a2 < pi:\n elements.append([v0, v1, v2])\n else:\n elements.append([v0, v2, v1])\n\n return points, elements", "def plot_vertices(self, f=None, index_row=0, 
index_col=0, show=True, plotter=None, cmap='jet', title='',\n title_location=\"upper_edge\", font_size=10, font_color='black', camera=None):\n\n if not plotter:\n plotter = pv.Plotter()\n plotter.subplot(index_column=index_col, index_row=index_row)\n plotter.add_text(title, position=title_location, font_size=font_size, color=font_color)\n if camera is not None:\n plotter.set_position(camera[0])\n plotter.set_focus(camera[1])\n plotter.set_viewup(camera[2])\n plotter.add_mesh(self.vertices, scalars=f, cmap=cmap, render_points_as_spheres=True)\n if show:\n plotter.show()\n return plotter", "def makemesh(pts,vec,growsize=0.2,curvilinear_neighbors=10,\n\tcurvilinear=True,debug=False,growsize_nm=None,excise=True,areas_only=False):\n\tif debug: \n\t\timport time\n\t\tst = time.time()\n\t\tdef checkpoint(note):\n\t\t\tprint(note)\n\t\t\tst = time.time()\n\n\tnmol = len(pts)\n\tpts = pts\n\tvec = vec\n\tif debug: \n\t\timport time\n\t\tst = time.time()\n\t\tprint(\"[STATUS] start makemesh %0.2f\"%(time.time()-st))\n\tptsb,ptsb_inds = beyonder(pts,vec,growsize=growsize,growsize_nm=growsize_nm,return_ids=True)\n\tif debug: print(\"[STATUS] project curvilinear=\"+str(curvilinear)+\" %0.2f\"%(time.time()-st))\n\t#---if curvilinear then use the isomap otherwise project onto the xy plane\n\tif curvilinear: \n\t\timport sklearn\n\t\tfrom sklearn import manifold\n\t\tproj = manifold.Isomap(curvilinear_neighbors,2).fit_transform(ptsb)\n\telse: proj = ptsb[...,:2]\n\tif debug: checkpoint(\"[STATUS] delaunay %0.2f\"%(time.time()-st))\n\tif debug: checkpoint(\"[STATUS] shape=\"+str(np.shape(ptsb)))\n\tdl = scipy.spatial.Delaunay(proj)\n\tif debug: checkpoint(\"[STATUS] reclock %0.2f\"%(time.time()-st))\n\tsimplices = np.array([a[reclock(ptsb[a])] for a in dl.simplices])\n\t#---rework simplices and ptsb to exclude superfluous points\n\tif debug: checkpoint(\"[STATUS] trim %0.2f\"%(time.time()-st))\n\t#---relevants is a unique list of simplices with exactly one member that is equal to a core vertex point\n\trelevants = np.unique(np.concatenate([simplices[\n\t\tnp.where(np.sum(simplices==i,axis=1)==1)[0]] for i in range(nmol)]))\n\tpoints = ptsb[relevants]\n\tghost_indices = ptsb_inds[relevants]\n\tptsb = points\n\n\tif debug: checkpoint(\"[STATUS] simplices %0.2f\"%(time.time()-st))\n\tsimplices = np.array([[np.where(relevants==r)[0][0] for r in s] \n\t\tfor s in simplices if np.all([r in relevants for r in s])])\n\t#---end rework\n\tif debug: checkpoint(\"[STATUS] areas %0.2f\"%(time.time()-st))\n\tareas = np.array([triarea(ptsb[a]) for a in simplices])\n\tif areas_only: return {'simplices':simplices,'areas':areas,'nmol':nmol,'vec':vec,'points':points}\n\tif debug: checkpoint(\"[STATUS] facenorms %0.2f\"%(time.time()-st))\n\tfacenorms = np.array([vecnorm(facenorm(ptsb[a])) for a in simplices])\t\n\tif debug: checkpoint(\"[STATUS] vertex-to-simplex %0.2f\"%(time.time()-st))\n\tv2s = [np.where(np.any(simplices==i,axis=1))[0] for i in range(nmol)]\n\tif debug: checkpoint(\"[STATUS] vertex normals %0.2f\"%(time.time()-st))\n\tvertnorms = np.array([vecnorm(np.sum(facenorms[ind]*\\\n\t\tnp.transpose([areas[ind]/np.sum(areas[ind])]),axis=0)) for ind in v2s])\n\tprincipals = np.zeros((nmol,2))\n\tnl = []\n\tif debug: checkpoint(\"[STATUS] curvatures %0.2f\"%(time.time()-st))\n\tfor v in range(nmol):\n\t\tneighbors = np.unique(simplices[np.where(np.any(simplices==v,axis=1))[0]])\n\t\tneighbors = neighbors[neighbors!=v]\n\t\tnl.append(neighbors)\n\t\tedges = ptsb[neighbors]-ptsb[v]\n\t\tweights = 
[areas[sl]/2./np.sum(areas[v2s[v]]) for sl in v2s[v]]\n\t\ttijs = [vecnorm(np.dot(np.identity(3)-np.outer(vertnorms[v],\n\t\t\tvertnorms[v].T),ab)) for ab in edges]\n\t\tkijs = [np.dot(vertnorms[v].T,ab)/linalg.norm(ab)**2 for ab in edges]\n\t\tct = np.sum([weights[ind]*kijs[ind]*np.outer(tijs[ind],tijs[ind]) \n\t\t\tfor ind,i in enumerate(v2s[v])],axis=0)\n\t\twsign = 1-2*(linalg.norm(np.array([1,0,0])+\\\n\t\t\tvertnorms[v])<linalg.norm(np.array([1,0,0])-vertnorms[v]))\n\t\twvi = vecnorm(np.array([1,0,0])+wsign*vertnorms[v])\n\t\thm = np.identity(3)-2*np.outer(wvi,wvi.T)\n\t\thhm = np.dot(np.dot(hm.T,ct),hm)\n\t\tprincipals[v] = -1*hhm[1,1],-1*hhm[2,2]\n\tif debug: checkpoint(\"[STATUS] PBC neighborlist %0.2f\"%(time.time()-st))\n\t#---neighborlist under PBCs\n\tchecksubssort,nlsubs = np.where(torusnorm(points[nmol:],points[:nmol],vec)==0)\n\t#if not all(checksubssort==np.arange(len(points)-nmol)): raise Exception('torusnorm lookup fail')\n\ttry: nlpbc = [[(i if i<nmol else nlsubs[i-nmol]) for i in n] for n in nl]\n\texcept: nlpbc = []\n\tgauss = (3*principals[:,0]-principals[:,1])*(3*principals[:,1]-\\\n\t\tprincipals[:,0])\n\tmean = 1./2*((3*principals[:,0]-principals[:,1])+\\\n\t\t(3*principals[:,1]-principals[:,0]))\n\tif debug: checkpoint(\"[STATUS] complete %0.2f\"%(time.time()-st))\n\n\tif debug:\n\t\timport matplotlib as mpl;import matplotlib.pylab as plt\n\t\tplt.scatter(points[:,0],points[:,1])\n\t\tplt.show()\n\t\timport pdb;pdb.set_trace()\n\n\treturn {'nmol':nmol,'vec':vec,'simplices':simplices,'points':points,\n\t\t'areas':areas,'facenorms':facenorms,'vertnorms':vertnorms,'principals':principals,\n\t\t'ghost_ids':ghost_indices,'gauss':gauss,'mean':mean}", "def __plot_convex_hull(self, ax=None) -> None:\n ax.plot(self.points[:, 0], self.points[:, 1], \"o\")\n for simplex in self.hull.simplices:\n ax.plot(self.points[simplex, 0], self.points[simplex, 1], \"k-\")", "def generaCubo(self):\r\n #Use Panda predefined format for vertex coordinate only\r\n format = GeomVertexFormat.getV3()\r\n \r\n #Build Vertex data using the created format. 
Vertex will never change so I use Static attribute \r\n vdata = GeomVertexData('CuboData', format, Geom.UHStatic)\r\n \r\n #I will have to write vertex data so I create a writer for these data\r\n vertex = GeomVertexWriter(vdata, 'vertex')\r\n \r\n #I now use the writer to add vertex data\r\n vertex.addData3f(0, 0, 0)\r\n vertex.addData3f(1, 1, 1)\r\n vertex.addData3f(0, 1, 1)\r\n vertex.addData3f(0, 1, 0)\r\n vertex.addData3f(0, 0, 1)\r\n vertex.addData3f(1, 0, 0)\r\n vertex.addData3f(1, 0, 1)\r\n vertex.addData3f(1, 1, 0)\r\n \r\n #I now create 12 triangles\r\n prim = GeomTriangles(Geom.UHStatic)\r\n\r\n #and then I add vertex to them\r\n #Next time use addVertices(0,1,2) !!!\r\n prim.addVertex(7)\r\n prim.addVertex(0)\r\n prim.addVertex(5)\r\n prim.closePrimitive()\r\n \r\n prim.addVertex(3)\r\n prim.addVertex(0)\r\n prim.addVertex(7)\r\n prim.closePrimitive()\r\n \r\n prim.addVertex(2)\r\n prim.addVertex(6)\r\n prim.addVertex(4)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(1)\r\n prim.addVertex(6)\r\n prim.addVertex(2)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(7)\r\n prim.addVertex(2)\r\n prim.addVertex(3)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(1)\r\n prim.addVertex(2)\r\n prim.addVertex(7)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(3)\r\n prim.addVertex(4)\r\n prim.addVertex(0)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(2)\r\n prim.addVertex(4)\r\n prim.addVertex(3)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(0)\r\n prim.addVertex(6)\r\n prim.addVertex(5)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(4)\r\n prim.addVertex(6)\r\n prim.addVertex(0)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(5)\r\n prim.addVertex(1)\r\n prim.addVertex(7)\r\n prim.closePrimitive()\r\n\r\n prim.addVertex(6)\r\n prim.addVertex(1)\r\n prim.addVertex(5)\r\n prim.closePrimitive()\r\n\r\n #Create a Geom to bing vertex data to primitives\r\n geom = Geom(vdata)\r\n geom.addPrimitive(prim)\r\n\r\n #Create a node for the Geom in order to be able to render it\r\n node = GeomNode('gnode')\r\n node.addGeom(geom)\r\n\r\n #Adde the node to the scene graph == render it!\r\n nodePath = render.attachNewNode(node)\r\n \r\n #is this needed?\r\n nodePath.setPos( 0, 5, 0)\r\n \r\n self.camera.lookAt(nodePath)\r\n \r\n base.setBackgroundColor( .0, .0, .0 )\r\n \r\n taskMgr.add(self.SpinCameraTask, \"SpinCameraTask\")", "def get_regular_points(self, npoints=2500, device=\"gpu0\"):\n if not self.npoints == npoints:\n self.npoints = npoints\n vertices, faces = self.generate_square(np.sqrt(npoints))\n self.mesh = pymesh.form_mesh(vertices=vertices, faces=faces) # 10k vertices\n self.vertex = torch.from_numpy(self.mesh.vertices).to(device).float()\n self.num_vertex = self.vertex.size(0)\n self.vertex = self.vertex.transpose(0,1).contiguous().unsqueeze(0)\n\n return Variable(self.vertex[:, :2].contiguous().to(device))", "def ConvertToLinearMesh(self):\n\n self.__do_essential_memebers_exist__()\n p = self.InferPolynomialDegree()\n\n if p <= 1:\n return self\n\n if self.element_type == \"quad\" or self.element_type == \"hex\":\n if p>13 and p!=16 and p!=32:\n raise NotImplementedError(\"Cannot convert p>8 {} mesh to linear mesh\".format(self.element_type))\n\n lmesh = Mesh()\n elements = np.copy(self.elements)\n\n if self.element_type == \"quad\" or self.element_type == \"hex\":\n # This is an aranger for internal use - use PlotMeshNumbering\n # to understand the remainder of this algorithm\n if p == 2:\n aranger = [0, 1, 2]\n elif p == 3:\n aranger = [0, 3, 2, 1]\n elif p == 4:\n aranger 
= [0, 1, 2, 3, 4]\n elif p == 5:\n aranger = [0, 3, 4, 2, 5, 1]\n elif p == 6:\n aranger = [0, 3, 6, 1, 5, 4, 2]\n elif p == 7:\n aranger = [0, 3, 7, 5, 2, 4, 6, 1]\n elif p == 8:\n aranger = range(9)\n elif p == 9:\n aranger = [0, 2, 3, 8, 4, 5, 6, 7, 9, 1]\n elif p == 10:\n aranger = [0, 6, 7, 3, 8, 1, 4, 5, 9, 10, 2]\n elif p == 11:\n aranger = [0, 9, 10, 6, 11, 2, 3, 4, 5, 7, 8, 1]\n elif p == 12:\n aranger = [0, 7, 6, 1, 12, 5, 2, 8, 10, 3, 9, 11, 4]\n elif p == 13:\n aranger = [0, 13, 6, 2, 7, 5, 3, 9, 8, 12, 11, 10, 4, 1]\n elif p == 16:\n aranger = range(17)\n elif p == 32:\n aranger = range(33)\n else:\n # This seemingly works fine for quads for some reason, so\n # quads essentially don't need an aranger and can accomdate\n # arbitrary p\n aranger = range(p+1)\n\n\n if self.element_type == \"quad\":\n # Create a dummy hex mesh with 1 element for indexing\n mesh = Mesh()\n mesh.Square(n=1, element_type=\"quad\")\n mesh.GetHighOrderMesh(p=p, equally_spaced=True)\n # Create the mapping indices from the high order mesh\n mapper = []\n for i in range(mesh.elements.shape[1]):\n x = np.where(mesh.elements.ravel()==i)[0][0]\n mapper.append(x)\n mapper = np.array(mapper).ravel()\n\n # Create layers\n layer0 = []\n for j in range(p+1):\n l0 = np.linspace(0,p*(p+1), p+1).astype(np.int64) + j#*(p+1)**2\n layer0.append(l0)\n # This layers values can be used in conjunction\n # with mesh.PlotMeshNumbering() to get aranger values for other ps\n layers2d = np.array(layer0)\n # print(layers2d)\n # mesh.PlotMeshNumbering()\n\n layers2d = layers2d[:,aranger]\n layers2d = layers2d[aranger,:]\n\n # Create connectivity from layers now\n indexer = []\n for j in range(p):\n for k in range(p):\n indexer.append(layers2d[j:j+2,k:k+2].ravel()[[0,2,3,1]])\n indexer = np.array(indexer)\n\n # Create the final mapp from high to linear mesh.\n # This is equivalent to p==2 for all a1, a2 ... 
arrays\n a_s = mapper.ravel()[indexer].reshape(indexer.shape)\n\n lmesh.elements = np.zeros((1,4), dtype=np.int64)\n for counter, a in enumerate(a_s):\n lmesh.elements = np.concatenate((\n lmesh.elements, elements[:,a]\n ))\n lmesh.elements = lmesh.elements[1:,:].astype(np.int64)\n\n\n elif self.element_type == \"hex\":\n\n if p == 2:\n\n a1 = [ 0, 8, 10, 9, 13, 17, 19, 18]\n a2 = [13, 17, 19, 18, 4, 22, 24, 23]\n a3 = [ 8, 1, 11, 10, 17, 14, 20, 19]\n a4 = [17, 14, 20, 19, 22, 5, 25, 24]\n a5 = [ 9, 10, 12, 3, 18, 19, 21, 16]\n a6 = [18, 19, 21, 16, 23, 24, 26, 7]\n a7 = [10, 11, 2, 12, 19, 20, 15, 21]\n a8 = [19, 20, 15, 21, 24, 25, 6, 26]\n\n lmesh.elements = np.concatenate(\n (elements[:,a1],\n elements[:,a2],\n elements[:,a3],\n elements[:,a4],\n elements[:,a5],\n elements[:,a6],\n elements[:,a7],\n elements[:,a8]\n ))\n\n else:\n\n # Create a dummy hex mesh with 1 element for indexing\n mesh = Mesh()\n mesh.Cube(n=1, element_type=\"hex\")\n mesh.GetHighOrderMesh(p=p, equally_spaced=True)\n # Create the mapping indices from the high order mesh\n mapper = []\n for i in range(mesh.elements.shape[1]):\n x = np.where(mesh.elements.ravel()==i)[0][0]\n mapper.append(x)\n mapper = np.array(mapper).ravel()\n\n # Create layers\n layer0 = []\n for j in range(p+1):\n l0 = np.linspace(0,p*(p+1), p+1).astype(np.int64) + j*(p+1)**2\n layer0.append(l0)\n\n layers = [layer0]\n for j in range(1, p+1):\n layers.append([layer+j for layer in layer0])\n # This layers values can be used in conjunction\n # with mesh.PlotMeshNumbering() to get aranger values for other ps\n layers = np.array(layers)\n\n layers = layers[:,:,aranger]\n layers = layers[:,aranger,:]\n layers = layers[aranger,:,:]\n\n\n # Create connectivity from layers now\n indexer = []\n for i in range(p):\n for j in range(p):\n for k in range(p):\n indexer.append(\n np.hstack((\n layers[i,j:j+2,k:k+2].ravel()[[0,2,3,1]],\n layers[i+1,j:j+2,k:k+2].ravel()[[0,2,3,1]]\n )))\n indexer = np.array(indexer)\n\n # Create the final mapp from high to linear mesh.\n # This is equivalent to p==2 for all a1, a2 ... 
arrays\n a_s = mapper.ravel()[indexer].reshape(indexer.shape)\n\n lmesh.elements = np.zeros((1,8), dtype=np.int64)\n for counter, a in enumerate(a_s):\n lmesh.elements = np.concatenate((\n lmesh.elements, elements[:,a]\n ))\n lmesh.elements = lmesh.elements[1:,:].astype(np.int64)\n\n\n elif self.element_type == \"tet\":\n\n from Florence.QuadratureRules.FeketePointsTet import FeketePointsTet\n from scipy.spatial import Delaunay\n\n # BUILD DELAUNAY TRIANGULATION OF REFERENCE ELEMENTS\n gpoints = FeketePointsTet(p-1)\n Tfunc = Delaunay(gpoints)\n simplex = Tfunc.simplices.copy()\n\n lmesh.elements = np.zeros((1,4))\n for i in range(Tfunc.nsimplex):\n lmesh.elements = np.concatenate((lmesh.elements,elements[:,simplex[i,:]]))\n lmesh.elements = lmesh.elements[1:,:]\n\n elif self.element_type == \"tri\":\n\n from Florence.QuadratureRules.FeketePointsTri import FeketePointsTri\n from scipy.spatial import Delaunay\n\n # BUILD DELAUNAY TRIANGULATION OF REFERENCE ELEMENTS\n gpoints = FeketePointsTri(p-1)\n Tfunc = Delaunay(gpoints)\n simplex = Tfunc.simplices.copy()\n\n lmesh.elements = np.zeros((1,3))\n for i in range(Tfunc.nsimplex):\n lmesh.elements = np.concatenate((lmesh.elements,elements[:,simplex[i,:]]))\n lmesh.elements = lmesh.elements[1:,:]\n\n else:\n raise NotImplementedError(\"Converting to linear mesh with {} elements not implemented yet\".format(self.element_type))\n\n\n lmesh.elements = np.ascontiguousarray(lmesh.elements,dtype=np.int64)\n lmesh.points = np.copy(self.points)\n lmesh.degree = 1\n lmesh.element_type = self.element_type\n lmesh.nelem = lmesh.elements.shape[0]\n lmesh.nnode = lmesh.points.shape[0]\n edim = self.InferElementalDimension()\n if edim == 3:\n lmesh.GetBoundaryFaces()\n lmesh.GetBoundaryEdges()\n elif edim == 2:\n lmesh.GetBoundaryEdges()\n\n return lmesh", "def xy_coordinates(self):\n\n return np.meshgrid(self.x_coord, self.y_coord)", "def mesh_geometry(mesh_file):\n\n # Reading mesh results\n # msh = datread(mesh_file) # Deprecated\n\n with open(mesh_file, 'r') as fr:\n msh = np.array([list(map(float, l.replace('T', '').split())) for l in fr.readlines()])\n\n nnodes = int(msh[0][0])\n nelem = int(msh[1][1])\n ncol = int(msh[0][3])\n nlin = int(msh[0][4])\n\n nodes = np.array(msh[4:(nnodes + 4)])\n xn = np.array(list(chunks([nodes[i][1] for i in range(nnodes)], ncol + 1))).flatten()\n yn = np.array(list(chunks([nodes[i][2] for i in range(nnodes)], nlin + 1))).flatten()\n\n xy = np.array([[xn[i], yn[i]] for i in range(len(xn))])\n\n layers = np.array(list(chunks(xy, ncol + 1)))\n\n # Computing the 4-corners coordinates of each blocks based on the node position\n\n blocks = []\n\n s = layers.shape\n\n for i in range(s[0] - 1):\n for j in range(s[1] - 1):\n blocks.append([\n [layers[i, j, 0], layers[i, j, 1]],\n [layers[i + 1, j, 0], layers[i + 1, j, 1]],\n [layers[i + 1, j + 1, 0], layers[i + 1, j + 1, 1]],\n [layers[i + 1, j + 1, 0], layers[i, j + 1, 1]]\n ])\n\n blocks = np.array(blocks)\n\n centerxy = np.array([[np.mean(blocks[i, :, 0]), np.mean(blocks[i, :, 1])] for i in range(nelem)])\n\n return ncol, nlin, nelem, blocks, centerxy" ]
[ "0.7197377", "0.6756", "0.6733346", "0.6643686", "0.6442712", "0.6364836", "0.63441443", "0.6343205", "0.63342935", "0.62988", "0.627023", "0.623843", "0.62137896", "0.61971325", "0.6183295", "0.6164846", "0.6139129", "0.61126757", "0.61126757", "0.6105098", "0.6105098", "0.6083872", "0.6077391", "0.6059901", "0.60188633", "0.6016909", "0.6013761", "0.6005731", "0.6004868", "0.600083", "0.5990357", "0.5990357", "0.5990357", "0.5990357", "0.5985251", "0.5978137", "0.5971783", "0.5967281", "0.5950271", "0.5937113", "0.5922678", "0.5908749", "0.58891714", "0.58815366", "0.5878638", "0.5865206", "0.58533794", "0.58524305", "0.5847537", "0.58223283", "0.5793463", "0.57924384", "0.5782011", "0.57739437", "0.5717423", "0.5704882", "0.57039624", "0.5700783", "0.5698146", "0.56949854", "0.5688879", "0.56816053", "0.5677601", "0.5670427", "0.5668955", "0.56656224", "0.5654163", "0.56527954", "0.56508934", "0.5649576", "0.5631516", "0.5626699", "0.56142545", "0.5613194", "0.5601349", "0.5598631", "0.5598237", "0.5597082", "0.5593518", "0.5592317", "0.55824953", "0.5579542", "0.5573218", "0.5568952", "0.55688745", "0.5564515", "0.5546714", "0.5545769", "0.55440086", "0.5542016", "0.5540231", "0.5539885", "0.5530503", "0.5529822", "0.55282986", "0.55255944", "0.55203646", "0.5519263", "0.5518739", "0.55120516" ]
0.6036985
24
Plot the decision boundaries for a classifier.
def plot_contours(ax, clf, xx, yy, **params):
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    out = ax.contourf(xx, yy, Z, **params)
    return out
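A minimal usage sketch for the plot_contours snippet above; the iris data, linear SVC, and 0.02 grid step are illustrative assumptions, not drawn from this record.

import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, svm

# plot_contours as defined above is assumed to be in scope

# Fit a classifier on two features so the boundary can be drawn in 2D
X, y = datasets.load_iris(return_X_y=True)
X = X[:, :2]
clf = svm.SVC(kernel="linear").fit(X, y)

# Build a meshgrid covering the feature space with a small margin
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                     np.arange(y_min, y_max, 0.02))

# Shade the predicted regions, then overlay the training points
fig, ax = plt.subplots()
plot_contours(ax, clf, xx, yy, cmap=plt.cm.coolwarm, alpha=0.8)
ax.scatter(X[:, 0], X[:, 1], c=y, edgecolors="k")
plt.show()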
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def plot_decision_regions(X, y, classifier, resolution=0.02):\n #setup marker generator and color map\n markers = ('s', 'x', 'o', '^', 'v')\n colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')\n cmap = ListedColormap(colors[:len(np.unique(y))])\n\n #plot the decision surface\n #just find the limit and/reduce 1\n x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n #np.arange(start, stop, step): create list of tupple from start to stop with step of step\n #np.meshgrid convert: create accessible arrays from list of tupple\n #(-1,-2) (-1,0) (-1,1) xx1 = [-1 -1 -1][0 0 0 ][1 1 1]\n #(0,-2)(0,0)(0,1) ==> \n #(1,-2)(1,0)(1,1) xx2 = [-2 -2 -2][0 0 0 ][1 1 1]\n xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),\n np.arange(x2_min, x2_max, resolution))\n\n #ravel() xx1 = [-1 -1 -1 0 0 0 1 1 1]\n # xx2 = [-2 -2 -2 0 0 0 1 1 1]\n #array() [[-1 -1 -1 0 0 0 1 1 1]\n # [-2 -2 -2 0 0 0 1 1 1]] concatenation... sort of\n #.T , transpose from in this case a 2x9 to 9x2\n\n Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)\n Z = Z.reshape(xx1.shape)\n plt.contourf(xx1, xx2, Z, alpha = 0.3, cmap=cmap)\n plt.xlim(xx1.min(), xx1.max())\n plt.ylim(xx2.min(), xx2.max())\n\n #plot class samples\n for idx, cl in enumerate(np.unique(y)):\n plt.scatter(x=X[y == cl, 0],\n y=X[y == cl, 1],\n alpha=0.8,\n c=colors[idx],\n marker=markers[idx],\n label=cl,\n edgecolor='black')", "def plot_decision_boundary(pred_func):\n # Set min and max values\n x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5\n y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5\n h = 0.01\n # Generate a grid of points\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n # Predict the function value for the whole gid\n Z = pred_func(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n # Plot the contour and training examples\n plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Spectral)\n plt.show()", "def plot_decision_function(fitted_classifier, range_features, ax=None):\n from sklearn.preprocessing import LabelEncoder\n\n feature_names = list(range_features.keys())\n # create a grid to evaluate all possible samples\n plot_step = 0.02\n xx, yy = np.meshgrid(\n np.arange(*range_features[feature_names[0]], plot_step),\n np.arange(*range_features[feature_names[1]], plot_step),\n )\n\n # compute the associated prediction\n Z = fitted_classifier.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = LabelEncoder().fit_transform(Z)\n Z = Z.reshape(xx.shape)\n\n # make the plot of the boundary and the data samples\n if ax is None:\n _, ax = plt.subplots()\n ax.contourf(xx, yy, Z, alpha=0.4, cmap=\"RdBu\")\n\n return ax", "def plot_2D_boundary(plot_range, points, decisionfcn, labels, values=[0]):\n\n clist = ['b', 'r', 'g', 'k', 'm', 'y'] # colors for the classes\n\n # evaluate on a grid and plot contour of decision function\n x = np.arange(plot_range[0], plot_range[1], .1)\n y = np.arange(plot_range[2], plot_range[3], .1)\n xx, yy = np.meshgrid(x, y)\n xxx, yyy = xx.flatten(), yy.flatten() # lists of x,y in grid\n zz = np.array(decisionfcn(xxx, yyy))\n zz = zz.reshape(xx.shape)\n\n # plot contour(s) at values\n plt.contour(xx, yy, zz, values)\n\n # for each class, plot the points with ’*’ for correct, ’o’ for incorrect\n for i in range(len(points)):\n d = decisionfcn(points[i][:, 0], points[i][:, 1])\n correct_ndx = labels[i] == d\n incorrect_ndx = labels[i] != d\n plt.plot(\n 
points[i][correct_ndx, 0],\n points[i][correct_ndx, 1],\n '*',\n color=clist[i])\n plt.plot(\n points[i][incorrect_ndx, 0],\n points[i][incorrect_ndx, 1],\n 'o',\n color=clist[i])\n plt.axis('equal')\n plt.show()", "def plot_decision_boundary(data, x, y, labels, model, **kwargs):\n xx, yy, Z = setup_contours(data=data, x=x, y=y, model=model)\n\n x0, x1 = data[x].values, data[y].values\n x0lim = x0.min(), x0.max()\n x1lim = x1.min(), x1.max()\n\n col = data[labels].values\n plt.figure(figsize=(10, 10))\n\n plt.scatter(x0, x1, c=col, **kwargs)\n CS = plt.contourf(xx, yy, Z, **kwargs)\n CS2 = plt.contour(CS, CS.levels[::2], **kwargs)\n cbar = plt.colorbar(CS, **kwargs)\n cbar.ax.set_ylabel('Fitted Probability')\n # Add the contour line levels to the colorbar\n cbar.add_lines(CS2)\n\n plt.xlim(x0lim)\n plt.ylim(x1lim)\n plt.xlabel(x)\n plt.ylabel(y)\n plt.legend()", "def plot_decision_boundary(model: torch.nn.Module, X: torch.Tensor, y: torch.Tensor):\n # Put everything to CPU (works better with NumPy + Matplotlib)\n model.to(\"cpu\")\n X, y = X.to(\"cpu\"), y.to(\"cpu\")\n\n # Setup prediction boundaries and grid\n x_min, x_max = X[:, 0].min() - 0.1, X[:, 0].max() + 0.1\n y_min, y_max = X[:, 1].min() - 0.1, X[:, 1].max() + 0.1\n xx, yy = np.meshgrid(np.linspace(x_min, x_max, 101), np.linspace(y_min, y_max, 101))\n\n # Make features\n X_to_pred_on = torch.from_numpy(np.column_stack((xx.ravel(), yy.ravel()))).float()\n\n # Make predictions\n model.eval()\n with torch.inference_mode():\n y_logits = model(X_to_pred_on)\n\n # Test for multi-class or binary and adjust logits to prediction labels\n if len(torch.unique(y)) > 2:\n y_pred = torch.softmax(y_logits, dim=1).argmax(dim=1) # mutli-class\n else:\n y_pred = torch.round(torch.sigmoid(y_logits)) # binary\n\n # Reshape preds and plot\n y_pred = y_pred.reshape(xx.shape).detach().numpy()\n plt.contourf(xx, yy, y_pred, cmap=plt.cm.RdYlBu, alpha=0.7)\n plt.scatter(X[:, 0], X[:, 1], c=y, s=40, cmap=plt.cm.RdYlBu)\n plt.xlim(xx.min(), xx.max())\n plt.ylim(yy.min(), yy.max())", "def plot_decision_boundary(resolution=100, colors=('b', 'k', 'r'), levels=(-1, 0, 1)):\n\n # Generate coordinate grid of shape [resolution x resolution]\n # and evaluate the model over the entire space\n xrange = np.linspace(x_train[:,0].min(), x_train[:,0].max(), resolution)\n yrange = np.linspace(x_train[:,1].min(), x_train[:,1].max(), resolution)\n grid = [[decision_function(alpha, y_train,\n Kernel1, x_train,\n np.array([xr, yr]), b) for xr in xrange] for yr in yrange]\n grid = np.array(grid).reshape(len(xrange), len(yrange))\n\n # Plot decision contours using grid and\n # make a scatter plot of training data\n ax.contour(xrange, yrange, grid, levels=levels, linewidths=(1, 1, 1),\n linestyles=('--', '-', '--'), colors=colors)\n ax.scatter(x_train[:,0], x_train[:,1],\n c=y_train, cmap=plt.cm.viridis, lw=0, alpha=0.25)\n\n # Plot support vectors (non-zero alphas)\n # as circled points (linewidth > 0)\n mask = np.round(alpha, decimals=2) != 0.0\n ax.scatter(x_train[mask,0], x_train[mask,1],\n c=y_train[mask], cmap=plt.cm.viridis, lw=1, edgecolors='k')\n\n return grid, ax", "def plot_decision_boundary(model, X, y):\r\n \r\n x1_array, x2_array = np.meshgrid(np.arange(-4, 4, 0.01), np.arange(-4, 4, 0.01))\r\n grid_coordinates = np.c_[x1_array.ravel(), x2_array.ravel()]\r\n Z = model.predict(grid_coordinates)\r\n Z = Z.reshape(x1_array.shape)\r\n plt.contourf(x1_array, x2_array, Z, cmap=plt.cm.bwr)\r\n plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.bwr)\r\n plt.show()", "def 
plot_decision_boundary(X, Y, models, titles):\n fig, sub = plt.subplots(2, 4, figsize=(20, 8))\n plt.subplots_adjust(wspace=1.0, hspace=0.6)\n\n xx, yy = create_meshgrid(X[:, 0], X[:, 1])\n\n for clf, title, ax in zip(models, titles, sub.flatten()):\n plot_contours(ax, clf, xx, yy,\n cmap=plt.cm.coolwarm, alpha=0.8)\n ax.scatter(X[:,0], X[:,1], c=Y, cmap=plt.cm.coolwarm, s=20, edgecolors='k')\n\tax.set_xlim(xx.min(), xx.max())\n\tax.set_ylim(yy.min(), yy.max())\n\tax.set_xlabel('Xvalues')\n\tax.set_ylabel('Yvalues')\n\tax.set_xticks(())\n\tax.set_yticks(())\n\tax.set_title(title)\n\n back = matplotlib.get_backend()\n manager = plt.get_current_fig_manager()\n if \"QT\" in back:\n manager.window.showMaximized()\n elif \"Tk\" in back:\n manager.resize(*manager.window.maxsize())\n else:\n manager.frame.Maximize(True)\n plt.show()\n plt.close()", "def plot_decision_regions(self, option, canvas):\n\t\tle = preprocessing.LabelEncoder()\t\t# integer encoder\n\t\tle.fit(self.y)\n\t\tclassifier = self.classifier.fit(self.X, le.transform(self.y))\n\t\tclasses = classifier.classes_\n\t\tnum_classes = len(classes)\n\n\t\tif option == 'train':\n\t\t\tX = self.X\n\t\t\ty = self.y\n\t\telif option == 'test':\n\t\t\tX = self.test_X\n\t\t\ty = self.test_y\n\n\t\tb1 = self.X.iloc[:, 0]\n\t\tb2 = self.X.iloc[:, 1]\n\t\tb1_slack = (b1.max() - b1.min()) * 0.1\n\t\tb2_slack = (b2.max() - b2.min()) * 0.1\n\t\tb1_min, b1_max = b1.min() - b1_slack, b1.max() + b1_slack \t# x-axis range\n\t\tb2_min, b2_max = b2.min() - b2_slack, b2.max() + b2_slack\t# y-axis range\n\t\tstep_1 = (b1_max - b1_min) / 200\n\t\tstep_2 = (b2_max - b2_min) / 200\n\t\tmd1, md2 = np.meshgrid(np.arange(b1_min, b1_max, step_1), np.arange(b2_min, b2_max, step_2))\n\n\t\trcParams.update({'font.size': 7})\n\t\tcanvas.figure.clear()\n\t\tax = canvas.figure.subplots()\n\t\tlevels = np.arange(-0.19, 1, 0.2) + 0.2\n\n\t\tif num_classes == 2:\n\t\t\tcm_bkgd = plt.cm.RdBu\n\t\t\tcm_pts = ListedColormap(['#FF0000', '#0000FF'])\n\t\t\tZ = classifier.predict_proba(np.c_[md1.ravel(), md2.ravel()])[:, 1]\n\t\t\tZ = Z.reshape(md1.shape)\n\t\t\tax.contourf(md1, md2, Z, vmin=0, vmax=1, cmap=cm_bkgd, alpha=0.8)\n\n\t\telif num_classes == 3:\n\t\t\tcm_bkgd_1 = plt.cm.Reds\n\t\t\tcm_bkgd_2 = plt.cm.Greens\n\t\t\tcm_bkgd_3 = plt.cm.Blues\n\t\t\tcm_pts = cm_pts = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])\n\t\t\tZ = classifier.predict_proba(np.c_[md1.ravel(), md2.ravel()])\n\t\t\tZ1 = Z[:, 0]\n\t\t\tZ2 = Z[:, 1]\n\t\t\tZ3 = Z[:, 2]\n\n\t\t\tP1 = np.maximum(0, Z1 - np.maximum(Z2, Z3))\n\t\t\tP2 = np.maximum(0, Z2 - np.maximum(Z1, Z3))\n\t\t\tP3 = np.maximum(0, Z3 - np.maximum(Z1, Z2))\n\t\t\tP1 = P1.reshape(md1.shape)\n\t\t\tP2 = P2.reshape(md1.shape)\n\t\t\tP3 = P3.reshape(md1.shape)\n\n\t\t\tax.contourf(md1, md2, P1, levels, cmap=cm_bkgd_1, alpha=0.8)\n\t\t\tax.contourf(md1, md2, P2, levels, cmap=cm_bkgd_2, alpha=0.8)\n\t\t\tax.contourf(md1, md2, P3, levels, cmap=cm_bkgd_3, alpha=0.8)\n\n\t\td1 = X.iloc[:, 0] \t# x-axis\n\t\td2 = X.iloc[:, 1]\t# y-axis\n\t\tax.scatter(d1, d2, c=le.transform(y), cmap=cm_pts, alpha=0.6, edgecolors='k')\n\t\tax.set_xlim(md1.min(), md1.max())\n\t\tax.set_ylim(md2.min(), md2.max())\n\t\tax.set_xticks(())\n\t\tax.set_yticks(())\n\t\tax.set_xlabel(X.columns[0])\n\t\tax.set_ylabel(X.columns[1])\n\n\t\tcanvas.figure.tight_layout()\n\t\tcanvas.draw()", "def draw_knn_boundaries(knn, h=0.02): # h = Step size in the mesh\n ax = plt.gca()\n [xmin, xmax] = ax.get_xlim()\n [ymin, ymax] = ax.get_ylim()\n # Generate the axis associated to the 
first feature: \n x_axis = np.arange(xmin, xmax, h)\n # Generate the axis associated to the 2nd feature: \n y_axis = np.arange(ymin, ymax, h)\n # Generate a meshgrid (2D grid) from the 2 axis:\n x_grid, y_grid = np.meshgrid(x_axis, y_axis)\n # Vectorize the grids into column vectors:\n x_grid_vectorized = x_grid.flatten()\n x_grid_vectorized = np.expand_dims(x_grid_vectorized, axis=1)\n y_grid_vectorized = y_grid.flatten()\n y_grid_vectorized = np.expand_dims(y_grid_vectorized, axis=1)\n # Concatenate the vectorized grids\n grid = np.concatenate((x_grid_vectorized, y_grid_vectorized), axis=1)\n # Now you can use 'grid' as data to classify by the knn \n\n # Predict concatenated features to get the decision boundaries:\n decision_boundaries = ... #TODO!\n\n # Reshape the decision boundaries into a 2D matrix:\n decision_boundaries = decision_boundaries.reshape(x_grid.shape)\n plt.pcolormesh(x_grid, y_grid, decision_boundaries, cmap=cmap_light, zorder=1)\n return ax", "def plotBoundary(self,X,Y,axis=None):\n if len(self.theta) != 3: raise ValueError('Data & model must be 2D');\n ax = X.min(0),X.max(0); ax = (ax[0][0],ax[1][0],ax[0][1],ax[1][1]);\n ## TODO: find points on decision boundary defined by theta0 + theta1 X1 + theta2 X2 == 0\n x1b = np.array([ax[0],ax[1]]); # at X1 = points in x1b\n (t0, t1, t2) = self.theta\n x2b = ( -np.array([t0, t0]) - t1 * x1b) / t2\n ## Now plot the data and the resulting boundary:\n A = Y==self.classes[0]; # and plot it:\n recs = [\n mpatches.Rectangle((0, 0), 1, 1, fc=c)\n for c in ['r', 'g']\n ]\n if not axis:\n plt.plot(X[A,0],X[A,1],'r.',X[~A,0],X[~A,1],'g.',x1b,x2b,'k-'); plt.axis(ax); plt.draw(); plt.legend(recs, self.classes)\n else:\n axis.plot(X[A,0],X[A,1],'r.',X[~A,0],X[~A,1],'g.',x1b,x2b,'k-'); axis.axis(ax); axis.legend(recs, self.classes)", "def plot_decision_regions(X, y, clf,\n ax=None,\n X_highlight=None,\n res=0.02, legend=1,\n hide_spines=True,\n markers='s^oxv<>',\n colors='red,blue,limegreen,gray,cyan'):\n # http://stackoverflow.com/questions/22294241/plotting-a-decision-boundary-separating-2-classes-using-matplotlibs-pyplot?lq=1\n # check if data is numpy array\n for a in (X, y):\n if not isinstance(a, np.ndarray):\n raise ValueError('%s must be a NumPy array.' 
% a.__name__)\n\n if ax is None:\n ax = plt.gca()\n\n if not y.dtype == int:\n y = y.astype(int)\n\n # check if test data is provided\n plot_testdata = True\n if not isinstance(X_highlight, np.ndarray):\n if X_highlight is not None:\n raise ValueError('X_test must be a NumPy array or None')\n else:\n plot_testdata = False\n\n if len(X.shape) == 2 and X.shape[1] > 1:\n dim = '2d'\n else:\n dim = '1d'\n\n marker_gen = cycle(list(markers))\n\n # make color map\n n_classes = len(np.unique(y))\n colors = colors.split(',')\n cmap = ListedColormap(colors[:n_classes])\n\n # plot the decision surface\n if dim == '2d':\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n else:\n y_min, y_max = -1, 1\n\n x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, res),\n np.arange(y_min, y_max, res))\n\n if dim == '2d':\n y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n Z = clf.predict(np.array([xx.ravel(), yy.ravel()]).T)\n else:\n y_min, y_max = -1, 1\n Z = clf.predict(np.array([xx.ravel()]).T)\n\n Z = Z.reshape(xx.shape)\n ax.contourf(xx, yy, Z, alpha=0.3, cmap=cmap)\n\n ax.axis(xmin=xx.min(), xmax=xx.max(), y_min=yy.min(), y_max=yy.max())\n\n # plot class samples\n\n for c in np.unique(y):\n if dim == '2d':\n y_data = X[y == c, 1]\n else:\n y_data = [0 for i in X[y == c]]\n\n ax.scatter(x=X[y == c, 0],\n y=y_data,\n alpha=0.8,\n c=cmap(c),\n marker=next(marker_gen),\n label=c)\n\n if hide_spines:\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.spines['left'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n if not dim == '2d':\n ax.axes.get_yaxis().set_ticks([])\n\n if legend:\n legend = plt.legend(loc=legend,\n fancybox=True,\n framealpha=0.3,\n scatterpoints=1,\n handletextpad=-0.25,\n borderaxespad=0.9)\n\n ax.add_artist(legend)\n\n if plot_testdata:\n if dim == '2d':\n ax.scatter(X_highlight[:, 0],\n X_highlight[:, 1],\n c='',\n alpha=1.0,\n linewidth=1,\n marker='o',\n s=80)\n else:\n ax.scatter(X_highlight,\n [0 for i in X_highlight],\n c='',\n alpha=1.0,\n linewidth=1,\n marker='o',\n s=80)\n\n return ax", "def decisionBoundary(root, figure, fileName):\n stepValue = 0.001\n classClassification = [1, 2, 3, 4]\n colorClassification = ['b', 'g', 'r', 'm']\n markerClassification = ['x', '+', '*', 'o']\n classesList = [\"Bolts\", \"Nuts\", \"Rings\", \"Scraps\"]\n decisionPlot = figure.add_subplot(111)\n attributeValues, classes, _ = readData(fileName)\n attributeValues = np.array(attributeValues)\n classes = np.array(classes)\n \n \n\n attribute1, attribute2 = np.meshgrid(np.arange(0, 1, stepValue), np.arange(0, 1, stepValue))\n\n predicted_class = []\n for i in range(attribute1.shape[0]):\n predicted_class.append([])\n for j in range(attribute1.shape[1]):\n result = [attribute1[i][j], attribute2[i][j]]\n predicted_value = classify(np.array(result), root)\n predicted_class[i].append(predicted_value)\n\n decisionPlot.contourf(attribute1, attribute2, np.array(predicted_class))\n\n for a in classClassification:\n attribute1=[]\n attribute2=[]\n \n for j in range(len(attributeValues[:])):\n \n if classes[j]==a:\n attribute1 +=[attributeValues[j][0]]\n for k in range(len(attributeValues[:])):\n if classes[k]==a:\n attribute2 +=[attributeValues[k][1]]\n \n \n decisionPlot.scatter(attribute1, attribute2, color=colorClassification[a - 1], marker=markerClassification[a - 1]\n , label=classesList[a - 1], s=100)\n\n 
decisionPlot.legend(loc='upper right')\n decisionPlot.set_xlabel(\"Six fold Rotational Symmetry\")\n decisionPlot.set_ylabel(\"Eccentricity\")\n decisionPlot.set_title(\"Decision boundary\")\n return decisionPlot", "def plot_decision_regions(X, y, classifier, resolution=.02, test_idx=None):\n # setup marker generator & color map\n plt.figure()\n markers = ('x', 'o')\n colors = ('red', 'blue')\n\n # calculate and plot the decision surface\n x1_min, x1_max = X[:, 0].min() - 1, X[:, 0].max() + 1\n x2_min, x2_max = X[:, 1].min() - 1, X[:, 1].max() + 1\n xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),\n np.arange(x2_min, x2_max, resolution))\n Z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)\n Z = Z.reshape(xx1.shape)\n plt.contourf(xx1, xx2, Z, alpha=.35, cmap=ListedColormap(colors=colors[:len(np.unique(y))]))\n plt.xlim(xx1.min(), xx2.max())\n plt.ylim(xx2.min(), xx2.max())\n\n # scatter plot all values of the data sets\n for idx, cl in enumerate(np.unique(y)):\n plt.scatter(x=X[y == cl, 0],\n y=X[y == cl, 1],\n c=colors[idx],\n marker=markers[idx],\n label=cl,\n edgecolors='black')\n if test_idx:\n # circle test data\n X_test, y_test = X[test_idx, :], y[test_idx]\n plt.scatter(X_test[:, 0],\n X_test[:, 1],\n c='',\n edgecolors='black',\n alpha=1.0,\n linewidths=1,\n marker='o',\n s=100,\n label='test set')", "def plot_decision(X, y, path, model, param, ax=None, h=0.07):\n if ax is None:\n _, ax = plt.subplots(figsize=(7, 6))\n\n # https://stackoverflow.com/a/19055059/6027071\n # sample a region larger than our training data X\n x_min = X[:, 0].min() - 0.5\n x_max = X[:, 0].max() + 0.5\n y_min = X[:, 1].min() - 0.5\n y_max = X[:, 1].max() + 0.5\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n\n # plot decision boundaries\n x = np.concatenate(([xx.ravel()], [yy.ravel()]))\n pred = model.predict(x.T).reshape(xx.shape)\n ax.contourf(xx, yy, pred, alpha=0.8, cmap='RdYlBu')\n\n # plot points (coloured by class)\n ax.scatter(X[:, 0], X[:, 1], alpha=0.8, c=y, cmap='RdYlBu')\n ax.axis('off')\n\n title = 'hidden_dim: {} | learning rate: {} | n_epochs: {} | lambda_1: {} | lambda_2: {}'.format(\n param[0], param[1], param[2], param[3], param[4]\n )\n\n plt.title(title)\n plt.savefig(path)\n plt.close()", "def model_visualization(model,X,y,classifier):\n sns.set_context(context='notebook',font_scale=2)\n plt.figure(figsize=(16,9))\n from matplotlib.colors import ListedColormap\n X_set, y_set = X, y\n X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))\n plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape), alpha = 0.6, cmap = ListedColormap(('green', 'blue')))\n plt.xlim(X1.min(), X1.max())\n plt.ylim(X2.min(), X2.max())\n for i, j in enumerate(np.unique(y_set)):\n plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],\n color = ListedColormap(('turquoise', 'blue'))(i), label = j)\n plt.title(\"%s Model Set\" %(model))\n plt.xlabel('PC 1')\n plt.ylabel('PC 2')\n plt.legend()\n plt.savefig('images/{0}.png'.format(model))", "def visclassifier(fun,xTr,yTr):\n\n yTr = np.array(yTr).flatten()\n \n symbols = [\"ko\",\"kx\"]\n marker_symbols = ['o', 'x']\n mycolors = [[0.5, 0.5, 1], [1, 0.5, 0.5]]\n classvals = np.unique(yTr)\n\n plt.figure()\n\n res=300\n xrange = np.linspace(min(xTr[:, 0]), max(xTr[:, 0]),res)\n yrange = np.linspace(min(xTr[:, 1]), max(xTr[:, 1]),res)\n 
pixelX = repmat(xrange, res, 1)\n pixelY = repmat(yrange, res, 1).T\n\n xTe = np.array([pixelX.flatten(), pixelY.flatten()]).T\n\n testpreds = fun(xTe)\n Z = testpreds.reshape(res, res)\n # Z[0,0] = 1 # optional: scale the colors correctly\n plt.contourf(pixelX, pixelY, np.sign(Z), colors=mycolors)\n\n for idx, c in enumerate(classvals):\n plt.scatter(xTr[yTr == c,0],\n xTr[yTr == c,1],\n marker=marker_symbols[idx],\n color='k'\n )\n\n plt.axis('tight')\n plt.show()", "def find_decision_boundary(self, X_cv, y_cv, plot=False):\n # Find possible decision functions\n dec_func = self.classifier.decision_function(self.scaler.transform(X_cv))\n dec_func_range = np.linspace(dec_func[y_cv == 1].min(), dec_func[y_cv == 1].max(), 20)\n # Empty arrays\n prec = []\n recall = []\n f1 = []\n for boundary in dec_func_range:\n # Make predictions using this decision boundary\n y_pred = (dec_func > boundary).astype('b')\n # Score prediction\n prec.append(metrics.precision_score(y_cv, y_pred))\n recall.append(metrics.recall_score(y_cv, y_pred))\n f1.append(metrics.f1_score(y_cv, y_pred))\n # Find decision boundary that corresponds to best F1 score\n db_ind = np.argmax(f1)\n if plot:\n fig, ax = plt.subplots()\n ax.plot(dec_func_range, prec, 'r-', label='precision')\n ax.plot(dec_func_range, recall, 'b-', label='recall')\n ax.plot(dec_func_range, f1, 'k-', label='F1 score')\n ax.plot([dec_func_range[db_ind]] * 2, [0, 1], 'k--')\n ax.legend()\n plt.show()\n self.decision_boundary = dec_func_range[db_ind]", "def plot_decision_boundary(k, x, t, granularity=100, figures_root='../figures', data_name=None):\r\n print(f'KNN for K={k}')\r\n\r\n # Initialize meshgrid to be used to store the class prediction values\r\n # this is used for computing and plotting the decision boundary contour\r\n\r\n pointsX = numpy.linspace(numpy.min(x[:, 0]) - 0.1, numpy.max(x[:, 0]) + 0.1, granularity)\r\n pointsY = numpy.linspace(numpy.min(x[:, 1]) - 0.1, numpy.max(x[:, 1]) + 0.1, granularity)\r\n\r\n Xv, Yv = numpy.meshgrid(pointsX, pointsY)\r\n\r\n # Calculate KNN classification for every point in meshgrid\r\n classes = numpy.zeros(shape=(Xv.shape[0], Xv.shape[1]))\r\n for i in range(Xv.shape[0]):\r\n for j in range(Xv.shape[1]):\r\n c = knn(numpy.array([Xv[i][j], Yv[i][j]]), k, x, t)\r\n # print('{0} {1} {2}'.format(i, j, c))\r\n classes[i][j] = c\r\n\r\n # plot the binary decision boundary contour\r\n plt.figure()\r\n plt.pcolormesh(Xv, Yv, classes, cmap=CMAP_LIGHT)\r\n ti = f'KNN with K = {k}'\r\n plt.title(ti)\r\n plt.draw()\r\n\r\n save_path = None\r\n if data_name is not None:\r\n save_path = os.path.join(figures_root, f'knn_{data_name}_k={k}')\r\n # else:\r\n # save_path = os.path.join(figures_root, f'knn_k={k}')\r\n\r\n # plot the data (on top of the decision boundary color mesh)\r\n plot_data(x, t, new_figure=False, save_path=save_path)\r\n\r\n return classes", "def plot(self, fname=None):\n x = np.linspace(self.bounds[0], self.bounds[-1], 200)\n y = [self.evaluate(xi) for xi in x]\n plt.figure()\n plt.plot(x, y, label='Class func')\n plt.plot(self.bounds, self.gis, 'o', label='Algorithm')\n plt.grid(color='0.7')\n plt.xlabel('Dependent Variable')\n plt.ylabel('PP Transformed Class Value')\n if fname:\n plt.savefig(fname)\n else:\n plt.show()", "def plot_svc_decision_function(clf):\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n x = np.linspace(plt.xlim()[0], plt.xlim()[1], 30)\n y = np.linspace(plt.ylim()[0], plt.ylim()[1], 30)\n Y, X = np.meshgrid(y, x)\n P = np.zeros_like(X)\n for i, xi in 
enumerate(x):\n for j, yj in enumerate(y):\n P[i, j] = clf.decision_function([xi, yj])\n return plt.contour(X, Y, P, colors='k',\n levels=[-1, 0, 1],\n linestyles=['--', '-', '--'])", "def plot(self):\r\n \r\n\r\n print(\"Printing decision surfaces of decision trees\")\r\n plot_colors = \"rb\"\r\n plot_step = 0.02\r\n n_classes = 2\r\n for _ in range (self.n_estimators):\r\n plt.subplot(2, 3, _ + 1)\r\n x_min, x_max = self.X.iloc[:, 0].min() - 1, self.X.iloc[:, 0].max() + 1\r\n y_min, y_max = self.X.iloc[:, 1].min() - 1, self.X.iloc[:, 1].max() + 1\r\n xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step),np.arange(y_min, y_max, plot_step))\r\n plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)\r\n Z = self.clfs[_].predict(np.c_[xx.ravel(), yy.ravel()])\r\n Z = np.array(Z)\r\n Z = Z.reshape(xx.shape)\r\n cs = plt.contourf(xx, yy, Z, cmap=plt.cm.RdBu)\r\n for i, color in zip(range(n_classes), plot_colors):\r\n if i == 0:\r\n idx = np.where(self.y == -1)\r\n if i == 1:\r\n idx = np.where(self.y == 1)\r\n for i in range (len(idx[0])):\r\n plt.scatter(self.X.loc[idx[0][i]][0], self.X.loc[idx[0][i]][1],c=color,cmap=plt.cm.RdBu, edgecolor='black', s=15)\r\n plt.suptitle(\"Decision surface of a decision tree using paired features\")\r\n plt.legend(loc='lower right', borderpad=0, handletextpad=0)\r\n plt.axis(\"tight\")\r\n\r\n plt.show()\r\n fig1 = plt\r\n\r\n # Figure 2\r\n print(\"Printing decision surface by combining the individual estimators\")\r\n plot_colors = \"rb\"\r\n plot_step = 0.02\r\n n_classes = 2\r\n x_min, x_max = self.X.iloc[:, 0].min() - 1, self.X.iloc[:, 0].max() + 1\r\n y_min, y_max = self.X.iloc[:, 1].min() - 1, self.X.iloc[:, 1].max() + 1\r\n xx, yy = np.meshgrid(np.arange(x_min, x_max, plot_step),np.arange(y_min, y_max, plot_step))\r\n plt.tight_layout(h_pad=0.5, w_pad=0.5, pad=2.5)\r\n Z = config.Classifier_AB.predict(np.c_[xx.ravel(), yy.ravel()])\r\n Z = np.array(Z)\r\n Z = Z.reshape(xx.shape)\r\n cs = plt.contourf(xx, yy, Z, cmap=plt.cm.RdBu)\r\n for i, color in zip(range(n_classes), plot_colors):\r\n if i == 0:\r\n idx = np.where(self.y == -1)\r\n if i == 1:\r\n idx = np.where(self.y == 1)\r\n for i in range (len(idx[0])):\r\n plt.scatter(self.X.loc[idx[0][i]][0], self.X.loc[idx[0][i]][1],c=color,cmap=plt.cm.RdBu, edgecolor='black', s=15)\r\n plt.suptitle(\"Decision surface by combining individual estimators\")\r\n plt.legend(loc='lower right', borderpad=0, handletextpad=0)\r\n plt.axis(\"tight\")\r\n\r\n plt.show()\r\n fig2 = plt\r\n\r\n return [fig1,fig2]", "def plot_decision_tree(classifier, feature_names=None, class_names=None):\n fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(4, 4))\n tree.plot_tree(\n classifier,\n feature_names=feature_names,\n class_names=class_names,\n rounded=True,\n filled=True,\n )\n fig.show()", "def plot_classification(nsrc_predict, nsrc_true, n_max=4, label=''):\n bins = np.linspace(0.5, n_max + 0.5, n_max + 1)\n fig, axes = plt.subplots(1, n_max, sharex=True, sharey=True,\n figsize=(2.5 * n_max, 2.5))\n for n in range(1, n_max + 1):\n sel = (nsrc_true == n)\n ax = axes[n - 1]\n f, _, _ = ax.hist(nsrc_predict[sel], bins, histtype='stepfilled',\n lw=2, density=True)\n ax.axvline(n, c='r', ls='--')\n ax.text(0.83, 0.9, f'{label}{100 * f[n - 1]:.1f}%',\n horizontalalignment='right', color='r', fontsize=14,\n transform=ax.transAxes)\n plt.tight_layout()", "def plot_decision_regions(X, y, clf,\n feature_index=None,\n filler_feature_values=None,\n filler_feature_ranges=None,\n ax=None,\n X_highlight=None,\n res=0.02, legend=1,\n 
hide_spines=True,\n markers='s^oxv<>',\n colors='red,blue,limegreen,gray,cyan'):\n\n check_Xy(X, y, y_int=True) # Validate X and y arrays\n dim = X.shape[1]\n\n if ax is None:\n ax = plt.gca()\n\n if isinstance(res, float):\n xres, yres = res, res\n else:\n try:\n xres, yres = res\n except ValueError:\n raise ValueError('Unable to unpack res. Expecting '\n 'array-like input of length 2.')\n plot_testdata = True\n if not isinstance(X_highlight, np.ndarray):\n if X_highlight is not None:\n raise ValueError('X_highlight must be a NumPy array or None')\n else:\n plot_testdata = False\n elif len(X_highlight.shape) < 2:\n raise ValueError('X_highlight must be a 2D array')\n\n if feature_index is not None:\n # Unpack and validate the feature_index values\n if dim == 1:\n raise ValueError(\n 'feature_index requires more than one training feature')\n try:\n x_index, y_index = feature_index\n except ValueError:\n raise ValueError(\n 'Unable to unpack feature_index. Make sure feature_index '\n 'only has two dimensions.')\n try:\n X[:, x_index], X[:, y_index]\n except IndexError:\n raise IndexError(\n 'feature_index values out of range. X.shape is {}, but '\n 'feature_index is {}'.format(X.shape, feature_index))\n else:\n feature_index = (0, 1)\n x_index, y_index = feature_index\n\n # Extra input validation for higher number of training features\n if dim > 2:\n if filler_feature_values is None:\n raise ValueError('Filler values must be provided when '\n 'X has more than 2 training features.')\n\n if filler_feature_ranges is not None:\n if not set(filler_feature_values) == set(filler_feature_ranges):\n raise ValueError(\n 'filler_feature_values and filler_feature_ranges must '\n 'have the same keys')\n\n # Check that all columns in X are accounted for\n column_check = np.zeros(dim, dtype=bool)\n for idx in filler_feature_values:\n column_check[idx] = True\n for idx in feature_index:\n column_check[idx] = True\n if not all(column_check):\n missing_cols = np.argwhere(~column_check).flatten()\n raise ValueError(\n 'Column(s) {} need to be accounted for in either '\n 'feature_index or filler_feature_values'.format(missing_cols))\n\n marker_gen = cycle(list(markers))\n\n n_classes = np.unique(y).shape[0]\n colors = colors.split(',')\n\n colors_gen = cycle(colors)\n colors = [next(colors_gen) for c in range(n_classes)]\n\n # Get minimum and maximum\n x_min, x_max = X[:, x_index].min() - 1, X[:, x_index].max() + 1\n if dim == 1:\n y_min, y_max = -1, 1\n else:\n y_min, y_max = X[:, y_index].min() - 1, X[:, y_index].max() + 1\n\n xx, yy = np.meshgrid(np.arange(x_min, x_max, xres),\n np.arange(y_min, y_max, yres))\n\n if dim == 1:\n X_predict = np.array([xx.ravel()]).T\n else:\n X_grid = np.array([xx.ravel(), yy.ravel()]).T\n X_predict = np.zeros((X_grid.shape[0], dim))\n X_predict[:, x_index] = X_grid[:, 0]\n X_predict[:, y_index] = X_grid[:, 1]\n if dim > 2:\n for feature_idx in filler_feature_values:\n X_predict[:, feature_idx] = filler_feature_values[feature_idx]\n Z = clf.predict(X_predict)\n Z = Z.reshape(xx.shape)\n # Plot decisoin region\n ax.contourf(xx, yy, Z,\n alpha=0.3,\n colors=colors,\n levels=np.arange(Z.max() + 2) - 0.5)\n\n # ax.axis(xmin=xx.min(), xmax=xx.max(), y_min=yy.min(), y_max=yy.max())\n ax.axis(xmin=xx.min(), xmax=xx.max())\n\n # Scatter training data samples\n for idx, c in enumerate(np.unique(y)):\n if dim == 1:\n y_data = [0 for i in X[y == c]]\n x_data = X[y == c]\n elif dim == 2:\n y_data = X[y == c, y_index]\n x_data = X[y == c, x_index]\n elif dim > 2 and filler_feature_ranges 
is not None:\n class_mask = y == c\n feature_range_mask = get_feature_range_mask(\n X, filler_feature_values=filler_feature_values,\n filler_feature_ranges=filler_feature_ranges)\n y_data = X[class_mask & feature_range_mask, y_index]\n x_data = X[class_mask & feature_range_mask, x_index]\n else:\n continue\n\n ax.scatter(x=x_data,\n y=y_data,\n alpha=0.8,\n c=colors[idx],\n marker=next(marker_gen),\n edgecolor='black',\n label=c)\n\n if hide_spines:\n ax.spines['right'].set_visible(False)\n ax.spines['top'].set_visible(False)\n ax.spines['left'].set_visible(False)\n ax.spines['bottom'].set_visible(False)\n ax.yaxis.set_ticks_position('left')\n ax.xaxis.set_ticks_position('bottom')\n if dim == 1:\n ax.axes.get_yaxis().set_ticks([])\n\n if legend:\n if dim > 2 and filler_feature_ranges is None:\n pass\n else:\n handles, labels = ax.get_legend_handles_labels()\n ax.legend(handles, labels,\n framealpha=0.3, scatterpoints=1, loc=legend)\n\n if plot_testdata:\n if dim == 1:\n x_data = X_highlight\n y_data = [0 for i in X_highlight]\n elif dim == 2:\n x_data = X_highlight[:, x_index]\n y_data = X_highlight[:, y_index]\n else:\n feature_range_mask = get_feature_range_mask(\n X_highlight, filler_feature_values=filler_feature_values,\n filler_feature_ranges=filler_feature_ranges)\n y_data = X_highlight[feature_range_mask, y_index]\n x_data = X_highlight[feature_range_mask, x_index]\n\n ax.scatter(x_data,\n y_data,\n c='',\n edgecolor='black',\n alpha=1.0,\n linewidths=1,\n marker='o',\n s=80)\n\n return ax", "def plot_svc_decision_function(clf, ax=None):\n plot_decision_function(clf.decision_function, [-1, 0, 1], ax)", "def plot_compare_train_test(decisions,bins,classifier, ws=None):\n low = min(np.min(d) for d in decisions)\n high = max(np.max(d) for d in decisions)\n low_high = (low,high)\n # Plot with python.\n plt.figure()\n plt.hist(decisions[0], color='b', alpha=0.5, range=low_high, bins=bins, histtype='stepfilled', density=True, label='S (train)', weights=ws[0])\n plt.hist(decisions[1], color='r', alpha=0.5, range=low_high, bins=bins, histtype='stepfilled', density=True, label='B (train)', weights=ws[1])\n hist, bins = np.histogram(decisions[2], bins=bins, range=low_high, density=True, weights=ws[2])\n center = (bins[:-1] + bins[1:]) / 2\n #scale = len(decisions[2]) / sum(hist)\n scale = sum(ws[2]) / sum(hist)\n err = np.sqrt(hist * scale) / scale\n plt.errorbar(center, hist, yerr=err, fmt='o', c='b', label='S (test)')\n hist, bins = np.histogram(decisions[3], bins=bins, range=low_high, density=True, weights=ws[3])\n #scale = len(decisions[3]) / sum(hist)\n scale = sum(ws[3]) / sum(hist)\n err = np.sqrt(hist * scale) / scale\n plt.errorbar(center, hist, yerr=err, fmt='o', c='r', label='B (test)')\n plt.xticks(np.arange(0, 1, step=0.1))\n plt.xlabel(\"Classifier output\")\n plt.ylabel(\"Arbitrary units\")\n plt.legend(loc='best')\n plt.savefig('plots/plt_' + classifier+'_Output.pdf',format='pdf')\n plt.show(block = False)\n return None", "def plot(self):\n cs = plt.contour(self.X, self.Y, self.fitness_function)\n plt.clabel(cs, inline=1, fontsize=6)\n plt.imshow(self.fitness_function, extent=self.limits, origin=\"lower\", alpha=0.3)", "def _visualize(self, unnorm_image, class_ids, scores, bounding_boxes):\n ax = utils.viz.plot_bbox(unnorm_image,\n bounding_boxes[0],\n scores[0],\n class_ids[0],\n class_names=self._network.classes)\n fig = plt.gcf()\n fig.set_size_inches(14, 14)\n plt.show()", "def figure_10_12_b():\n xs = np.arange(-6,6,0.1)\n plt.plot(xs,sigmoid(xs))\n x=2.5\n 
plt.scatter(x,sigmoid(x))\n plt.plot(xs,logistic_lower_bound(xs,x))\n plt.show()", "def vis_detections(im, class_name, dets,ax, thresh=0.5):\n inds = np.where(dets[:, -1] >= thresh)[0]\n if len(inds) == 0:\n return\n\n for i in inds:\n bbox = dets[i, :4]\n score = dets[i, -1]\n\n ax.add_patch(\n plt.Rectangle((bbox[0], bbox[1]),\n bbox[2] - bbox[0],\n bbox[3] - bbox[1], fill=False,\n edgecolor='red', linewidth=3.5)\n )\n ax.text(bbox[0], bbox[1] - 2,\n '{:s} {:.3f}'.format(class_name, score),\n bbox=dict(facecolor='blue', alpha=0.5),\n fontsize=14, color='white')\n\n ax.set_title(('{} detections with '\n 'p({} | box) >= {:.1f}').format(class_name, class_name,\n thresh),\n fontsize=14)\n plt.axis('off')\n plt.tight_layout()\n plt.draw()", "def map_objects_classifier_evaluation(self):\n df = self.results[(self.results['iou'] > 0.7)]\n y_true = df['true_class']\n y_pred = df['pred_class']\n print(classification_report(y_true, y_pred))\n matrix = confusion_matrix(y_true, y_pred)\n matrix = matrix.astype('float') / matrix.sum(axis=1)[:, np.newaxis]\n import seaborn as sns\n\n plt.figure(figsize=(10, 7))\n sns.set(font_scale=2.4)\n sns.heatmap(matrix, annot=True, annot_kws={'size': 25},\n cmap=plt.cm.Reds)\n # Add labels to the plot\n class_names = ['background', 'building', 'water']\n tick_marks = np.arange(len(class_names))\n tick_marks2 = tick_marks + 0.28\n tick_marks2[0] = tick_marks2[0] - 0.2\n tick_marks = tick_marks + 0.5\n plt.xticks(tick_marks, class_names, rotation=0)\n plt.yticks(tick_marks2, class_names, rotation=90)\n plt.xlabel('Predicted label', labelpad=13)\n plt.ylabel('True label', labelpad=13)\n plt.show()", "def Plot_predict(X,Y,model,X_path): \n labels = {0: 'CNV', 1: 'DME', 2: 'DRUSEN', 3: 'NORMAL'}\n Y_pred_classes = np.argmax(model.predict(X),axis = 1) \n Y_true = np.argmax(Y,axis = 1)\n \n fig = plt.figure(figsize=(40, 40)) \n for i in range(X.shape[0]):\n ax = fig.add_subplot(8, 4, i + 1, xticks=[], yticks=[])\n ax.set_title(\"Groundtruth : {} \\n Prediction : {}\".format(labels[Y_true[i]],labels[Y_pred_classes[i]]), \\\n color=(\"green\" if Y_true[i] == Y_pred_classes[i] else \"red\"),fontsize=20) \n img = image.load_img(X_path[i])\n ax.imshow(img)\n plt.show()\n return", "def decision_plot(self, X, y):\n\n y = self._slice_target_index(y=y)\n\n for index in range(_n_targets(y)):\n if sklearn.utils.multiclass.type_of_target(y) == 'continuous-multioutput':\n self.fit(X, y.iloc[:, index].values.ravel(order='K'))\n else:\n self.fit(X, y)\n explainer, shap_values = self.explainer(X=X)\n shap.decision_plot(base_value=explainer.expected_value, shap_values=shap_values,\n feature_names=list(X.columns), show=self.show)", "def plot(self):\n\t\traw_labels = self.make_raw_data()[1]\n\t\tbalanced_labels = self.get_extra()[1]\n\t\tfig, ax1 = subplots()\n\t\tax2 = ax1.twinx()\n\t\tx = array(range(1, NCLASSES + 1))\n\t\tl1 = ax1.bar(x - 0.3, self.prior_sizes, width = 0.25, color = 'b', align = 'center', label = 'train')\n\t\tl2 = ax2.bar(x, bincount(raw_labels - 1), width = 0.25, color = 'r', align = 'center', label = 'confident')\n\t\tl3 = ax2.bar(x + 0.3, bincount(balanced_labels - 1), width = 0.25, color = 'g', align = 'center', label = 'rebalanced')\n\t\tconfident_frac = len(raw_labels) / float(self.predictions.shape[0])\n\t\tusable_frac = len(balanced_labels) / float(self.predictions.shape[0])\n\t\tax1.set_title('at >{0:.1f}%, {1:.1f}% reliable, {2:.1f}% usable'.format(self.confidence * 100, confident_frac * 100, usable_frac * 100))\n\t\tax1.legend([l1, l2, l3], [l1.get_label(), 
l2.get_label(), l3.get_label()], loc = 'upper right')\n\t\tax1.set_xticks(x)", "def plot_boundary(X, y, resolution=100, n_neighbors=1):\n \n xmin, xmax, ymin, ymax = np.min(X[:,0]), np.max(X[:,0]), np.min(X[:,1]), np.max(X[:,1])\n \n xs, ys = np.linspace(xmin-0.1, xmax+0.1, num=resolution), np.linspace(ymin-0.1, ymax+0.1, num=resolution)\n xgrid, ygrid = np.meshgrid(xs, ys)\n \n \n clf = KNN(n_neighbors=n_neighbors)\n clf.fit(X, y)\n \n Xpred = np.stack((xgrid.flatten(), ygrid.flatten()), axis=1)\n ypred = clf.predict(Xpred)\n ypred = ypred.reshape((resolution, resolution))\n \n ind1 = np.where(ypred[:-1,:] != ypred[1:,:])\n ind2 = np.where(ypred[:,:-1] != ypred[:,1:])\n \n xret = np.concatenate((xgrid[ind1].flatten(), xgrid[ind2].flatten()))\n yret = np.concatenate((ygrid[ind1].flatten(), ygrid[ind2].flatten()))\n \n return xret, yret", "def vis_detections(im, class_name, dets, thresh=0.5):\n im = im[:, :, (2, 1, 0)]\n fig, ax = plt.subplots(figsize=(12, 12))\n ax.imshow(im, aspect='equal')\n for i, det in enumerate(dets):\n bbox = dets[i, :4]\n score = dets[i, -1]\n\n ax.add_patch(\n plt.Rectangle((bbox[0], bbox[1]),\n bbox[2] - bbox[0],\n bbox[3] - bbox[1], fill=False,\n edgecolor='red', linewidth=3.5)\n )\n ax.text(bbox[0], bbox[1] - 2,\n '{:s} {:.3f}'.format(class_name, score),\n bbox=dict(facecolor='blue', alpha=0.5),\n fontsize=14, color='white')\n\n ax.set_title(('{} detections with '\n 'p({} | box) >= {:.1f}').format(class_name, class_name,\n thresh),\n fontsize=14)\n plt.axis('off')\n plt.tight_layout()\n plt.draw()", "def plot_probability_distribution(\n y_true, y_pred_proba, threshold, class_labels=[0, 1]):\n\n _y = pd.concat([y_true, y_pred_proba], axis=1)\n\n sns.kdeplot(\n _y[_y.iloc[:, 0] == 1].iloc[:, 1],\n shade=True, label=class_labels[1], linewidth=3, alpha=0.7)\n sns.kdeplot(\n _y[_y.iloc[:, 0] == 0].iloc[:, 1],\n shade=True, label=class_labels[0], linewidth=3, alpha=0.7)\n\n plt.plot( # threshold line\n [threshold, threshold],\n [plt.ylim()[0], plt.ylim()[1]],\n 'r--', linewidth=3,\n alpha=0.3, label='threshold={}'.format(threshold))\n\n plt.xlim(0, 1)\n plt.title(\"Class probability distribution\")\n plt.xlabel('Probability')\n plt.ylabel('Density')\n plt.legend(loc='upper center')\n score = accuracy_score(y_true, y_pred_proba, threshold)\n plt.text(\n 0.05, 0.2,\n \"Score={:.3f}\".format(score),\n bbox=dict(boxstyle=\"round\", fc=\"w\", ec=\"0.5\", alpha=0.9))", "def visualize_boundary_linear(X, y, model):\n\n # w = model.w\n # b = model.b\n w = model.coef_[0]\n b = model.intercept_[0]\n xp = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100)\n yp_left = np.array([w[0]]) * np.array([xp]).T + b\n # yp_right = np.array([w[1]])\n yp_right = np.array([[w[1]]])\n yp = -(np.linalg.solve(yp_right.T, yp_left.T).T)\n\n pos = np.where(y == 1)\n neg = np.where(y == 0)\n\n plt.plot(X[pos[0], 0], X[pos[0], 1], 'k+', markersize=7, linewidth=1)\n plt.plot(X[neg[0], 0], X[neg[0], 1], 'yo', markerfacecolor='y', markersize=7)\n\n plt.plot(xp, yp, 'b-')\n plt.show()", "def plot(self):\n plt.imshow(self.cm, interpolation='nearest', cmap=self.cmap)\n plt.title(self.title)\n plt.colorbar()\n tick_marks = np.arange(len(self.classes))\n plt.xticks(tick_marks, self.classes, rotation=45)\n plt.yticks(tick_marks, self.classes)\n \n if self.normalize:\n self.cm = self.cm.astype('float') / self.cm.sum(axis=1)[:, np.newaxis]\n print(\"Normalized confusion matrix\")\n else:\n print('Confusion matrix, without normalization')\n \n print(self.cm)\n \n thresh = self.cm.max() / 2.\n for i, j in 
itertools.product(range(self.cm.shape[0]), range(self.cm.shape[1])):\n plt.text(j, i, self.cm[i, j], horizontalalignment=\"center\", color=\"white\" if self.cm[i, j] > thresh else \"black\")\n plt.tight_layout()\n plt.ylabel('True Label')\n plt.xlabel('Predicted label')", "def plot_svc_decision_function(clf, ax=None, plot_support=True):\n if ax is None:\n ax = plt.gca()\n xlim = ax.get_xlim()\n ylim = ax.get_ylim()\n\n # create grid to evaluate model\n x = np.linspace(xlim[0], xlim[1], 30)\n y = np.linspace(ylim[0], ylim[1], 30)\n Y, X = np.meshgrid(y, x)\n xy = np.vstack([X.ravel(), Y.ravel()]).T\n P = clf.decision_function(xy).reshape(X.shape)\n\n # plot decision boundary and margins\n ax.contour(X, Y, P, colors='k',\n levels=[-1, 0, 1], alpha=0.5,\n linestyles=['--', '-', '--'])\n\n # plot support vectors\n if plot_support:\n ax.scatter(clf.support_vectors_[:, 0],\n clf.support_vectors_[:, 1],\n s=300, linewidth=1, facecolors='none')\n ax.set_xlim(xlim)\n ax.set_ylim(ylim)", "def visualize(self):\n\n check_is_fitted(self, \"sm_\")\n\n fig = plt.figure(figsize=(6, 4))\n inner = gridspec.GridSpec(2, 1, hspace=0.1, height_ratios=[6, 1])\n ax1_main = plt.Subplot(fig, inner[0]) \n xgrid = np.linspace(self.xmin, self.xmax, 100).reshape([-1, 1])\n ygrid = self.decision_function(xgrid)\n ax1_main.plot(xgrid, ygrid)\n ax1_main.set_xticklabels([])\n ax1_main.set_title(\"Shape Function\", fontsize=12)\n fig.add_subplot(ax1_main)\n \n ax1_density = plt.Subplot(fig, inner[1]) \n xint = ((np.array(self.bins_[1:]) + np.array(self.bins_[:-1])) / 2).reshape([-1, 1]).reshape([-1])\n ax1_density.bar(xint, self.density_, width=xint[1] - xint[0])\n ax1_main.get_shared_x_axes().join(ax1_main, ax1_density)\n ax1_density.set_yticklabels([])\n ax1_density.autoscale()\n fig.add_subplot(ax1_density)\n plt.show()", "def __show_precision_recall_threshold_graph(model, X, y):\n y_scores = cross_val_predict(model, X, y, cv=3, method='decision_function')\n precisions, recalls, thresholds = precision_recall_curve(y, y_scores)\n plt.plot(thresholds, precisions[:-1], \"b--\", label=\"Precision\")\n plt.plot(thresholds, recalls[:-1], \"g-\", label=\"Recall\")\n plt.xlabel(\"Threshold\")\n plt.legend(loc=\"center left\")\n plt.xlim([-800, 800])\n plt.ylim([0, 1])\n plt.show()", "def plot_model_curves(class_name, model, range_metrics, ax):\n def plot_axis(ax, data, color):\n \"\"\"\n Plot data on axis in certain color\n \"\"\"\n x_indices = [0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]\n ax.scatter(x_indices, data, color=color, s=4)\n ax.plot(x_indices, data, color=color, linewidth=2)\n ax.set_yticks([]) # same for y ticks\n ax.set_ylim([0, 1])\n # Get balanced purities\n preds = np.concatenate(model.results)\n if model.name == \"Binary Classifiers\":\n purities = get_binary_balanced_purity_ranges(\n preds, model.class_labels, 0.1, model.class_counts)[class_name]\n else:\n purities = get_balanced_purity_ranges(\n preds, model.class_labels, 0.1, model.class_counts)[class_name]\n\n # Get completenesses\n comps = get_completeness_ranges(model.class_counts, range_metrics, class_name)\n\n print(\"\\n\\n Model: \" + str(model.name) + \", class: \" + class_name)\n print(\"Completeness\")\n print(comps)\n print(\"Purity\")\n print(purities)\n\n plot_axis(ax, comps, C_BAR_COLOR)\n ax2 = ax.twinx() # instantiate a second axes that shares the same x-axis\n ax2.set_ylim([0, 1])\n plot_axis(ax2, purities, P_BAR_COLOR)\n for axis in ['top', 'bottom', 'left', 'right']:\n ax.spines[axis].set_linewidth(1.5)\n return ax2", "def 
plot(self):\n h = .02\n i=1\n bags_X = self.bags_X\n bags_y = self.bags_y\n fig1 = plt.figure(figsize=(45, 9))\n\n \n cm = plt.cm.RdBu\n cm_bright = ListedColormap(['#FF0000', '#0000FF'])\n \n for model in self.models:\n ax = plt.subplot(1, len(self.models) , i)\n X = pd.DataFrame(bags_X[i-1])\n y = pd.Series(bags_y[i-1])\n x_min, x_max = X[X.columns[0]].min() - .5, X[X.columns[0]].max() + .5\n y_min, y_max = X[X.columns[1]].min() - .5, X[X.columns[1]].max() + .5\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n Z = np.array(model.predict(pd.DataFrame(np.c_[xx.ravel(), yy.ravel()], columns=X.columns)))\n # print(Z[12])\n Z = Z.reshape(xx.shape)\n ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)\n ax.scatter(X[X.columns[0]], X[X.columns[1]], c=y, cmap=cm_bright, edgecolors='k')\n # size=[1000*w for w in self.weights[i-1]]\n ax.set_xlim(xx.min(), xx.max())\n ax.set_ylim(yy.min(), yy.max())\n ax.set_xlabel(str(X.columns[0]))\n ax.set_ylabel(str(X.columns[1]))\n plt.title(\"Estimator \"+str(i))\n i+=1\n \n fig2 = plt.figure(figsize=(9,9))\n X = self.X\n y = self.y\n ax2 = plt.subplot(1,1,1)\n x_min, x_max = X[X.columns[0]].min() - .5, X[X.columns[0]].max() + .5\n y_min, y_max = X[X.columns[1]].min() - .5, X[X.columns[1]].max() + .5\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n Z = np.array(self.predict(pd.DataFrame(np.c_[xx.ravel(), yy.ravel()], columns=X.columns)))\n Z = Z.reshape(xx.shape)\n ax2.contourf(xx, yy, Z, cmap=cm, alpha=.8)\n # size=[1000*w for w in self.weights[i-2]]\n ax2.scatter(X[X.columns[0]], X[X.columns[1]], c=y, cmap=cm_bright, edgecolors='k')\n ax2.set_xlim(xx.min(), xx.max())\n ax2.set_ylim(yy.min(), yy.max())\n plt.title(\"Combined Decision Surface\")\n \n plt.tight_layout()\n plt.show()\n\n return [fig1,fig2]", "def plot(self, x, y, weights=None, title='Linear Classification'):\n if np.array(x).size == 0:\n return\n \n # Process data, sorting by label\n possibleLabels = list(set(y))\n sortedX1 = {}\n sortedX2 = {}\n for label in possibleLabels:\n sortedX1[label] = []\n sortedX2[label] = []\n \n for i in range(len(x)):\n sortedX1[y[i]].append(x[i][0])\n sortedX2[y[i]].append(x[i][1])\n \n x1min = float(\"inf\")\n x1max = float(\"-inf\")\n for x1Values in sortedX1.values():\n x1min = min(min(x1Values), x1min)\n x1max = max(max(x1Values), x1max)\n x2min = float(\"inf\")\n x2max = float(\"-inf\")\n for x2Values in sortedX2.values():\n x2min = min(min(x2Values), x2min)\n x2max = max(max(x2Values), x2max)\n\n x1min = int(math.floor(x1min))\n x1max = int(math.ceil(x1max))\n x2min = int(math.floor(x2min))\n x2max = int(math.ceil(x2max))\n\n width = x1max-x1min+3\n height = x2max-x2min+3\n self.initPlot(x1min, x2min, width, height)\n \n gameState = self.blankGameState.deepCopy()\n \n gameState.agentStates = []\n \n # Add ghost/pacman at each point\n for (labelIndex, label) in enumerate(possibleLabels):\n pointsX1 = sortedX1[label]\n pointsX2 = sortedX2[label]\n for (px, py) in zip(pointsX1, pointsX2):\n point = (px+self.xShift, py+self.yShift)\n agent = AgentState( Configuration( point, Directions.STOP), False)\n agent.isPacman = (labelIndex==0) \n if labelIndex==2:\n agent.scaredTimer = 1\n gameState.agentStates.append(agent)\n\n# self.initialize(gameState)\n graphicsUtils.clear_screen()\n self.infoPane = InfoPane(gameState.layout, self.gridSize)\n self.drawStaticObjects(gameState)\n self.drawAgentObjects(gameState)\n\n graphicsUtils.changeText(self.infoPane.scoreText, title)\n graphicsUtils.refresh()\n 
graphicsUtils.sleep(1)\n\n if weights is not None:\n self.setWeights(weights)", "def plot_proba_function(clf, ax=None):\n fn = lambda x: clf.predict_proba(x)[0][0]\n plot_decision_function(fn, [0, 0.5, 1], ax)", "def plotThresholds (df, attack_df): \n global episod_limit\n \n ret = getThresholds (df, attack_df)\n thresholds = ret[0]\n rewards = ret[1]\n rewards_constant = ret[2]\n\n plt.plot(np.arange (0, episod_limit + 2, 1), thresholds, marker = 'None',\n linestyle = '-', color = 'k', label = 'Threshold')\n plt.xlabel ('Time')\n plt.ylabel ('Threshold')\n plt.grid ()\n plt.legend (loc='best')\n plt.savefig (\"figures/threshold.png\")\n plt.close ()\n return (rewards, rewards_constant, thresholds)", "def plot_decision_contour(pred_func, X, y, labels, targetdir = '.', matrix = 'numpy', reso=50, npoints=400):\n \n print(__name__ + '.plot_decision_contour ...')\n MAXP = min(npoints, X.shape[0])\n D = X.shape[1]\n pad = 0.5\n\n for dim1 in tqdm(range(D)) :\n x_min, x_max = X[:, dim1].min() - pad, X[:, dim1].max() + pad\n for dim2 in range(D) :\n if dim2 <= dim1 :\n continue\n\n # (x,y)-plane limits\n y_min, y_max = X[:, dim2].min() - pad, X[:, dim2].max() + pad\n\n # Grid points\n PX,PY = np.meshgrid(np.linspace(x_min, x_max, reso), np.linspace(y_min, y_max, reso))\n \n # Function values through 'pred_func' lambda \n Z = np.zeros((reso*reso, D))\n Z[:, dim1] = PX.ravel()\n Z[:, dim2] = PY.ravel()\n\n signalclass = 1\n if (matrix == 'torch'):\n Z = pred_func(torch.tensor(Z, dtype=torch.float32))\n Z = Z[:, signalclass].detach().numpy() # 2 output units\n if (matrix == 'numpy'):\n Z = pred_func(Z)\n if (matrix == 'xgboost'):\n Z = pred_func(xgboost.DMatrix(data = Z))\n\n Z = Z.reshape(PX.shape)\n fig, axs = plt.subplots()\n\n # Contour\n cs = plt.contourf(PX, PY, Z, cmap = plt.cm.Spectral)\n\n # Samples as dots\n plt.scatter(X[0:MAXP, dim1], X[0:MAXP, dim2], c = y[0:MAXP], cmap = plt.cm.binary)\n\n plt.xlabel('X[%d]' % dim1 + ' (%s)' % labels[dim1])\n plt.ylabel('X[%d]' % dim2 + ' (%s)' % labels[dim2])\n plt.colorbar(cs, ticks = np.linspace(0.0, 1.0, 11))\n \n plt.savefig(targetdir + str(dim1) + \"_\" + str(dim2) + \".pdf\", bbox_inches='tight')\n plt.close()", "def plot(self) :\r\n pos = np.nonzero(self.y > 0) # matlab: find(y > 0)\r\n neg = np.nonzero(self.y < 0) # matlab: find(y < 0)\r\n plt.plot(self.X[pos,0], self.X[pos,1], 'b+', markersize=5)\r\n plt.plot(self.X[neg,0], self.X[neg,1], 'ro', markersize=5)\r\n plt.show()", "def plot_balancer_results_per_classifier(data_balancer_results_per_classifier, parameter=(2, \"Balanced Accuracy\")):\n classifier_arr = []\n color = iter(cm.Set1(np.linspace(0, 1, len(data_balancer_results_per_classifier) + 1)))\n mean_classifier_arr = [0] * len(data_balancer_results_per_classifier[0][1])\n for (classifier_name, data_balancer_results) in data_balancer_results_per_classifier:\n individual_data_balance_plot = []\n x = 0\n for (data_balancer_name, result_arr) in data_balancer_results:\n individual_data_balance_plot.append(result_arr[parameter[0]]) # Average True rate\n mean_classifier_arr[x] += result_arr[parameter[0]]\n x += 1\n classifier_arr.append(individual_data_balance_plot)\n\n classifier_arr.append([value / float(len(data_balancer_results_per_classifier)) for value in mean_classifier_arr])\n\n fig = plt.figure(figsize=(12, 10))\n\n classifiers = np.arange(len(classifier_arr))\n data_balancers = np.arange(len(classifier_arr[0])) * 3\n bar_width = 0.2\n opacity = 0.9\n\n for i in range(len(classifier_arr)):\n if i + 1 != len(classifier_arr):\n label 
= data_balancer_results_per_classifier[i][0]\n else:\n label = \"Mean classification\"\n\n plt.bar(data_balancers + (i * bar_width), classifier_arr[i], bar_width,\n alpha=opacity,\n color=color.next(),\n label=label)\n\n plt.locator_params(axis='y', nbins=10)\n plt.xlabel(\"Data balance algorithm\")\n plt.ylabel(parameter[1])\n plt.legend(loc=\"lower right\", fancybox=True, frameon=True)\n plt.title(\"{0} per data balance algorithm\".format(parameter[1]))\n plt.ylim([0.0, 1.00])\n data_balance_labels = [filter(str.isupper, data_balance_name) if data_balance_name != \"None\" and len(filter(str.isupper, data_balance_name)) < 6 else data_balance_name for\n (data_balance_name, _) in data_balancer_results_per_classifier[0][1]]\n plt.xticks(data_balancers + (bar_width / 2) * len(classifiers), data_balance_labels)\n\n current_time = datetime.now().strftime(\"%Y-%m-%d_%H-%M-%S\")\n plt.savefig(os.path.dirname(os.path.realpath(__file__)) + \"/../results/data_balancer_results_per_classifier_plot_{0}_{1}.png\".format(parameter[1], current_time))\n plt.close(fig)", "def validate_and_plot_one_class(normal,outliers):\n print \"running a validation\"\n \n #classifier\n \n train_data = genfromtxt(normal, delimiter='\\t',skip_header=0)\n test_data = genfromtxt(outliers, delimiter='\\t',skip_header=0)\n\n outliers_proportion = float(test_data.shape[0])/(float(train_data.shape[0])+float(test_data.shape[0]))\n outliers_proportion=0.01\n\n clf = OneClassSVMClassifier(normal,outliers_proportion,0.95,0.05)\n #processing data without targets\n X_train = clf.scale(train_data)\n X_test = clf.scale(test_data)\n\n y_pred_train = clf.predict(X_train)\n y_pred_test = clf.predict(X_test)\n n_error_train = y_pred_train[y_pred_train == 1].size\n n_error_test = y_pred_test[y_pred_test == -1].size\n \n X=np.vstack((X_train,X_test))\n y_score = clf.predict(X)\n\n Y1=np.ones((X_train.shape[0],))\n Y2=(np.ones((X_test.shape[0],))*-1)\n y_true=np.concatenate((Y1,Y2))\n \n # Compute ROC curve and area the curve\n fpr, tpr, thresholds = metrics.roc_curve(y_true,y_score)\n roc_auc = metrics.auc(fpr, tpr)\n print \"Area under the ROC curve : %f\" % roc_auc\n \n # Plot ROC curve\n\n fig = matplotlib.pyplot.gcf()\n fig.set_size_inches(6.5,6.5)\n #font = {'family' : 'sans-serif',\n # 'weight' : 'normal',\n # 'size' : 18}\n\n #matplotlib.rc('font', **font)\n pl.clf()\n #modify plot area\n #modify tick label font size\n ax = pl.gca() # get the current axes\n for l in ax.get_xticklabels() + ax.get_yticklabels():\n l.set_fontsize('x-large')\n\n #color from html code table, linewidth x2.0\n pl.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc,color=\"#00CC66\",linewidth=3.0)\n# pl.plot(fpr_rbf, tpr_rbf, label='RBF: ROC curve (area = %0.2f)' % roc_auc_rbf,color=\"#009900\",linewidth=3.0)\n# pl.plot(fpr_sigmoid, tpr_sigmoid, label='Sigmoid: ROC curve (area = %0.2f)' % roc_auc_sigmoid,color='0.55',linewidth=2.0)\n# pl.plot(fpr_linear, tpr_linear, label='Linear: ROC curve (area = %0.2f)' % roc_auc_linear,color='0.70',linewidth=2.0)\n# pl.plot(fpr_poly, tpr_poly, label='Poly: ROC curve (area = %0.2f)' % roc_auc_poly,color='0.85',linewidth=2.0)\n #for more lines with different kernels\n #default_kernel=\"sigmoid\";clf = hermes.OneClassSVMClassifier(train,0.00,0.0,0.0595,default_kernel);y_score_sigmoid = clf.predict(X);fpr_sigmoid, tpr_sigmoid, thresholds_sigmoid = metrics.roc_curve(y_true,y_score_sigmoid);roc_auc_sigmoid = metrics.auc(fpr_sigmoid, tpr_sigmoid);fpr_sigmoid;tpr_sigmoid;roc_auc_sigmoid\n #pl.plot(fpr_rbf, tpr_rbf, 
label='RBF: ROC curve (area = %0.2f)' % roc_auc_rbf,color=\"#00CC66\",linewidth=2.0)\n #pl.plot(fpr_sigmoid, tpr_sigmoid, label='Sigmoid: ROC curve (area = %0.2f)' % roc_auc_sigmoid,color='0.55',linewidth=2.0)\n #pl.plot(fpr_linear, tpr_linear, label='Linear: ROC curve (area = %0.2f)' % roc_auc_linear,color='0.70',linewidth=2.0)\n #pl.plot(fpr_poly, tpr_poly, label='Poly: ROC curve (area = %0.2f)' % roc_auc_poly,color='0.85',linewidth=2.0)\n pl.plot([0, 1], [0, 1], 'k--')\n #modify base font size\n pl.xlim([0.0, 1.0])\n pl.ylim([0.0, 1.0])\n pl.xlabel('False positive rate',fontsize='xx-large')\n pl.ylabel('True positive rate',fontsize='xx-large')\n #pl.title('Receiver operating characteristic example')\n pl.legend(loc=\"lower right\",prop={'size':18})\n pl.show()\n sys.exit(0)\n ##the code here below was used to create the ROC curve for europar 13", "def visualize(self, reduced_data):\n\t\t# Step size of the mesh. Decrease to increase the quality of the VQ.\n\t\th = .02 # point in the mesh [x_min, m_max]x[y_min, y_max].\n\t\t\n\t\t# Plot the decision boundary. For that, we will assign a color to each\n\t\tx_min, x_max = reduced_data[:, 0].min() + 1, reduced_data[:, 0].max() - 1\n\t\ty_min, y_max = reduced_data[:, 1].min() + 1, reduced_data[:, 1].max() - 1\n\t\txx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n\n\t\t# Obtain labels for each point in mesh. Use last trained model.\n\t\tZ = self.estimator.predict(np.c_[xx.ravel(), yy.ravel()])\n\n\t\t# Put the result into a color plot\n\t\tZ = Z.reshape(xx.shape)\n\t\t\n\t\tplt.figure(1)\n\t\tplt.clf()\n\t\tplt.imshow(Z, interpolation='nearest',\n\t\t extent=(xx.min(), xx.max(), yy.min(), yy.max()),\n\t\t cmap=plt.cm.Paired,\n\t\t aspect='auto', origin='lower')\n\n\t\tplt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=4)\n\t\t# Plot the centroids as a white X\n\t\tcentroids = self.estimator.cluster_centers_\n\t\tplt.scatter(centroids[:, 0], centroids[:, 1],\n\t\t marker='x', s=169, linewidths=3,\n\t\t color='w', zorder=10)\n\t\tplt.title('K-means clustering with random data (PCA-reduced data)\\n'\n\t\t 'Centroids are marked with white cross')\n\t\tplt.xlim(x_min, x_max)\n\t\tplt.ylim(y_min, y_max)\n\t\tplt.xticks(())\n\t\tplt.yticks(())\n\t\tplt.show()", "def visualise_dataset_balancer_results(results, range=(-0.5, 0.5),\n colors=(\"#64B3DE\", \"#1f78b4\", \"#B9B914\", \"#FBAC44\", \"#bc1659\", \"#33a02c\", \"grey\", \"#b15928\", \"#6a3d9a\", \"#e31a1c\", \"#6ABF20\", \"#ff7f00\", \"#6a3d9a\"),\n exclude=(\"SVM (linear)\", \"Logistic regression\", \"Random forest\")):\n current_time = datetime.now().strftime(\"%Y-%m-%d_%H-%M-%S\")\n file_name = \"raw_dump_{0}.txt\".format(current_time)\n with open(os.path.dirname(os.path.realpath(__file__)) + \"/../results/\" + file_name, \"wb\") as output_file:\n output_file.write(str(results))\n sns.set(style='ticks')\n fig = plt.figure(figsize=(10, 8))\n ax = fig.add_subplot(1, 1, 1)\n markers = [\"s\", \"d\", \"o\", \"^\", \"*\"]\n size = [150, 200, 200, 200, 250]\n hatches = [None, \"////\", \"..\"]\n\n # Move left y-axis and bottom x-axis to centre, passing through (0,0)\n ax.spines['left'].set_position('center')\n ax.spines['bottom'].set_position((\"axes\", 0.5))\n\n # Eliminate upper and right axes\n ax.spines['right'].set_color('none')\n ax.spines['top'].set_color('none')\n\n # Show ticks in the left and lower axes only\n ax.xaxis.set_ticks_position('bottom')\n ax.yaxis.set_ticks_position('left')\n ax.set_axis_on()\n ax.spines['left'].set_color('black')\n 
ax.spines['bottom'].set_color('black')\n plt.xlabel(\"Change in TPR\")\n plt.ylabel(\"Change in TNR\")\n\n ax.xaxis.set_label_coords(0.1, 0.53)\n ax.yaxis.set_label_coords(0.53, 0.9)\n\n plt.ylim(range[0], range[1])\n plt.xlim(range[0], range[1])\n balancer_labels = ([], [])\n classifier_labels = ([], [])\n data_set_index = 0\n for (data_set, dataset_result) in results:\n\n none_true_pos_per_classifier = {}\n none_true_neg_per_classifier = {}\n\n for (classifier_description, result_arr) in dataset_result:\n for (balancer_description, results) in result_arr:\n if balancer_description == \"None\":\n none_true_pos_per_classifier[classifier_description] = results[3]\n none_true_neg_per_classifier[classifier_description] = results[4]\n break\n\n i = 0\n for (classifier_description, result_arr) in dataset_result:\n if classifier_description in exclude:\n continue\n balancer_index = 0\n for (balancer_description, results) in result_arr:\n if balancer_description != \"None\":\n if data_set_index == 0 and balancer_index == 0:\n classifier_labels[0].append(mpatches.Patch(color=colors[i], label=classifier_description, alpha=0.8))\n classifier_labels[1].append(classifier_description)\n ax.scatter(results[3] - none_true_pos_per_classifier[classifier_description], results[4] - none_true_neg_per_classifier[classifier_description],\n marker=markers[balancer_index % len(markers)], hatch=hatches[balancer_index % len(hatches)], s=size[balancer_index % len(markers)], alpha=0.8, color=colors[i],\n edgecolor=\"black\" if colors[i] != \"black\" else \"grey\", zorder=balancer_index % len(markers), lw=0.8)\n # Work around to get legend entries correct\n pt = ax.scatter(-99999999999, -9999999999, marker=markers[balancer_index % len(markers)], hatch=hatches[balancer_index % len(hatches)], s=200, alpha=0.8, color=\"white\",\n edgecolor=\"black\", zorder=data_set_index, lw=0.8)\n if i == 0:\n balancer_labels[0].append(pt)\n balancer_labels[1].append(balancer_description)\n balancer_index += 1\n i += 1\n data_set_index += 1\n legend = plt.legend(balancer_labels[0] + classifier_labels[0], balancer_labels[1] + classifier_labels[1], loc='lower center', bbox_to_anchor=(0.5, -0.2), fancybox=False, frameon=False, ncol=7)\n legend.get_frame().set_facecolor('#ffffff')\n\n sns.despine()\n current_time = datetime.now().strftime(\"%Y-%m-%d_%H-%M-%S\")\n plt.savefig(os.path.dirname(os.path.realpath(__file__)) + \"/../results/classifier_dataset_plt_{0}.png\".format(current_time), bbox_extra_artists=((legend,)), bbox_inches='tight')\n plt.close(fig)", "def visualize_predictions(img, predictions, probabilities, x0, y0, windowsize):\n\n # show image\n fig = plt.figure(figsize=(12,12))\n ax = plt.subplot(111)\n plt.imshow(img)\n plt.xticks([])\n plt.yticks([])\n\n # superimpose boxes\n for i, (x,y) in enumerate(zip(x0,y0)):\n if (predictions[i] != \"other\"):\n\n # Create a Rectangle patch\n rect = patches.Rectangle((x,y), windowsize[i], windowsize[i], linewidth=2, edgecolor='r', facecolor='none')\n plt.text(x+5, y+20, predictions[i] + f'/{probabilities[i]:.2f}', fontsize=10, bbox=dict(facecolor='red', alpha=0.5, edgecolor='r'))\n\n # Add the patch to the Axes\n ax.add_patch(rect)\n\n return fig", "def decision_boundary(self, w, min_x, max_x):\n if np.size(w) < 3:\n w = np.append(w, np.zeros(1))\n x = np.array([min_x, max_x])\n y = -1 * ((w[0] * x) - w[2]) / w[1]\n return x, y", "def plot_tree_decision_function(tree, X, y, ax=None):\n import numpy as np\n from scipy import ndimage\n\n plt.figure(figsize=(12, 10))\n h = 0.02\n x_min, 
x_max = 0, 100\n y_min, y_max = 0, 100\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n\n Z = tree.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]\n Z = Z.reshape(xx.shape)\n faces = tree.tree_.apply(\n np.c_[xx.ravel(), yy.ravel()].astype(np.float32))\n faces = faces.reshape(xx.shape)\n border = ndimage.laplace(faces) != 0\n if ax is None:\n ax = plt.gca()\n ax.scatter(X.iloc[:, 0], X.iloc[:, 1],\n c=np.array(['tab:blue', 'tab:orange'])[y],\n s=60, alpha=0.7,\n vmin=0, vmax=1)\n levels = np.linspace(0, 1, 101)\n contours = ax.contourf(xx, yy, Z, alpha=.4, levels=levels,\n cmap=blue_orange_cmap)\n ax.get_figure().colorbar(contours, ticks=np.linspace(0, 1, 11))\n ax.scatter(xx[border], yy[border], marker='.', s=1)\n ax.set_xlabel(X.columns[0])\n ax.set_ylabel(X.columns[1])\n ax.set_xlim([x_min, x_max])\n ax.set_ylim([y_min, y_max])\n sns.despine(offset=10)\n plt.savefig(HERE / \"simple_decision_tree_adult_census.png\")", "def plot_diagram(probs, labels, y_axis='accuracy'):\n probs_labels = [(prob, labels[i]) for i, prob in enumerate(probs)]\n probs_labels = np.array(sorted(probs_labels, key=lambda x: x[0]))\n window_len = int(len(labels)/100.)\n calibration_errors = []\n confidences = []\n accuracies = []\n # More interesting than length of the window (which is specific to this\n # window) is average distance between datapoints. This normalizes by dividing\n # by the window length.\n distances = []\n for i in range(len(probs_labels)-window_len):\n distances.append((\n probs_labels[i+window_len, 0] - probs_labels[i, 0])/float(window_len))\n # It's pretty sketchy to look for the 100 datapoints around this one.\n # They could be anywhere in the probability simplex. This introduces bias.\n mean_confidences = mean(probs_labels[i:i + window_len, 0])\n confidences.append(mean_confidences)\n class_accuracies = mean(probs_labels[i:i + window_len, 1])\n accuracies.append(class_accuracies)\n calibration_error = class_accuracies-mean_confidences\n calibration_errors.append(calibration_error)\n\n if y_axis == 'accuracy':\n fig, ax = plt.subplots()\n fig.set_size_inches(5, 5)\n xbins = [i/10. for i in range(11)]\n ax.plot(confidences, accuracies, color='green')\n ax.plot(xbins, xbins, color='orange')\n ax.set_xlabel('Model Confidence')\n ax.set_ylabel('Model Accuracy')\n elif y_axis == 'error':\n fig, ax = plt.subplots()\n fig.set_size_inches(5, 5)\n xbins = [i/10. 
for i in range(11)]\n ax.plot(confidences, calibration_errors, color='green')\n ax.plot(xbins, xbins, color='orange')\n ax.set_xlabel('Model Confidence')\n ax.set_ylabel('Model Calibration Error')\n ax.set_title('Reliability Diagram', fontsize=20)\n return fig", "def verif_valid(model, X_val, y_val, cut=0.5):\n if type(model) == Sequential:\n X_val = np.array(X_val)\n reality = y_val\n \n if ((type(model) == svm.classes.OneClassSVM) | (type(model) == lgb.basic.Booster) | (type(model) == svm.classes.LinearSVC) | (type(model) == Sequential)):\n pred_score = model.predict(X_val)\n if (type(model) == svm.classes.OneClassSVM):\n pred_score = np.where(pred_score == -1, 1, 0)\n else:\n pred_score = model.predict_proba(X_val)[:,1]\n \n plt.hist(pred_score)\n plt.title('Distribution of the prediction score')\n plt.show()\n #if (type(model) == Sequential):\n # predictions = np.where(pred_score > 0.5, 1, 0)\n #else:\n predictions = np.where(pred_score > cut, 1, 0)\n \n print('Matrice de confusion :')\n conf_mat = confusion_matrix(reality, predictions)\n print(pd.DataFrame(conf_mat))\n print('Associated metrics :')\n print(classification_report(reality, predictions))\n fpr, tpr, _ = roc_curve(y_val, pred_score)\n \n roc_auc = auc(fpr, tpr)\n plt.plot(fpr, tpr, 'r-', lw=4)\n actual_fpr = conf_mat[1, 0] / (conf_mat[1, 0] + conf_mat[0, 0])\n actual_tpr = conf_mat[1, 1] / (conf_mat[1, 1] + conf_mat[0, 1])\n plt.plot(actual_fpr, actual_tpr, 'bo', lw=10)\n plt.xlabel('False-Positive Rate')\n plt.ylabel('True-Positive Rate')\n plt.title('ROC curve (with AUC = ' + str(round(roc_auc, 6)) + ')')\n plt.plot([0, 1], [0, 1], 'k-')\n plt.show()\n print('Score AUC : ' + str(roc_auc))\n print('Accuracy : ' + str(metrics.accuracy_score(y_val, predictions)))", "def plotDistributionWithLimitsRefine(lXs, llYs, lKClassif,out=\"out.png\", title=\"title\", xax=\"xax\", yax=\"yax\",legend=\"\"):\n\n fig = plt.Figure(figsize=(40,20))\n fig.suptitle(title, fontsize=32)\n nbPlots = len(llYs)\n sqrt = int(math.ceil(math.sqrt(nbPlots)))\n ymax = 0.0\n for i,val in enumerate(llYs):\n if lKClassif[i] != \"refine\":\n ymax = max(max(val[0]),ymax)\n ymaxCurrent = max(max(val[2]),ymax)\n ymax = ymax*1.05\n xmax = 147\n gs = gridspec.GridSpec(1,2) \n ax = fig.add_subplot(gs[0])\n gsLimit = gridspec.GridSpecFromSubplotSpec(sqrt,sqrt, subplot_spec=gs[1])\n for i,val in enumerate(llYs):\n if lKClassif[i] != \"refine\":\n ax.plot(lXs,val[0],color=Graphics.lColors[i%25])\n axCurrent = fig.add_subplot(gsLimit[i]) \n axCurrent.fill_between(lXs, val[1], val[2], alpha=0.35, edgecolor='black', facecolor=Graphics.lColors[i%25])\n axCurrent.set_title(\"Cluster K{}, (position: {})\".format(i,lKClassif[i]))\n axCurrent.fill_between(lXs, val[3], val[4], alpha=0.85, edgecolor='darkgray', facecolor='lightgray')\n axCurrent.plot(lXs,val[0],color=Graphics.lColors[i%25])\n axCurrent.set_ylim(0,ymaxCurrent)\n axCurrent.set_xlim(1,xmax)\n axCurrent.text(10, ymaxCurrent*0.90, \"#nucleosomes: {}\".format(legend[i]), fontsize=12)\n axis_font = {'size':'28'}\n ax.set_ylim(0,ymax)\n ax.set_xlim(1,xmax)\n ax.legend([\"K{}\".format(x) for x in range(0,nbPlots)])\n ax.set_title(\"all nucleosomes\", **axis_font)\n ax.set_xlabel(xax, **axis_font)\n ax.set_ylabel(yax, **axis_font)\n ax.tick_params(labelsize=20)\n canvas = FigureCanvasAgg(fig)\n canvas.print_figure(out, dpi=80)", "def plot_prediction(self, img, probs, classes):\n\n # Convert results to dataframe for plotting\n result = pd.DataFrame({\"p\": probs}, index=classes)\n\n # Show the image\n fig = 
plt.figure(figsize=(16, 5))\n ax = plt.subplot(1, 2, 1)\n ax.imshow(img)\n\n # Set title to be the actual class\n ax.set_title(\"\", size=20)\n\n ax = plt.subplot(1, 2, 2)\n # Plot a bar plot of predictions\n result.sort_values(\"p\")[\"p\"].plot.barh(color=\"blue\", edgecolor=\"k\", ax=ax)\n plt.xlabel(\"Predicted Probability\")\n plt.tight_layout()\n\n return fig", "def draw_predictions(self):\n self.vis.draw_predictions()", "def plot_bindetect(motifs, cluster_obj, conditions, args):\r\n\twarnings.filterwarnings(\"ignore\")\r\n\r\n\tcond1, cond2 = conditions\r\n\tn_IDS = cluster_obj.n\r\n\r\n\t#Link information from motifs / clusters\r\n\tdiff_scores = {}\r\n\tfor motif in motifs:\r\n\t\tdiff_scores[motif.prefix] = {\"change\": motif.change,\r\n\t\t\t\t\t\t\t\t\t\"pvalue\": motif.pvalue,\r\n\t\t\t\t\t\t\t\t\t\"log10pvalue\": -np.log10(motif.pvalue) if motif.pvalue > 0 else -np.log10(1e-308),\t#smallest possible number before python underflows\r\n\t\t\t\t\t\t\t\t\t\"volcano_label\": motif.name,\t#shorter name\r\n\t\t\t\t\t\t\t\t\t\"overview_label\": \"{0} ({1})\".format(motif.name, motif.id) \t\t#the name which was output used in bindetect output\r\n\t\t\t\t\t\t\t\t\t}\r\n\t\r\n\txvalues = np.array([diff_scores[TF][\"change\"] for TF in diff_scores])\r\n\tyvalues = np.array([diff_scores[TF][\"log10pvalue\"] for TF in diff_scores])\r\n\r\n\t#### Define the TFs to plot IDs for ####\r\n\ty_min = np.percentile(yvalues[yvalues < -np.log10(1e-300)], 95)\t\r\n\tx_min, x_max = np.percentile(xvalues, [5,95])\r\n\r\n\tfor TF in diff_scores:\r\n\t\tif diff_scores[TF][\"change\"] < x_min or diff_scores[TF][\"change\"] > x_max or diff_scores[TF][\"log10pvalue\"] > y_min:\r\n\t\t\tdiff_scores[TF][\"show\"] = True\r\n\t\t\tif diff_scores[TF][\"change\"] < 0:\r\n\t\t\t\tdiff_scores[TF][\"color\"] = \"blue\"\r\n\t\t\telif diff_scores[TF][\"change\"] > 0:\r\n\t\t\t\tdiff_scores[TF][\"color\"] = \"red\"\r\n\t\t\telse:\r\n\t\t\t\tdiff_scores[TF][\"color\"] = \"black\" #if change was 0\r\n\t\telse:\r\n\t\t\tdiff_scores[TF][\"show\"] = False \r\n\t\t\tdiff_scores[TF][\"color\"] = \"black\"\r\n\r\n\tnode_color = cluster_obj.node_color\r\n\tIDS = np.array(cluster_obj.names)\r\n\t\r\n\t\"\"\"\r\n\t#Set cluster names\r\n\tfor motif_name in diff_scores:\r\n\t\tfor cluster in cluster_obj.clusters:\r\n\r\n\t\t\tif motif_name in cluster_obj.clusters[cluster][\"member_names\"]:\r\n\t\t\t\tdiff_scores[motif_name][\"cluster_name\"] = cluster_obj.clusters[cluster][\"cluster_name\"]\r\n\r\n\t\t\tif motif_name == cluster_obj.clusters[cluster][\"representative\"]:\r\n\t\t\t\tdiff_scores[TF][\"show\"] = True\r\n\t\t\t\tdiff_scores[motif_name][\"representative\"] = True\r\n\t\"\"\"\r\n\r\n\t#--------------------------------------- Figure --------------------------------#\r\n\r\n\t#Make figure\r\n\tno_rows, no_cols = 2,2\t\r\n\th_ratios = [1,max(1,n_IDS/25)]\r\n\tl = 10+7*(n_IDS/25) \t\t\t#length of plot\r\n\tlimit = 2**16/100-1\t\t\t\t#matplotlib limit of 2**16 pixels -> /100 to get figsize\r\n\tl = limit if l > limit else l \t#set cap on length\r\n\tfigsize = (8, l)\r\n\r\n\tfig = plt.figure(figsize = figsize)\r\n\tgs = gridspec.GridSpec(no_rows, no_cols, height_ratios=h_ratios)\r\n\tgs.update(hspace=0.0001, bottom=0.00001, top=0.999999)\r\n\r\n\tax1 = fig.add_subplot(gs[0,:])\t#volcano\r\n\tax2 = fig.add_subplot(gs[1,0])\t#long scatter overview\r\n\tax3 = fig.add_subplot(gs[1,1]) #dendrogram\r\n\t\r\n\t######### Volcano plot on top of differential values ########\r\n\tax1.set_title(\"BINDetect volcano plot\", fontsize=16, 
pad=20)\r\n\tax1.scatter(xvalues, yvalues, color=\"black\", s=5)\r\n\r\n\t#Add +/- 10% to make room for labels\r\n\tylim = ax1.get_ylim()\r\n\ty_extra = (ylim[1] - ylim[0]) * 0.1\r\n\tax1.set_ylim(ylim[0], ylim[1] + y_extra)\r\n\r\n\txlim = ax1.get_xlim()\r\n\tx_extra = (xlim[1] - xlim[0]) * 0.1\r\n\tlim = np.max([np.abs(xlim[0]-x_extra), np.abs(xlim[1]+x_extra)])\r\n\tax1.set_xlim(-lim, lim)\r\n\r\n\tx0,x1 = ax1.get_xlim()\r\n\ty0,y1 = ax1.get_ylim()\r\n\tax1.set_aspect((x1-x0)/(y1-y0))\t\t#square volcano plot\r\n\r\n\t#Decorate plot\r\n\tax1.set_xlabel(\"Differential binding score\")\r\n\tax1.set_ylabel(\"-log10(pvalue)\")\r\n\r\n\t########### Dendrogram over similarities of TFs #######\r\n\t\r\n\t#Only plot dendrogram if there was more than one TF\r\n\tn_ids = len(IDS)\r\n\tif n_ids > 1:\r\n\t\tdendro_dat = dendrogram(cluster_obj.linkage_mat, labels=list(IDS), no_labels=True, orientation=\"right\", ax=ax3, above_threshold_color=\"black\", link_color_func=lambda k: cluster_obj.node_color[k])\r\n\t\tlabels = dendro_dat[\"ivl\"]\t#Now sorted for the order in dendrogram\r\n\t\tax3.set_xlabel(\"Transcription factor distance\\n(Clusters below threshold are colored)\")\r\n\r\n\t\tax3.set_ylabel(\"Transcription factor clustering based on TFBS overlap\", rotation=270, labelpad=20)\r\n\t\tax3.yaxis.set_label_position(\"right\")\r\n\r\n\t\t#Set aspect of dendrogram/changes\r\n\t\tx0,x1 = ax3.get_xlim()\r\n\t\ty0,y1 = ax3.get_ylim()\r\n\t\tax3.set_aspect(((x1-x0)/(y1-y0)) * n_ids/10)\r\n\telse:\r\n\t\tax3.axis('off')\r\n\t\tlabels = IDS\r\n\r\n\t########## Differential binding scores per TF ##########\r\n\tax2.set_xlabel(\"Differential binding score\\n\" + \"(\" + cond2 + r' $\\leftarrow$' + r'$\\rightarrow$ ' + cond1 + \")\") #First position in comparison equals numerator in log2fc division\r\n\tax2.xaxis.set_label_position('bottom') \r\n\tax2.xaxis.set_ticks_position('bottom') \r\n\r\n\tno_labels = len(labels)\r\n\tax2.set_ylim(0.5, no_labels+0.5)\r\n\tax2.set_ylabel(\"Transcription factors\")\r\n\r\n\tax2.set_yticks(range(1,no_labels+1))\r\n\tax2.set_yticklabels([diff_scores[TF][\"overview_label\"] for TF in labels])\r\n\tax2.axvline(0, color=\"grey\", linestyle=\"--\") \t#Plot line at middle\r\n\r\n\t#Plot scores per TF\r\n\tfor y, TF in enumerate(labels):\t#labels are the output motif names from output\r\n\t\t\r\n\r\n\t\tidx = np.where(IDS == TF)[0][0]\r\n\t\tscore = diff_scores[TF][\"change\"]\r\n\r\n\t\t#Set coloring based on change/pvalue\r\n\t\tif diff_scores[TF][\"show\"] == True:\r\n\t\t\tfill = \"full\"\r\n\t\telse:\r\n\t\t\tfill = \"none\"\r\n\r\n\t\tax2.axhline(y+1, color=\"grey\", linewidth=1)\r\n\t\tax2.plot(score, y+1, marker='o', color=node_color[idx], fillstyle=fill)\r\n\t\tax2.yaxis.get_ticklabels()[y].set_color(node_color[idx])\r\n\r\n\t#Set x-axis ranges\r\n\tlim = np.max(np.abs(ax2.get_xlim()))\r\n\tax2.set_xlim((-lim, lim))\t#center on 0\r\n\r\n\t#set aspect\r\n\tx0,x1 = ax2.get_xlim()\r\n\ty0,y1 = ax2.get_ylim()\r\n\tax2.set_aspect(((x1-x0)/(y1-y0)) * n_IDS/10)\t\t#square volcano plot\r\n\r\n\tplt.tight_layout() #tight layout before setting ids in volcano plot\r\n\r\n\t######### Color points and set labels in volcano ########\r\n\ttxts = []\r\n\tfor TF in diff_scores:\r\n\t\tcoord = [diff_scores[TF][\"change\"], diff_scores[TF][\"log10pvalue\"]]\r\n\t\tax1.scatter(coord[0], coord[1], color=diff_scores[TF][\"color\"], s=4.5)\r\n\r\n\t\tif diff_scores[TF][\"show\"] == True:\r\n\t\t\ttxts.append(ax1.text(coord[0], coord[1], diff_scores[TF][\"volcano_label\"], 
fontsize=9))\r\n\r\n\t#Plot custom legend for colors\r\n\tlegend_elements = [Line2D([0],[0], marker='o', color='w', markerfacecolor=\"red\", label=\"Higher scores in {0}\".format(conditions[0])),\r\n\t\t\t\t\t\tLine2D([0],[0], marker='o', color='w', markerfacecolor=\"blue\", label=\"Higher scores in {0}\".format(conditions[1]))]\r\n\tl = ax1.legend(handles=legend_elements, loc=\"lower left\", framealpha=0.5)\r\n\tadjust_text(txts, ax=ax1, add_objects=[l], text_from_points=True, arrowprops=dict(arrowstyle='-', color='black', lw=0.5)) #, expand_text=(0.1,1.2), expand_objects=(0.1,0.1))\r\n\t\r\n\t\"\"\"\r\n\t#Add arrows to other cluster members\r\n\tprint(txts[0].__dict__)\r\n\tlabel_positions = {text._text:text for text in txts}\r\n\tprint(label_positions)\r\n\tfor TF in diff_scores:\r\n\t\tif diff_scores[TF][\"show\"]:\r\n\t\t\tcluster_name = diff_scores[TF][\"cluster_name\"]\r\n\t\t\t\r\n\t\t\tif cluster_name in label_positions: \r\n\t\t\t\tprint(cluster_name)\r\n\r\n\t\t\t\tpoint_x, point_y = diff_scores[TF][\"change\"], diff_scores[TF][\"log10pvalue\"]\r\n\t\t\t\ttext_x, text_y = label_positions[cluster_name]._x, label_positions[cluster_name]._y\r\n\t\t\t\tlen_x, len_y = text_x - point_x, text_y - point_y\r\n\r\n\t\t\t\tax1.arrow(point_x, point_y, len_x, len_y, linestyle=\"-\", color=\"black\", lw=0.5)\r\n\t\"\"\"\r\n\r\n\treturn(fig)", "def plot(self, ylog=False, category=\"Accuracy\", figsize=(12, 5)):\n if self.CV == False: # no Cross Validation set case\n fig, ax = plt.subplots(nrows=1, ncols=2, figsize=figsize)\n plt.suptitle(\"Training Curve for \" + self.loss, fontsize=12)\n ax[0].plot(range(1, len(self.trainError) + 1), self.trainError, 'g-', label='Training Error')\n ax[0].set_xlabel('Iteration')\n ax[0].set_ylabel(\"Error\")\n if ylog == True:\n ax[0].set_yscale('log')\n ax[0].legend()\n ax[0].grid('on')\n\n if category == \"Accuracy\":\n ax[1].plot(range(1, len(self.trainAcc) + 1), self.trainAcc, 'r-', label='Training Accuracy')\n ax[1].set_ylabel(\"Accuracy\")\n elif category == \"Error Rate\":\n ax[1].plot(range(1, len(self.trainAcc) + 1), 1 - np.array(self.trainAcc), 'r-', label='Training Error Rate')\n ax[1].set_ylabel(\"Error Rate\")\n # ax[1].set_ylim((0, 1))\n ax[1].set_xlabel('Iteration')\n ax[1].legend(loc='best')\n ax[1].grid('on')\n plt.show()\n if self.CV == True: # has Cross Validation set case\n fig, ax = plt.subplots(nrows=1, ncols=2, figsize=figsize)\n plt.suptitle(\"Training Curve for \" + self.loss, fontsize=12)\n ax[0].plot(range(1, len(self.trainError) + 1), self.trainError, 'g-', label='Training Error')\n ax[0].plot(range(1, len(self.cvError) + 1), self.cvError, 'r-', label='CV Error')\n ax[0].set_xlabel('Iteration')\n ax[0].set_ylabel(\"Error\")\n if ylog == True:\n ax[0].set_yscale('log')\n ax[0].legend()\n ax[0].grid('on')\n\n if category == \"Accuracy\":\n ax[1].plot(range(1, len(self.trainAcc) + 1), self.trainAcc, 'g-', label='Training Accuracy')\n ax[1].plot(range(1, len(self.cvAcc) + 1), self.cvAcc, 'r-', label='CV Accuracy')\n ax[1].set_ylabel(\"Accuracy\")\n elif category == \"Error Rate\":\n ax[1].plot(range(1, len(self.trainAcc) + 1), 1 - np.array(self.trainAcc), 'g-', label='Training Error Rate')\n ax[1].plot(range(1, len(self.cvAcc) + 1), 1 - np.array(self.cvAcc), 'r-', label='CV Error Rate')\n ax[1].set_ylabel(\"Error Rate\")\n # ax[1].set_ylim((0, 1))\n ax[1].set_xlabel('Iteration')\n ax[1].legend(loc='best')\n ax[1].grid('on')\n plt.show()\n\n return fig, ax", "def plot_scenario(self, ax):\n ax.set_xlim((0,10))\n ax.set_ylim((0,10))\n\n # 
Unpack region's sizes and positions\n obs_x = self.obstacle_vert[0]\n obs_y = self.obstacle_vert[2]\n obs_w = self.obstacle_vert[1]-obs_x\n obs_h = self.obstacle_vert[3]-obs_y\n\n goal_x = self.goal_vert[0]\n goal_y = self.goal_vert[2]\n goal_w = self.goal_vert[1]-goal_x\n goal_h = self.goal_vert[3]-goal_y\n\n target1_x = self.target1_vert[0]\n target1_y = self.target1_vert[2]\n target1_w = self.target1_vert[1]-target1_x\n target1_h = self.target1_vert[3]-target1_y\n\n target2_x = self.target2_vert[0]\n target2_y = self.target2_vert[2]\n target2_w = self.target2_vert[1]-target2_x\n target2_h = self.target2_vert[3]-target2_y\n\n obstacle = Rectangle((obs_x,obs_y),obs_w,obs_h,color='red',alpha=0.5)\n goal = Rectangle((goal_x,goal_y),goal_w,goal_h, color='green',alpha=0.5)\n\n target1 = Rectangle((target1_x,target1_y),target1_w,target1_h, color='blue',alpha=0.5)\n target2 = Rectangle((target2_x,target2_y),target2_w,target2_h, color='blue',alpha=0.5)\n\n ax.add_patch(obstacle)\n ax.add_patch(goal)\n ax.add_patch(target1)\n ax.add_patch(target2)", "def binaryBoundedPlot(self):\n\n self.initPlotY()\n for iPlot in range(len(self.plotFineX)):\n thisX = self.plotFineX[iPlot]\n\n ### call binaryBoundedOne instead\n thisLnY = self.binaryBoundedOne(thisX, \\\n self.hyper[0][iPlot], \\\n self.hyper[1][iPlot])\n thisY = np.exp(thisLnY)\n\n #thisY = thisX*0. + 1. ### Good unless otherwise stated.\n #bOut = (thisX <= self.hyper[0][iPlot]) | \\\n # (thisX > self.hyper[1][iPlot])\n #thisY[bOut] = 0.\n\n self.plotFineY[iPlot] = thisY", "def plot_predictions(self):\n\n plt.title(\"Targets vs. Predictions\")\n plt.plot(self.T, label=\"Targets\")\n plt.plot(self.Y, label=\"Predictions\")\n plt.xlabel(\"Sample number\")\n plt.legend()\n plt.show()", "def vis_detections(im, class_name, dets, image_name, thresh=0.5):\n inds = np.where(dets[:, -1] >= thresh)[0]\n max_inds = 0\n max_score = 0.0\n if len(inds) == 0:\n # print('Warning: no target detected!')\n return\n elif len(inds) > 1:\n # print('Warning: ' + str(len(inds)) + ' targets detected! Choose the highest one')\n for i in inds:\n if(dets[i, -1] > max_score):\n max_inds = i\n max_score = dets[i, -1]\n\n# im = im[:, :, (2, 1, 0)]\n# fig, ax = plt.subplots(figsize=(12, 12))\n# ax.imshow(im, aspect='equal')\n # for i in inds:\n # bbox = dets[i, :4]\n # score = dets[i, -1]\n #print max_inds\n bbox = dets[max_inds, :4]\n score = dets[max_inds, -1]\n\n# ax.add_patch(\n# plt.Rectangle((bbox[0], bbox[1]),\n# bbox[2] - bbox[0],\n# bbox[3] - bbox[1], fill=False,\n# edgecolor='red', linewidth=3.5)\n# )\n# ax.text(bbox[0], bbox[1] - 2,\n# '{:s} {:.3f}'.format(class_name, score),\n# bbox=dict(facecolor='blue', alpha=0.5),\n# fontsize=14, color='white')\n\n # end for\n #print image_name, class_name\n #print score\n # file.writelines([image_name,'\\t',class_name,'\\t',str(score),'\\n'])\n # ax.set_title(('{} detections with '\n # 'p({} | box) >= {:.1f}').format(class_name, class_name,\n # thresh),fontsize=14)\n # plt.axis('off')\n # plt.tight_layout()\n # plt.draw()\n\t### SAVE IMAGES ? 
###\n save_img_dir = os.path.join(cfg.ROOT_DIR, 'result', 'test_img')\n # if not os.path.exists(save_img_dir):\n # os.makedirs(save_img_dir)\n # plt.savefig(os.path.join(save_img_dir, image_name + '_' + class_name))\n\n boxes = {'boxes': ((bbox[0], bbox[1]), bbox[2] - bbox[0], bbox[3] - bbox[1])}\n \n save_mat_dir = os.path.join(cfg.ROOT_DIR, 'result', 'test_box')", "def plot(model, samples):\n # compute responsiblity values\n resp = model.predict_proba(samples)\n\n # plot\n plt.axis('equal')\n plt.scatter(samples[:,0], samples[:,1], c=resp)\n plt.show()", "def plot(self, c='k'):\n plt.plot(self.geometry.convex_hull.exterior.xy[0], self.geometry.convex_hull.exterior.xy[1], c)\n plt.axis('equal')", "def plot_convergence(self, x, y, **kwargs):\n self.plot(x, y, **kwargs)", "def vis_detections(im, class_name, dets, thresh=0.5):\n global CHECK\n global CLASS_NAME\n global INDS\n global RES\n inds = np.where(dets[:, -1] >= thresh)[0]\n if len(inds) == 0:\n return\n bbox = dets[inds[0], :4]\n score = dets[inds[0], -1]\n if len(inds) > 1:\n score = -1\n for i in inds:\n temp = dets[i, -1]\n if (temp > score):\n score = temp\n bbox = dets[i, :4]\n if score <= MAX_SCORE[0]:\n return\n else:\n CHECK = 1\n MAX_SCORE[0] = score\n CLASS_NAME = class_name\n # im = im[:, :, (2, 1, 0)]\n # fig, ax = plt.subplots()\n # ax.imshow(im, aspect='equal')\n # ax.add_patch(\n # plt.Rectangle((bbox[0], bbox[1]),\n # bbox[2] - bbox[0],\n # bbox[3] - bbox[1], fill=False,\n # edgecolor='red', linewidth=3.5)\n # )\n # ax.text(bbox[0], bbox[1] - 2,\n # '{:s} {:.3f}'.format(class_name, score),\n # bbox=dict(facecolor='blue', alpha=0.5),\n # fontsize=10, color='white')\n # ax.set_title(('{} detections with '\n # 'p({} | box) >= {:.1f}').format(class_name, class_name,\n # thresh),\n # fontsize=10)\n # plt.axis('off')\n # plt.tight_layout()\n # plt.draw()", "def visualize_detection(self, img, dets, classes=[], thresh=0.6):\n import matplotlib.pyplot as plt\n import random\n plt.imshow(img)\n height = img.shape[0]\n width = img.shape[1]\n colors = dict()\n for det in dets:\n (klass, score, x0, y0, x1, y1) = det\n if score < thresh:\n continue\n cls_id = int(klass)\n if cls_id not in colors:\n colors[cls_id] = (random.random(), random.random(), random.random())\n xmin = int(x0 * width)\n ymin = int(y0 * height)\n xmax = int(x1 * width)\n ymax = int(y1 * height)\n rect = plt.Rectangle((xmin, ymin), xmax - xmin,\n ymax - ymin, fill=False,\n edgecolor=colors[cls_id],\n linewidth=3.5)\n plt.gca().add_patch(rect)\n class_name = str(cls_id)\n if classes and len(classes) > cls_id:\n class_name = classes[cls_id]\n plt.gca().text(xmin, ymin - 2,\n '{:s} {:.3f}'.format(class_name, score),\n bbox=dict(facecolor=colors[cls_id], alpha=0.5),\n fontsize=12, color='white')\n plt.show()", "def plot_contours(ax, clf, xx, yy, **params):\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n out = ax.contourf(xx, yy, Z, **params)\n return out", "def plot_contours(ax, clf, xx, yy, **params):\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n out = ax.contourf(xx, yy, Z, **params)\n return out", "def plot_contours(ax, clf, xx, yy, **params):\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n out = ax.contourf(xx, yy, Z, **params)\n return out", "def plot_contours(ax, clf, xx, yy, **params):\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n out = ax.contourf(xx, yy, Z, **params)\n return out", "def plot_result(data, gt_y, pred_y):\n assert 
data.shape[0] == gt_y.shape[0]\n assert data.shape[0] == pred_y.shape[0]\n\n plt.figure()\n\n plt.subplot(1, 2, 1)\n plt.title('Ground Truth', fontsize=18)\n\n for idx in range(data.shape[0]):\n if gt_y[idx] == 0:\n plt.plot(data[idx][0], data[idx][1], 'ro')\n else:\n plt.plot(data[idx][0], data[idx][1], 'bo')\n\n plt.subplot(1, 2, 2)\n plt.title('Prediction', fontsize=18)\n\n for idx in range(data.shape[0]):\n if pred_y[idx] == 0:\n plt.plot(data[idx][0], data[idx][1], 'ro')\n else:\n plt.plot(data[idx][0], data[idx][1], 'bo')\n\n plt.show()", "def plot_result(data, gt_y, pred_y):\n assert data.shape[0] == gt_y.shape[0]\n assert data.shape[0] == pred_y.shape[0]\n\n plt.figure()\n\n plt.subplot(1, 2, 1)\n plt.title('Ground Truth', fontsize=18)\n\n for idx in range(data.shape[0]):\n if gt_y[idx] == 0:\n plt.plot(data[idx][0], data[idx][1], 'ro')\n else:\n plt.plot(data[idx][0], data[idx][1], 'bo')\n\n plt.subplot(1, 2, 2)\n plt.title('Prediction', fontsize=18)\n\n for idx in range(data.shape[0]):\n if pred_y[idx] == 0:\n plt.plot(data[idx][0], data[idx][1], 'ro')\n else:\n plt.plot(data[idx][0], data[idx][1], 'bo')\n\n plt.show()", "def ex_1_a(x, y):\n ###########\n ## TODO:\n ## Train an SVM with a linear kernel\n ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function\n ###########\n\n clf = svm.SVC(kernel='linear')\n clf.fit(x, y)\n\n plot_svm_decision_boundary(clf, x, y)", "def plot_2d_results(perceptron, data):\n\n\t# Divides the data into classes.\n\ttraining_data_classes = split_into_classes(data['training_data'], data['training_labels'])\n\ttest_data_classes = split_into_classes(data['test_data'], data['test_labels'])\n\n\t# Plots the data.\n\tplt.plot(training_data_classes[0][:, 0], training_data_classes[0][:, 1], 'bo',\n\t\ttraining_data_classes[1][:, 0], training_data_classes[1][:, 1], 'ro',\n\t\ttest_data_classes[0][:, 0], test_data_classes[0][:, 1], 'b*',\n\t\ttest_data_classes[1][:, 0], test_data_classes[1][:, 1], 'r*',\n\t\tmarkersize = 12)\n\n\t# Constructs a line that represents the decision boundary.\n\tweights = perceptron.weights\n\tbias = perceptron.bias\n\tx_range = np.array([0, 100])\n\ty_range = -(x_range * weights[0] + bias) / weights[1]\n\n\t# Plots the decision boundary.\n\tplt.plot(x_range, y_range, 'k')\n\tplt.show()", "def vis_detections(im, class_name, dets, thresh=0.5, video= None,fid=0):\n dirname = os.path.dirname(__file__)\n show_dir = os.path.join(dirname, '..', 'show/%s' % os.path.basename(video))\n # print(show_dir)\n if not os.path.exists(show_dir):\n os.makedirs(show_dir)\n\n inds = np.where(dets[:, -1] >= thresh)[0]\n if len(inds) == 0:\n return\n\n im = im[:, :, (2, 1, 0)]\n fig, ax = plt.subplots(figsize=(12, 12))\n ax.imshow(im, aspect='equal')\n for i in inds:\n bbox = dets[i, :4]\n score = dets[i, -1]\n\n ax.add_patch(\n plt.Rectangle((bbox[0], bbox[1]),\n bbox[2] - bbox[0],\n bbox[3] - bbox[1], fill=False,\n edgecolor='red', linewidth=3.5)\n )\n ax.text(bbox[0], bbox[1] - 2,\n '{:s} {:.3f}'.format(class_name, score),\n bbox=dict(facecolor='blue', alpha=0.5),\n fontsize=14, color='white')\n\n ax.set_title(('{} detections with '\n 'p({} | box) >= {:.1f}').format(class_name, class_name,\n thresh),\n fontsize=14)\n plt.axis('off')\n plt.tight_layout()\n plt.draw()\n plt.savefig('%s/all_bboxes_%d.jpg' % (show_dir, fid))\n # plt.show()", "def plot_data (features : list, actual_labels : list, classified_labels : list = None,\n extra_lines : list = None, normalize=False):\n samples = np.array(features)\n if 
normalize:\n norms = np.linalg.norm(samples, axis=1)\n l=[]\n for i, s in enumerate(samples):\n l.append(s/norms[i])\n samples = np.array(l)\n \n plt.figure(figsize=(8, 8))\n for (idx_case, ((actual, classified), marker, color)) in enumerate(zip(cases, markers, colors)):\n mask = np.logical_and(np.equal(actual_labels, actual), \n np.equal(actual if classified_labels == None else classified_labels, classified))\n if not np.any(mask): continue\n plt.scatter(\n samples[mask, 0], samples[mask, 1],\n label = f\"Class {actual}\" if classified_labels == None else f\"Was {actual}, classified {classified}\",\n marker = marker, s = 300, c = [color],\n )\n # Add the lines to show the true classes boundaries, if provided\n if extra_lines != None:\n for line in extra_lines:\n plt.plot(line[0], line[1], color = 'gray')\n plt.legend()", "def plot_scenario(self, ax):\n ax.set_xlim((0,12))\n ax.set_ylim((0,12))\n\n # Unpack region's sizes and positions\n obs_x = self.obstacle_vert[0]\n obs_y = self.obstacle_vert[2]\n obs_w = self.obstacle_vert[1]-obs_x\n obs_h = self.obstacle_vert[3]-obs_y\n\n tar_x = self.goal_vert[0]\n tar_y = self.goal_vert[2]\n tar_w = self.goal_vert[1]-tar_x\n tar_h = self.goal_vert[3]-tar_y\n\n obstacle = Rectangle((obs_x,obs_y),obs_w,obs_h,color='red',alpha=0.5)\n target = Rectangle((tar_x,tar_y),tar_w,tar_h, color='green',alpha=0.5)\n\n ax.add_patch(obstacle)\n ax.add_patch(target)", "def plot_contours(clf, xx, yy, num_features, locs, **params):\n predict = fix_examples(np.c_[xx.ravel(), yy.ravel()], num_features, locs)\n Z = clf.predict(predict)\n Z = Z.reshape(xx.shape)\n out = plt.contourf(xx, yy, Z, **params)\n return out", "def visualise_dataset_classifier_results(dataset_results):\n current_time = datetime.now().strftime(\"%Y-%m-%d_%H-%M-%S\")\n file_name = \"raw_dump_{0}.txt\".format(current_time)\n with open(os.path.dirname(os.path.realpath(__file__)) + \"/../results/\" + file_name, \"wb\") as output_file:\n output_file.write(str(dataset_results))\n sns.set(style='ticks')\n fig = plt.figure(figsize=(10, 10))\n ax = fig.add_subplot(1, 1, 1)\n markers = [\"s\", \"o\", \"^\", \"*\"]\n colors = [\"#64B3DE\", \"#1f78b4\", \"#B9B914\", \"#FBAC44\", \"#bc1659\", \"#33a02c\", \"#6ABF20\", \"#ff7f00\", \"#6a3d9a\", \"grey\", \"#b15928\", \"#e31a1c\", \"black\"]\n color_dict = {}\n index = 0\n for (_, classifier_description) in dataset_results[0][1]:\n color_dict[classifier_description] = colors[index]\n index += 1\n\n hatches = [None, \"////\", \"..\"]\n\n # Move left y-axis and bottom x-axis to centre, passing through (0,0)\n ax.spines['left'].set_position('center')\n ax.spines['bottom'].set_position('center')\n\n # Eliminate upper and right axes\n ax.spines['right'].set_color('none')\n ax.spines['top'].set_color('none')\n\n # Show ticks in the left and lower axes only\n ax.xaxis.set_ticks_position('bottom')\n ax.yaxis.set_ticks_position('left')\n ax.set_axis_on()\n ax.spines['left'].set_color('black')\n ax.spines['bottom'].set_color('black')\n plt.xlabel(\"Change in TPR\")\n plt.ylabel(\"Change in TNR\")\n\n ax.xaxis.set_label_coords(0.1, 0.52)\n ax.yaxis.set_label_coords(0.53, 0.9)\n\n plt.ylim(-0.2, 0.2)\n plt.xlim(-0.2, 0.2)\n data_set_labels = []\n classifier_labels = []\n data_set_index = 0\n for (data_set, dataset_result) in dataset_results:\n data_set_labels.append(mlines.Line2D(range(1), range(1), color=\"white\", marker=markers[data_set_index], markeredgecolor=\"black\", markeredgewidth=1.0, label=data_set.replace(\"_\", \" \")))\n median_true_pos = 
np.median(np.array([result_arr[3] for (result_arr, classifier_description) in dataset_result]))\n median_true_neg = np.median(np.array([result_arr[4] for (result_arr, classifier_description) in dataset_result]))\n\n i = 0\n for (result_arr, classifier_description) in dataset_result:\n if data_set_index == 0:\n classifier_labels.append(mpatches.Patch(facecolor=color_dict[classifier_description], hatch=hatches[i % len(hatches)], label=classifier_description, alpha=0.8, edgecolor=\"black\"))\n ax.scatter(result_arr[3] - median_true_pos, result_arr[4] - median_true_neg, marker=markers[data_set_index], hatch=hatches[i % len(hatches)], s=200, alpha=0.8, color=colors[i],\n edgecolor=\"black\", zorder=data_set_index, lw=0.8)\n i += 1\n data_set_index += 1\n\n plt.legend(handles=data_set_labels + classifier_labels)\n sns.despine()\n current_time = datetime.now().strftime(\"%Y-%m-%d_%H-%M-%S\")\n plt.savefig(os.path.dirname(os.path.realpath(__file__)) + \"/../results/classifier_dataset_plt_{0}.png\".format(current_time), bbox_inches='tight')\n plt.close(fig)", "def decision_tree(df):\n features = df[['Temperature(F)', 'Humidity(%)', 'Visibility(mi)', 'Wind_Speed(mph)',\n 'Precipitation(in)', 'Amenity', 'Bump', 'Crossing', 'Give_Way',\n 'Junction', 'No_Exit', 'Railway', 'Roundabout', 'Station', 'Stop',\n 'Traffic_Calming', 'Traffic_Signal', 'Civil_Twilight', 'Rush Hour', 'Weekend',\n 'Side_R', 'Season_Spring', 'Season_Summer',\n 'Season_Winter', 'Weather_Condition_Clear', 'Weather_Condition_Fog',\n 'Weather_Condition_Other', 'Weather_Condition_Rain',\n 'Weather_Condition_Snow', 'Weather_Condition_Thunderstorm']]\n X= features\n y = df['Severity']\n clf = DecisionTreeClassifier(min_samples_split=6, min_samples_leaf=2, max_depth=3, \n criterion = 'gini', random_state=42)\n clf.fit(X, y)\n\n plt.figure(figsize=(25,10))\n a = plot_tree(clf, \n feature_names=X.columns.to_list(), \n filled=True, \n rounded=True, \n fontsize=14)\n plt.savefig(\"../Images/rockies_decision_tree.png\")\n plt.show()", "def plot_training_results(clfs, train_scores, test_scores):\n\n # Set graph format\n sns.set_style(\"whitegrid\")\n sns.set_context(\"paper\", font_scale=1, rc={\"lines.linewidth\": 1})\n ax = plt.subplot(111)\n w = 0.5\n x = np.arange(len(train_scores))\n ax.set_yticks(x + w)\n ax.legend((train_scores[0], test_scores[0]), (\"Train Scores\", \"Test Scores\"))\n names = ['SVC', 'LR', 'KNN', 'GNB', 'ADA', 'RF']\n\n # Loop throuugh classifiers\n # clfnames = []\n # for i in range(0, len(clfs)):\n # # Define temporary variables\n # clfname = clfnames[i]\n # # clf_name = clf.__class__.__name__\n # # Create and store name\n # name = \"{}\".format(clf_name)\n # names.append(name)\n\n # Plot all names in horizontal bar plot\n ax.set_yticklabels((names), fontsize=20)\n plt.xlim(0.5, 0.56)\n plt.barh(x, test_scores, color='b', alpha=0.6)\n plt.title(\"Test Data Accuracy Scores\", fontsize=30)\n fig = plt.figure(1)\n\n plt.show()", "def calibrationPlot(predictions, truth, classes = None, label = \"Model\", newFigure = None, n_bins = 5):\n predictions, truth = selection(predictions, truth, classes)\n predictions, truth = flatten(predictions, truth)\n predictions = ((predictions - predictions.min()) / (predictions.max() - predictions.min())).flatten()\n fraction_of_positives, mean_predicted_value = calibration_curve(truth, predictions, n_bins = n_bins)\n bins = np.linspace(0., 1. 
+ 1e-8, n_bins + 1)\n binids = np.digitize(predictions, bins) - 1\n bin_sums = np.bincount(binids, minlength=len(bins))\n bin_sums = bin_sums[bin_sums != 0] * 500 / np.sum(bin_sums)\n\n if newFigure is not None:\n plt.figure(newFigure)\n else:\n plt.xlabel('Mean Predicted Value')\n plt.ylabel('Fraction Positive')\n plt.title('Calibration')\n\n p = plt.plot(mean_predicted_value, fraction_of_positives, alpha = 0.5, ls=':')\n plt.scatter(mean_predicted_value, fraction_of_positives, s = bin_sums, label = label + \" ({:.2f})\".format(brier_score_loss(truth, predictions)), color = p[0].get_color(), alpha = 0.5)", "def ex_1_b(x, y):\n ###########\n ## Add a point (4,0) with label 1 to the data set and then\n ## train an SVM with a linear kernel\n ## and plot the decision boundary and support vectors using 'plot_svm_decision_boundary' function\n ###########\n new_x = np.vstack((x, np.array([4,0])))\n new_y = np.hstack((y, np.array((1))))\n\n clf = svm.SVC(kernel='linear')\n clf.fit(new_x, new_y)\n plot_svm_decision_boundary(clf, new_x, new_y)\n pass", "def plotPred(img, pred):\n\n #plota a imagem.\n plt.imshow(img)\n plt.axis('off')\n\n #grafico de barras.\n plt.figure() \n order = list(reversed(range(len(pred)))) \n bar_preds = [pr[2] for pr in pred]\n labels = (pr[1] for pr in pred)\n plt.barh(order, bar_preds, alpha=0.5)\n plt.yticks(order, labels)\n plt.xlabel('Probability')\n plt.xlim(0, 1.01)\n plt.tight_layout()\n plt.show()", "def show_learning_curve(self):\n\n # Loop output classes\n for c in range(1,self.n_output_classes):\n # Get data\n x_values = np.array(self.n_class_samples_list[c])\n accuracy = np.array(self.accuracy_list[c])\n precision = np.array(self.precision_list[c])\n recall = np.array(self.recall_list[c])\n F1 = np.array(self.F1_list[c])\n\n # Make plot\n with sns.axes_style(\"ticks\"):\n fig,ax = plt.subplots()\n plt.plot([np.min(x_values),np.max(x_values)],[0.5,0.5],\n color='#777777',linestyle='--')\n plt.plot([np.min(x_values),np.max(x_values)],[0.66,0.66],\n color='#777777',linestyle=':')\n plt.plot([np.min(x_values),np.max(x_values)],[0.8,0.8],\n color='#777777',linestyle=':')\n plt.plot([np.min(x_values),np.max(x_values)],[0.9,0.9],\n color='#777777',linestyle=':')\n\n plt.plot( x_values, accuracy, color='#000000',\n linewidth=1, label='Accuracy' )\n plt.plot( x_values, precision, color='#0000aa',\n linewidth=1, label='Precision' )\n plt.plot( x_values, recall, color='#00aa00',\n linewidth=1, label='Recall' )\n plt.plot( x_values, F1, color='#aa0000',\n linewidth=2, label='F1' )\n\n plt.yticks( [0, 0.5, 0.66, 0.8, 0.9, 1.0],\n ['0','0.5','0.66','0.8','0.9','1.0'], ha='right' )\n plt.xlim(np.max(x_values)*-0.02,np.max(x_values)*1.02)\n plt.ylim(-0.02,1.02)\n plt.xlabel('Number of training samples')\n plt.ylabel('Performance')\n plt.title('Learning curve, class {}'.format(c))\n sns.despine(ax=ax, offset=0, trim=True)\n lgnd = plt.legend(loc=4, ncol=1, frameon=True, fontsize=9)\n lgnd.get_frame().set_facecolor('#ffffff')\n ax.spines['left'].set_bounds(0,1)\n ax.spines['bottom'].set_bounds(np.min(x_values),np.max(x_values))", "def plot(self, x, y, b, path=None):\n label = [\"atypical\", \"indeterminate\", \"negative\", \"typical\"]\n _, pred = self.cam_model.predict(x)\n for i in range(len(x)):\n image = x[i] if x.shape[-1] == 3 else np.squeeze(x[i], -1)\n\n fig, axs = plt.subplots(2, 2)\n for j in range(4):\n ax_x = [0, 1, 0, 1]\n ax_y = [0, 0, 1, 1]\n ax = axs[ax_x[j], ax_y[j]]\n p = np.argmax(pred[i])\n a = np.argmax(y[i])\n c = '(pa)' if j == p and p == a else '(p)' if 
j == p else '(a)' if j == a else ''\n ax.title.set_text(f\"{label[j]} {c}\")\n # hide axis ticks\n plt.setp(ax.get_xticklabels(), visible=False)\n plt.setp(ax.get_yticklabels(), visible=False)\n ax.tick_params(axis='both', which='both', length=0)\n # plot original image with boxes\n ax.imshow(image, cmap=\"gray\", aspect=\"equal\")\n for box in b[i]:\n ax.add_patch(Rectangle((box[\"x\"], box[\"y\"]), box[\"width\"], box[\"height\"], linewidth=1, edgecolor=\"r\", facecolor=\"None\", alpha=0.6))\n # plot CAM\n camap = self.generate(x[i], label=j, zoom=True)\n camap = ax.imshow(camap, cmap=\"coolwarm\", aspect=\"equal\", alpha=0.6)\n #cax = fig.add_axes([ax2.get_position().x1+0.01, ax2.get_position().y0,0.02, ax2.get_position().height])\n #plt.colorbar(camap, cax=cax, orientation=\"vertical\")\n if path != None: plt.savefig(path + f\"_{i}.png\", dpi=300, format=\"png\")\n plt.show()", "def visualize_openset_classification(data, other_data_dicts, dict_key, data_name,\n thresholds, save_path, tailsize):\n\n lw = 10\n plt.figure(figsize=(20, 20))\n plt.plot(thresholds, data, label=data_name, color=colors[0], linestyle='solid', linewidth=lw)\n\n c = 0\n for other_data_name, other_data_dict in other_data_dicts.items():\n plt.plot(thresholds, other_data_dict[dict_key], label=other_data_name, color=colors[c],\n linestyle=linestyles[c % len(linestyles)], linewidth=lw)\n c += 1\n\n plt.xlabel(r\"Weibull CDF outlier rejection prior $\\Omega_t$\", fontsize=axes_font_size)\n plt.ylabel(\"Percentage of dataset outliers\", fontsize=axes_font_size)\n plt.xlim(left=-0.05, right=1.05)\n plt.ylim(bottom=-0.05, top=1.05)\n plt.legend(loc=0, fontsize=legend_font_size - 15)\n plt.savefig(os.path.join(save_path, data_name + '_' + \",\".join(list(other_data_dicts.keys())) +\n '_outlier_classification' + '_tailsize_' + str(tailsize) + '.pdf'),\n bbox_inches='tight')", "def plot_stability_function(self,bounds=[-20,1]):\n import matplotlib.pyplot as plt\n p,q=self.stability_function()\n xx=np.arange(bounds[0], bounds[1], 0.01)\n yy=p(xx)/q(xx)\n fig, = plt.plot(xx,yy)\n plt.draw()", "def plot_contours(clf, points,labels):\n def make_meshgrid(x, y,h=0.02):\n \"\"\"Create a mesh of points to plot in\n Parameters\n ----------\n x: data to base x-axis meshgrid on\n y: data to base y-axis meshgrid on\n h: stepsize for meshgrid, optional\n Returns\n -------\n xx, yy : ndarray\n \"\"\"\n x_min, x_max = x.min() - 1, x.max() + 1\n y_min, y_max = y.min() - 1, y.max() + 1\n xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n np.arange(y_min, y_max, h))\n return xx, yy\n X0, X1 = points[:, 0], points[:, 1]\n xx, yy = make_meshgrid(X0, X1,0.5)\n Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])\n Z = Z.reshape(xx.shape)\n #VISUALIZATION OF DECISION\n fig2 = plt.figure(figsize=(5,5))\n ax = fig2.add_subplot(1,1,1)\n out = plt.contourf(xx, yy, Z, cmap=plt.cm.coolwarm, alpha=0.8)\n ax.scatter(X0, X1, c=labels, cmap=plt.cm.coolwarm, s=80, edgecolors='k')\n ax.scatter(X0[clf.support_],X1[clf.support_],c=labels[clf.support_], cmap=plt.cm.coolwarm, s=80, edgecolors='w')\n ax.set_xlim(xx.min(), xx.max())\n ax.set_ylim(yy.min(), yy.max())\n plt.show()\n return out", "def plot_roi_bounds(bounds,color='w',label=False):\n X1,X2,Y1,Y2=bounds\n plt.plot([X1,X2,X2,X1,X1],[Y1,Y1,Y2,Y2,Y1],'-',color=color)\n if label:\n plt.text(X1,Y1-3,label,verticalalignment='bottom',color=color,\n backgroundcolor=(0,0,0,.5))\n plt.margins(0,0)" ]
[ "0.72929466", "0.7281941", "0.7130415", "0.70138526", "0.70124936", "0.69807297", "0.69738376", "0.6969092", "0.6924797", "0.68875146", "0.6852066", "0.6791433", "0.66879207", "0.66742694", "0.66299", "0.6457393", "0.628703", "0.62751436", "0.62345755", "0.61874735", "0.6163345", "0.614915", "0.60573244", "0.60090643", "0.6007699", "0.594626", "0.5886453", "0.5878822", "0.5802916", "0.5761619", "0.5753064", "0.572655", "0.5713558", "0.56896317", "0.5684336", "0.56786436", "0.5667913", "0.5634603", "0.5615319", "0.5606805", "0.5572684", "0.55646986", "0.5557413", "0.5550026", "0.5533325", "0.55144554", "0.551146", "0.5509427", "0.5505239", "0.5495495", "0.5482014", "0.5465226", "0.5460854", "0.5456913", "0.5455289", "0.5435248", "0.54341865", "0.5430196", "0.54284614", "0.5389179", "0.5378806", "0.53781396", "0.53771096", "0.5375327", "0.5366895", "0.5358917", "0.5356116", "0.5356039", "0.53490657", "0.5342708", "0.53415704", "0.5300401", "0.52986157", "0.5297173", "0.52940613", "0.52940613", "0.52940613", "0.52940613", "0.52821684", "0.52821684", "0.52737284", "0.5271219", "0.52674687", "0.52587855", "0.5256339", "0.5256058", "0.52335006", "0.5232199", "0.52307725", "0.522394", "0.52166104", "0.52127904", "0.52086824", "0.5200257", "0.51948935", "0.51919585", "0.5180775", "0.5166462" ]
0.5346783
71
Guts for `~trigger.utils.url.parse_url`. Based on Kombu's ``kombu.utils.url``.
def _parse_url(url):
    parts = urlparse(url)
    scheme = parts.scheme
    port = parts.port or None
    hostname = parts.hostname
    path = parts.path or ''
    virtual_host = path[1:] if path and path[0] == '/' else path
    return (scheme,
            unquote(hostname or '') or None,
            port,
            unquote(parts.username or '') or None,
            unquote(parts.password or '') or None,
            unquote(path or '') or None,
            unquote(virtual_host or '') or None,
            unquote(parts.query or '') or None,
            dict(dict(parse_qsl(parts.query))))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __ParseUrl(url):\n return urlparse(url)", "def _parse(url):\n url = url.strip()\n parsed = urlparse(url)\n return _parsed_url_args(parsed)", "def urlparse(url):\n\tunquote_url=urllib.parse.unquote(url)\n\treturn unquote_url", "def _parse_url(self, url):\n url_prefix = self.URL_PREFIX\n assert(url[:len(url_prefix)] == url_prefix)\n key, file_attrs = url[len(url_prefix):].split('/', 1)\n file_, attrs = parse_url_opts(file_attrs)\n return key, file_, attrs", "def parse_url(url):\n url = urlparse.urlparse(url)\n #print url.__class__\n return EasyUrl.EvolveParseResult(url)", "def handle_url(self, url):\n parse = urlparse.urlparse(url, \"http\")\n # relative url path\n if not parse.netloc:\n parse = urlparse.urlparse(\n urlparse.urljoin(\n self.source_url,\n parse.path))\n return urlparse.urlunparse(parse)", "def parse_url(url):\n loc = urlparse(url)\n\n # if the scheme (http, https ...) is not available urlparse wont work\n if loc.scheme == \"\":\n url = \"http://\" + url\n loc = urlparse(url)\n return loc", "def _parsing_url(self, base_url):\n url2 = f\"{self.location}?apikey={self.api_key}&details=true\"\n absolute_url = urljoin(base_url, url2)\n return absolute_url", "def parse_url(url, encoding=None):\n if isinstance(url, ParseResult):\n return url\n return urlparse(to_unicode(url, encoding))", "def parse_url(url):\n newurl = urlparse(url)\n return \"{0}://{1}\".format(newurl.scheme, newurl.netloc)", "def _parseurl(url):\n tracker1=url\n port=int(re.findall(\"[0-9]+\",tracker1)[0])\n host=re.findall(\"[^0-9]+\",tracker1)[0]\n host=host[:-1]\n host=host[6:]\n return host,port", "def test_url_parse():\n host, port, path = parse_url(\"http://localhost:8000/form.html\")\n assert host == \"localhost\"\n assert port == \"8000\"\n assert path == \"/form.html\"", "def func_PARSE(self):\n self.parsed_url = parse.urlparse(\"http://{0}:{1}{2}\".format(args.HTTP_HOST, args.HTTP_PORT, self.path).lower())\n self.parsed_param = parse.parse_qs(self.parsed_url[4])", "def test_urlparse(py2venv):\n up = sux.to_use('urlparse')\n assert {} == up.parse_qs(\"\")\n assert 'http://www.cwi.nl/%7Eguido/FAQ.html' == \\\n up.urljoin('http://www.cwi.nl/%7Eguido/Python.html', 'FAQ.html')", "def parse(url=None):\n\n if url:\n return urlparse(url)\n\n url = os.environ.get(DEFAULT_ENV)\n return urlparse(url)", "def normalize_url(self, url):\n pass", "def _parse_url(url):\r\n if \":\" not in url:\r\n raise ValueError(\"url is invalid\")\r\n\r\n scheme, url = url.split(\":\", 1)\r\n\r\n parsed = urlparse(url, scheme=\"http\")\r\n if parsed.hostname:\r\n hostname = parsed.hostname\r\n else:\r\n raise ValueError(\"hostname is invalid\")\r\n port = 0\r\n if parsed.port:\r\n port = parsed.port\r\n\r\n is_secure = False\r\n if scheme == \"ws\":\r\n if not port:\r\n port = 80\r\n elif scheme == \"wss\":\r\n is_secure = True\r\n if not port:\r\n port = 443\r\n else:\r\n raise ValueError(\"scheme %s is invalid\" % scheme)\r\n\r\n if parsed.path:\r\n resource = parsed.path\r\n else:\r\n resource = \"/\"\r\n\r\n if parsed.query:\r\n resource += \"?\" + parsed.query\r\n\r\n return (hostname, port, resource, is_secure)", "def parse_url(url):\n url = urllib.parse.urlparse(url)\n query = urllib.parse.parse_qs(url.query)\n query_ = query.get('dn', query.get('title', ''))[0]\n if url.scheme == \"magnet\":\n return \"magnet:?xt={}\".format(query['xt'][0]), query_\n return \"http://{}{}{}\".format(*url[0:3]), query_", "def __init_url(self, url):\n scheme, netloc, path, query, fragment = urlparse.urlsplit(url)\n if scheme:\n 
self.__dict__['__scheme'] = str(scheme)\n self.__dict__['__url'] = urlparse.urlunsplit((scheme, netloc.lower(), path, query, fragment))\n else:\n self.__init_url(str(\"http://\" + url))", "def _override_tourl(self):\n base_url = urlparse.urlparse(self.url)\n try:\n query = base_url.query\n except AttributeError:\n # must be python <2.5\n query = base_url[4]\n query = parse_qs(query)\n for k, v in self.items():\n query.setdefault(k, []).append(v)\n\n try:\n scheme = base_url.scheme\n netloc = base_url.netloc\n path = base_url.path\n params = base_url.params\n fragment = base_url.fragment\n except AttributeError:\n # must be python <2.5\n scheme = base_url[0]\n netloc = base_url[1]\n path = base_url[2]\n params = base_url[3]\n fragment = base_url[5]\n\n url = (scheme, netloc, path, params,\n urllib.urlencode(query, True), fragment)\n return urlparse.urlunparse(url)", "def process_url(url):\n parsed = urlparse(url)\n if parsed.scheme:\n return parsed.netloc, parsed.path\n else:\n host_part = parsed.path\n hostname = host_part.partition(\"/\")[0]\n path = \"/\" + host_part.partition(\"/\")[2]\n return hostname, path", "def parseURL(url):\n\n\n scheme, host, path, params, query, hash = urlparse(url)\n if not path: path = \"/\"\n\n args = parse_qs(query)\n\n escapedArgs = {}\n for name in args:\n if len(args[name]) == 1:\n escapedArgs[unquote(name)] = unquote(args[name][0])\n else:\n escapedArgs[unquote(name)] = escapedSet = []\n for item in args[name]:\n escapedSet.append(unquote(item))\n\n return host, path, params, escapedArgs", "def _parse(self, string):\n modern_scheme = r\"\"\"\nssh://\n(?:\n (?P<user>[^@]+)\n@)? # user is anything but @, then the @ separator\n(?P<host>[^:/]+) # host is anything but : and /\n(:(?P<port>\\d+))? # optional port\n(/(?P<remote_dir>.*))? # optional remote directory\n\"\"\"\n match = re.match(modern_scheme, string, re.VERBOSE)\n if match:\n self._handle_match(match)\n else:\n old_scheme = \"\"\"\n(?P<user>[^@]+) # user is anything but @, and optional\n@ # mandatory @ separator\n(?P<host>[^:/]+) # host is anything but : and /\n(\n (:|/)? # directory separator is either : or /\n (?P<remote_dir>.*))? 
# remote directory is optional\n \"\"\"\n match = re.match(old_scheme, string, re.VERBOSE)\n if match:\n self._handle_match(match)\n else:\n raise URLParseError(\"\"\" \\\nCould not parse %s as a valid url.\nSupported schemes are\n\n user@host:directory\n\n ssh://user@host:port/directory\n\"\"\" % self.as_string)", "def parse_url(cls, string):\r\n match = cls.URL_RE.match(string)\r\n if not match:\r\n raise InvalidKeyError(cls, string)\r\n return match.groupdict()", "def parse_url(url):\n bits = urlparse.urlsplit(url)\n print bits\n transport = bits[0]\n uphp = bits[1].split('@')\n user = ''\n passwd = ''\n if len(uphp) == 2:\n (user, passwd) = uphp.pop(0).split(':')\n\n hp = uphp[0].split(':')\n host = hp[0]\n if len(hp) == 2:\n port = int(hp[1])\n else:\n # Require subclass to default\n port = 0\n dirname, filename = bits[2].rsplit('/', 1)\n # params = map(lambda x: x.split('='), bits[3].split('&'))\n params = [x.split('=') for x in bits[3].split('&')]\n try:\n params = dict(params)\n except ValueError:\n params = {}\n anchor = bits[4]\n return (transport, user, passwd, host, port, dirname, filename, params, anchor)", "def parsed_url(self):\n if isinstance(self.url_or_urllib_parseresult, urllib.parse.ParseResult):\n parsed_url = self.url_or_urllib_parseresult\n else:\n parsed_url = urllib.parse.urlparse(self.url_or_urllib_parseresult)\n return urllib.parse.ParseResult(\n scheme=parsed_url.scheme,\n netloc=parsed_url.netloc,\n path='', params='', query='', fragment='')", "def sanitize_url(self, url):\r\n if not self.markdown.safeMode:\r\n # Return immediately bipassing parsing.\r\n return url\r\n \r\n try:\r\n scheme, netloc, path, params, query, fragment = url = urlparse(url)\r\n except ValueError:\r\n # Bad url - so bad it couldn't be parsed.\r\n return ''\r\n \r\n locless_schemes = ['', 'mailto', 'news']\r\n if netloc == '' and scheme not in locless_schemes:\r\n # This fails regardless of anything else. \r\n # Return immediately to save additional proccessing\r\n return ''\r\n\r\n for part in url[2:]:\r\n if \":\" in part:\r\n # Not a safe url\r\n return ''\r\n\r\n # Url passes all tests. Return url as-is.\r\n return urlunparse(url)", "def parse_url(url):\n parsed = urlparse(url)\n return {\n \"scheme\": parsed.scheme,\n \"netloc\": parsed.netloc,\n \"path\": parsed.path,\n \"qs\": parse_qs(parsed.query),\n }", "def youtube_fix_url(url):\n p = urlparse.urlparse(url)\n path = p.path\n if '&' in p.path:\n # sign of a malformed path\n path = re.sub('\\&.+', '', p.path)\n return urlparse.urlunparse((p.scheme, p.netloc, path, p.params, p.query, p.fragment))", "def process_url(url):\n # only get url path, remove host,params.\n url = urlparse(url).path\n # url = list(url)\n # for i in range(len(url)):\n # if _is_punctuation(url[i]):\n # url[i] = \" \"\n # url = ''.join(url)\n # url = ' '.join(url.split())\n return url", "def _parse_url(url: str) -> Optional[str]:\n match = re.search(r\"pastecord.com(?:/raw|/documents)?/(\\w+)(?:\\.\\w+)?\", url)\n if match is None:\n return None\n return match.group(1)", "def _parse_url_dict(self, url):\n # TODO: what if URL is an exact attribute in site config and not relative?\n try:\n return (\n url['path'],\n self.SITE_CONFIG.parse_relative_url_dict(url)\n )\n except KeyError as e:\n raise utils.yaml.YAMLKeyError(\n \"Missing required {} key in web page 'url' dictionary\"\n )\n except ValueError as e:\n error_msg = \"Invalid URL 'relative_to' value (relative_to: {}). 
\".format(url['relative_to'])\n error_msg += 'Must be a valid attribute declared in SiteConfig class'\n raise utils.YAMLValueError(error_msg)", "def clean_url(url):\n scheme, netloc, path, query, fragment = url_parse.urlsplit(url)\n path = url_parse.quote(path)\n url = url_parse.urlunsplit((scheme, netloc, path, query, fragment))\n return url", "def unparse(self):\r\n # only parse the query params if there is an update dict\r\n q = self.query\r\n if self._url_updates or self._query_dict is not None:\r\n q = self._query_dict or self.query_dict\r\n q.update(self._url_updates)\r\n q = query_string(q).lstrip('?')\r\n\r\n # make sure the port is not doubly specified \r\n if self.port and \":\" in self.hostname:\r\n self.hostname = self.hostname.split(':')[0]\r\n\r\n # if there is a netloc, there had better be a scheme\r\n if self.netloc and not self.scheme:\r\n self.scheme = \"http\"\r\n \r\n return urlunparse((self.scheme, self.netloc,\r\n self.path.replace('//', '/'),\r\n self.params, q, self.fragment))", "def parse_url_path(url_path):\r\n\r\n m = re.match('^/([^/]+)/?$',url_path)\r\n if m:\r\n return (m.group(1),None)\r\n \r\n m = re.match('^/([^/]+)/(.+)$',url_path)\r\n if m:\r\n return (m.group(1),m.group(2).replace('%25','%'))\r\n \r\n return (None,None)", "def _get_converted_url(self):\n new_parsed_url = ParseResult(\n scheme=\"https\",\n netloc=self.parsed_url.netloc,\n path=self.parsed_url.path,\n params=self.parsed_url.params,\n query=self.parsed_url.query,\n fragment=self.parsed_url.fragment,\n )\n\n return urlunparse(new_parsed_url)", "def sanitize_url(url: str) -> Optional[str]:\n try:\n parts = urllib.parse.urlparse(url.replace(\" \", \"%20\"))\n scheme, netloc, path, params, query, fragment = parts\n except ValueError:\n # Bad URL - so bad it couldn't be parsed.\n return \"\"\n\n # If there is no scheme or netloc and there is a '@' in the path,\n # treat it as a mailto: and set the appropriate scheme\n if scheme == \"\" and netloc == \"\" and \"@\" in path:\n scheme = \"mailto\"\n elif scheme == \"\" and netloc == \"\" and len(path) > 0 and path[0] == \"/\":\n # Allow domain-relative links\n return urllib.parse.urlunparse((\"\", \"\", path, params, query, fragment))\n elif (scheme, netloc, path, params, query) == (\"\", \"\", \"\", \"\", \"\") and len(fragment) > 0:\n # Allow fragment links\n return urllib.parse.urlunparse((\"\", \"\", \"\", \"\", \"\", fragment))\n\n # Zulip modification: If scheme is not specified, assume http://\n # We re-enter sanitize_url because netloc etc. need to be re-parsed.\n if not scheme:\n return sanitize_url(\"http://\" + url)\n\n # Upstream code will accept a URL like javascript://foo because it\n # appears to have a netloc. Additionally there are plenty of other\n # schemes that do weird things like launch external programs. To be\n # on the safe side, we allow a fixed set of schemes.\n if scheme not in allowed_schemes:\n return None\n\n # Upstream code scans path, parameters, and query for colon characters\n # because\n #\n # some aliases [for javascript:] will appear to urllib.parse to have\n # no scheme. On top of that relative links (i.e.: \"foo/bar.html\")\n # have no scheme.\n #\n # We already converted an empty scheme to http:// above, so we skip\n # the colon check, which would also forbid a lot of legitimate URLs.\n\n # URL passes all tests. 
Return URL as-is.\n return urllib.parse.urlunparse((scheme, netloc, path, params, query, fragment))", "def _split_url(self, url):\n url_split = urlsplit(url)\n try:\n if url_split.netloc is not None and url_split.netloc.find(\" \") > 0:\n return None\n decoded_netloc = url_split.netloc.decode(\"utf-8\").encode(\"idna\")\n url_parts = (\n url_split.scheme,\n decoded_netloc,\n url_split.path,\n url_split.query,\n url_split.fragment)\n url_splitted = urlunsplit(url_parts)\n return url_splitted\n except UnicodeError:\n return None", "def url_validator_callback(url: str) -> str:\n if url is None:\n return url\n\n url = url.strip()\n try:\n result = urlparse(url)\n if result.scheme and result.netloc:\n return url\n except:\n pass\n raise typer.BadParameter(\"Please supply a valid url\")", "def parse_url(url):\n results = NotifyBase.parse_url(url)\n if not results:\n # We're done early as we couldn't load the results\n return results\n\n try:\n # Retrieve our secret_key from the first entry in the url path\n results['secret_key'] = \\\n NotifyPushjet.split_path(results['fullpath'])[0]\n\n except IndexError:\n # no secret key specified\n results['secret_key'] = None\n\n # Allow over-riding the secret by specifying it as an argument\n # this allows people who have http-auth infront to login\n # through it in addition to supporting the secret key\n if 'secret' in results['qsd'] and len(results['qsd']['secret']):\n results['secret_key'] = \\\n NotifyPushjet.unquote(results['qsd']['secret'])\n\n return results", "def _get_url(self, absolute):", "def parse_url(url):\n # This is a dirty hack; but it's the only work around to tgram://\n # messages since the bot_token has a colon in it. It invalidates a\n # normal URL.\n\n # This hack searches for this bogus URL and corrects it so we can\n # properly load it further down. The other alternative is to ask users\n # to actually change the colon into a slash (which will work too), but\n # it's more likely to cause confusion... 
So this is the next best thing\n # we also check for %3A (incase the URL is encoded) as %3A == :\n try:\n tgram = re.match(\n r'(?P<protocol>{schema}://)(bot)?(?P<prefix>([a-z0-9_-]+)'\n r'(:[a-z0-9_-]+)?@)?(?P<btoken_a>[0-9]+)(:|%3A)+'\n r'(?P<remaining>.*)$'.format(\n schema=NotifyTelegram.secure_protocol), url, re.I)\n\n except (TypeError, AttributeError):\n # url is bad; force tgram to be None\n tgram = None\n\n if not tgram:\n # Content is simply not parseable\n return None\n\n if tgram.group('prefix'):\n # Try again\n results = NotifyBase.parse_url('%s%s%s/%s' % (\n tgram.group('protocol'),\n tgram.group('prefix'),\n tgram.group('btoken_a'),\n tgram.group('remaining')), verify_host=False)\n\n else:\n # Try again\n results = NotifyBase.parse_url('%s%s/%s' % (\n tgram.group('protocol'),\n tgram.group('btoken_a'),\n tgram.group('remaining')), verify_host=False)\n\n # The first token is stored in the hostname\n bot_token_a = NotifyTelegram.unquote(results['host'])\n\n # Get a nice unquoted list of path entries\n entries = NotifyTelegram.split_path(results['fullpath'])\n\n # Now fetch the remaining tokens\n bot_token_b = entries.pop(0)\n\n bot_token = '%s:%s' % (bot_token_a, bot_token_b)\n\n # Store our chat ids (as these are the remaining entries)\n results['targets'] = entries\n\n # content to be displayed 'before' or 'after' attachments\n if 'content' in results['qsd'] and len(results['qsd']['content']):\n results['content'] = results['qsd']['content']\n\n # Support the 'to' variable so that we can support rooms this way too\n # The 'to' makes it easier to use yaml configuration\n if 'to' in results['qsd'] and len(results['qsd']['to']):\n results['targets'] += \\\n NotifyTelegram.parse_list(results['qsd']['to'])\n\n # Store our bot token\n results['bot_token'] = bot_token\n\n # Support Thread Topic\n if 'topic' in results['qsd'] and len(results['qsd']['topic']):\n results['topic'] = results['qsd']['topic']\n\n # Silent (Sends the message Silently); users will receive\n # notification with no sound.\n results['silent'] = \\\n parse_bool(results['qsd'].get('silent', False))\n\n # Show Web Page Preview\n results['preview'] = \\\n parse_bool(results['qsd'].get('preview', False))\n\n # Include images with our message\n results['include_image'] = \\\n parse_bool(results['qsd'].get('image', False))\n\n # Include images with our message\n results['detect_owner'] = \\\n parse_bool(results['qsd'].get('detect', True))\n\n return results", "def normalize_url(url, unsplit=True, sort_query=True, strip_authentication=True,\n strip_trailing_slash=True, strip_index=True, strip_protocol=True,\n strip_irrelevant_subdomains=True, strip_lang_subdomains=False, strip_lang_query_items=False,\n strip_fragment='except-routing', normalize_amp=True, fix_common_mistakes=True,\n infer_redirection=True, quoted=True):\n original_url_arg = url\n\n if infer_redirection:\n url = resolve(url)\n\n if isinstance(url, SplitResult):\n has_protocol = bool(splitted.scheme)\n splitted = url\n else:\n has_protocol = PROTOCOL_RE.match(url)\n\n # Ensuring scheme so parsing works correctly\n if not has_protocol:\n url = 'http://' + url\n\n # Parsing\n try:\n splitted = urlsplit(url)\n except ValueError:\n return original_url_arg\n\n scheme, netloc, path, query, fragment = splitted\n\n # Fixing common mistakes\n if fix_common_mistakes:\n if query:\n query = re.sub(MISTAKES_RE, '&', query)\n\n # Handling punycode\n netloc = decode_punycode(netloc)\n\n # Dropping :80 & :443\n if netloc.endswith(':80'):\n netloc = netloc[:-3]\n elif 
netloc.endswith(':443'):\n netloc = netloc[:-4]\n\n # Normalizing the path\n if path:\n trailing_slash = False\n if path.endswith('/') and len(path) > 1:\n trailing_slash = True\n path = normpath(path)\n if trailing_slash and not strip_trailing_slash:\n path = path + '/'\n\n # Handling Google AMP suffixes\n if normalize_amp:\n path = AMP_SUFFIXES_RE.sub('', path)\n\n # Dropping index:\n if strip_index:\n segments = path.rsplit('/', 1)\n\n if len(segments) != 0:\n last_segment = segments[-1]\n filename, ext = splitext(last_segment)\n\n if filename == 'index':\n segments.pop()\n path = '/'.join(segments)\n\n # Dropping irrelevant query items\n if query:\n domain_filter = None\n\n if splitted.hostname:\n domain_filter = next(\n (f for d, f in PER_DOMAIN_QUERY_FILTERS if splitted.hostname.endswith(d)),\n None\n )\n\n qsl = parse_qsl(query, keep_blank_values=True)\n qsl = [\n stringify_qs(item)\n for item in qsl\n if not should_strip_query_item(\n item,\n normalize_amp=normalize_amp,\n strip_lang_query_items=strip_lang_query_items,\n domain_filter=domain_filter\n )\n ]\n\n if sort_query:\n qsl = sorted(qsl)\n\n query = '&'.join(qsl)\n\n # Dropping fragment if it's not routing\n if fragment and strip_fragment:\n if strip_fragment is True or not should_strip_fragment(fragment):\n fragment = ''\n\n # Always dropping trailing slash with empty query & fragment\n if path == '/' and not fragment and not query:\n path = ''\n\n # Dropping irrelevant subdomains\n if strip_irrelevant_subdomains:\n netloc = re.sub(\n IRRELEVANT_SUBDOMAIN_AMP_RE if normalize_amp else IRRELEVANT_SUBDOMAIN_RE,\n '',\n netloc\n )\n\n # Dropping language as subdomains\n if strip_lang_subdomains:\n netloc = strip_lang_subdomains_from_netloc(netloc)\n\n # Dropping scheme\n if strip_protocol or not has_protocol:\n scheme = ''\n\n # Dropping authentication\n if strip_authentication:\n netloc = netloc.split('@', 1)[-1]\n\n # Normalizing AMP subdomains\n if normalize_amp and netloc.startswith('amp-'):\n netloc = netloc[4:]\n\n # Dropping trailing slash\n if strip_trailing_slash and path.endswith('/'):\n path = path.rstrip('/')\n\n # Quoting or not\n if quoted:\n path = quote(path)\n query = quote(query, RESERVED_CHARACTERS)\n fragment = quote(fragment, SAFE_CHARACTERS)\n else:\n path = unquote(path)\n query = unquote(query)\n fragment = unquote(fragment)\n\n # Result\n result = SplitResult(\n scheme,\n netloc.lower(),\n path,\n query,\n fragment\n )\n\n if not unsplit:\n return result\n\n # TODO: check if works with `unsplit=False`\n if strip_protocol or not has_protocol:\n result = urlunsplit(result)[2:]\n else:\n result = urlunsplit(result)\n\n return result", "def _parse_url_token(url_token):\n match = re.fullmatch(\n '^([0-9A-Za-z_\\-]+)/([0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})$',\n url_token\n )\n if match:\n return match.group(1), match.group(2)\n return None, None", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return re.compile(request_url.rstrip('/') + '/+')", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return re.compile(request_url.rstrip('/') + '/+')", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return re.compile(request_url.rstrip('/') + '/+')", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return 
re.compile(request_url.rstrip('/') + '/+')", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return re.compile(request_url.rstrip('/') + '/+')", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return re.compile(request_url.rstrip('/') + '/+')", "def preprocess_url(self, request_url: str):\n if re.fullmatch('.*/+', request_url) is None:\n return request_url\n else:\n return re.compile(request_url.rstrip('/') + '/+')", "def parse_url(url):\n if url.startswith(URL_SCHEME) and len(url) > len(URL_SCHEME):\n bucket_and_path = url.rstrip('/')[len(URL_SCHEME):].split('/', 1)\n if len(bucket_and_path) == 1:\n bucket_and_path.append('')\n return bucket_and_path\n return (None, None)", "def fix_url(cls, url: str):\r\n ...", "def get_normalized_url(url):\r\n scheme, netloc, path, params, query, fragment = urlparse(url)\r\n\r\n # Exclude default port numbers.\r\n if scheme == 'http' and netloc[-3:] == ':80':\r\n netloc = netloc[:-3]\r\n elif scheme == 'https' and netloc[-4:] == ':443':\r\n netloc = netloc[:-4]\r\n if scheme not in ('http', 'https'):\r\n raise ValueError(\"Unsupported URL %s (%s).\" % (url, scheme))\r\n\r\n # Normalized URL excludes params, query, and fragment.\r\n return urlunparse((scheme, netloc, path, None, None, None))", "def normalize_url(url: str) -> str:\n parts = urlparse(url)\n\n path = quote(parts.path)\n while '//' in path:\n path = path.replace(\"//\", \"/\")\n\n return urlunparse(parts._replace(path=path))", "def __init__(self, url):\n self._url = urlparse(url)\n #self.validate()", "def domain_parse(url):\n url = url.lower()\n if not url.startswith('http://') and not url.startswith('https://'):\n url = '{schema}{host}'.format(schema='http://', host=url)\n url = urlparse(url)\n if not url.hostname:\n raise ValueError('Invalid domain provided')\n\n # Strip www prefix any additional URL data\n url = urlparse('{scheme}://{host}'.format(scheme=url.scheme, host=url.hostname.lstrip('www.')))\n return url", "def parse_url(url):\n scheme, host, port, user, passwd, path, vhost, qs, qs_dict = _parse_url(url)\n return dict(scheme=scheme, hostname=host, port=port, username=user,\n password=passwd, path=path, virtual_host=vhost,\n query=qs, **qs_dict)", "def validate(self, value: str | None, path: list[str], **kwargs: Any) -> str | None:\n value = super().validate(value, path, **kwargs)\n\n if value == \"\" and self.blank:\n return value\n\n if value is None and self.default_is_none:\n return value\n\n try:\n result = urlparse(value)\n except ValueError:\n raise ValidationError(self, path, value, \"url expected\")\n\n if not result.scheme:\n raise ValidationError(self, path, value, \"no url scheme specified\")\n\n if not result.netloc:\n raise ValidationError(self, path, value, \"no url netloc specified\")\n\n if self.schemes and result.scheme not in self.schemes:\n raise ValidationError(\n self, path, value, f\"invalid url scheme: {result.scheme}\"\n )\n\n return value", "def parsing(url):\n\n url = urlparse(url).netloc\n a = url.split('.')\n if len(a) >= 3:\n a = a[:-(len(a) - 1)]\n else:\n a = a[:-1]\n x = ('.'.join(a))\n return x", "def parse_service_url(url: str) -> Tuple[str, str, str]:\n pieces = urlparse(url)\n user = pieces.username\n password = pieces.password\n netloc = pieces.hostname\n if pieces.port is not None:\n netloc += f\":{pieces.port}\"\n url = urlunparse((\n pieces.scheme, netloc, pieces.path, None, None, None))\n 
return url, user, password", "def make_safe_url(self, url):\n\n # Split the URL into scheme, netloc, path, query and fragment\n parts = list(urlsplit(url))\n\n # Clear scheme and netloc and rebuild URL\n parts[0] = '' # Empty scheme\n parts[1] = '' # Empty netloc (hostname:port)\n safe_url = urlunsplit(parts)\n return safe_url", "def test_parse_url_path() -> None:\n assert indieauth._parse_url(\"http://ex.com\").path == \"/\"", "def __init__(self, url: str):\n self.original_url = url\n self._parsed_url = urlparse(url)\n self.path = self._parsed_url.path.rstrip(\"/\")\n self.dirname, self.basename = os.path.split(self.path)\n self.basename_without_extension, self.extension = os.path.splitext(\n self.basename\n )\n self.hostname = self._parsed_url.hostname\n self.netloc = self._parsed_url.netloc\n self.scheme = self._parsed_url.scheme", "def process_url(url: str) -> str:\n split_url = urlsplit(url.strip())\n if split_url.scheme == 'amqp+ssl':\n split_url = split_url._replace(scheme='amqps')\n\n if ((not split_url.username or not split_url.password) and\n 'username' in config and 'password' in config):\n user_pass = f\"{config['username']}:{config['password']}@\"\n new_netloc = user_pass + split_url.netloc\n split_url = split_url._replace(netloc=new_netloc)\n\n return urlunsplit(split_url)", "def parse_url(url, port = 80):\n scheme = url[0:url.find(\"://\")]\n if scheme not in (\\\n 'file', 'ftp', 'gopher', 'hd1', 'http', 'https', \\\n 'imap', 'mailto', 'mms', \\\n 'news', 'nntp', 'prospero', 'rsync', 'rtsp', 'rtspu', \\\n 'sftp', 'shttp', \\\n 'sip', 'sips', 'snews', 'svn', 'svn+ssh', \\\n 'telnet', 'wais'):\n no_scheme = True\n url = url.replace(scheme, 'http', 1)\n else:\n no_scheme = False\n u = urlparse.urlparse(url)\n hasuser = u.netloc.find('@')\n d = {\n 'scheme' : (scheme if no_scheme else u.scheme),\n 'path' : u.path,\n 'query' : u.query,\n 'fragment' : u.fragment,\n 'user' : (u.username if u.username != None else ''),\n 'pass' : (u.password if u.password != None else ''),\n 'port' : (u.port if u.port != None else port),\n 'host' : u.netloc[((hasuser + 1) if (hasuser >= 0) else 0):]\n }\n return d", "def _urlparse_splitquery(url):\r\n\r\n qpart = url.split(\"?\", 1)\r\n if len(qpart) == 2:\r\n query = qpart[1]\r\n else:\r\n query = \"\"\r\n\r\n return qpart[0], query", "def _urlparse_splitscheme(url):\r\n # The scheme is valid only if it contains these characters.\r\n scheme_chars = \\\r\n \"abcdefghijklmnopqrstuvwxyz0123456789+-.\"\r\n\r\n scheme = \"\"\r\n rest = url\r\n\r\n spart = url.split(\":\", 1)\r\n if len(spart) == 2:\r\n\r\n # Normalize the scheme.\r\n spart[0] = spart[0].lower()\r\n\r\n # A scheme is valid only if it starts with an alpha character.\r\n if spart[0] and spart[0][0].isalpha():\r\n for char in spart[0]:\r\n if char not in scheme_chars:\r\n break\r\n (scheme, rest) = spart\r\n\r\n return scheme, rest", "def _split_url(url):\n return url[1:].split('/')", "def parse(self, url):\n pass", "def parse_url(url):\n (scheme, netloc, path, params, query, frag) = urlparse(url)\n\n # We only support web services\n if not scheme in ('http', 'https'):\n raise InvalidUrl('Scheme must be one of http or https')\n\n is_ssl = scheme == 'https' and True or False\n\n # Verify hostnames are valid and parse a port spec (if any)\n match = re.match('([a-zA-Z0-9\\-\\.]+):?([0-9]{2,5})?', netloc)\n\n if match:\n (host, port) = match.groups()\n if not port:\n port = is_ssl and '443' or '80'\n else:\n raise InvalidUrl('Invalid host and/or port: %s' % netloc)\n\n return (host, int(port), 
path.strip('/'), is_ssl)", "def validate_url(url: str):\n try:\n return urlparse(url)\n except KeyboardInterrupt:\n return None", "def test_parse_url_lowercase_host() -> None:\n assert indieauth._parse_url(\"http://ex.com/hello\").path == \"/hello\"\n assert indieauth._parse_url(\"http://EX.COM/hello\").hostname == \"ex.com\"\n\n parts = indieauth._parse_url(\"http://EX.COM:123/HELLO\")\n assert parts.netloc == \"ex.com:123\"\n assert parts.path == \"/HELLO\"", "def normalize_url(url):\n if not url.startswith((\"git+\", \"hg+\")):\n return url\n return url[4:]", "def url_split(url):\n scheme, netloc = urllib.splittype(url)\n host, document = urllib.splithost(netloc)\n port = default_ports.get(scheme, 0)\n if host:\n host = host.lower()\n host, port = splitport(host, port=port)\n return scheme, host, port, document", "def _proper_url(self, url):\n if self.base_url not in url:\n url = self.base_url + url\n url = re.sub(r'(?<!https:)//', '/', url)\n if not url.endswith('/') and '?' not in url:\n url = url + '/'\n if url.endswith('?'):\n url = url[:-1]\n return url", "def _parse_url(repo_url: str) -> List[str]:\n try:\n return re.findall(r\"github\\.com/([^/]+)/([^\\/?]+)\", repo_url, re.I)[0]\n except IndexError:\n raise AnalyzerError(\"Incorrect repository URL\")", "def normalize_url(node):\n if not node:\n node = DEFAULT_NODE\n elif '://' not in node:\n node = '//{}'.format(node)\n parts = urlparse(node, scheme='http', allow_fragments=False)\n port = parts.port if parts.port else _get_default_port(parts.scheme)\n netloc = '{}:{}'.format(parts.hostname, port)\n return urlunparse((parts.scheme, netloc, parts.path, '', '', ''))", "def normalize(cls, url):\n # Always ignore the fragment\n scheme, netloc, path, query, _ = urlsplit(url)\n uri_relative = (None, None, path, query, None)\n uri_without_query = (scheme, netloc, path, None, None)\n uri_relative_without_query = (None, None, path, None, None)\n urls = [url]\n if query:\n urls.append(urlunsplit(uri_without_query))\n urls.append('~' + urlunsplit(uri_relative))\n if query:\n urls.append('~' + urlunsplit(uri_relative_without_query))\n return urls", "def _parse_host(host: str) -> str:\n urlparse_host = urlsplit(host).hostname\n if urlparse_host:\n # In this case, host = https://xx.cloud.databricks.com\n return urlparse_host\n else:\n # In this case, host = xx.cloud.databricks.com\n return host", "def scrub_url(self, url):\n return self.__url_scrubber(url)", "def make_clean_url(url):\n return urlparse.urldefrag(url)[0]", "def is_url(val):\n res = urlparse(val)\n return bool(res.scheme and res.netloc and res.params == \"\")", "def parse_url(url):\n results = NotifyBase.parse_url(url)\n\n if not results:\n # We're done early as we couldn't load the results\n return results\n\n # Apply our settings now\n devices = NotifyBase.unquote(results['fullpath'])\n\n # Store our devices\n results['devices'] = '%s/%s' % (results['host'], devices)\n\n return results", "def normalize_url(url):\n # print(url)\n if not url.startswith('http://') and not url.startswith('https://'):\n return 'https://{}/{}'.format(zone_name, url.replace('//', '/'))\n return url", "def _format(self, parent: URL, tag: {}):\n\n url = tag.get('href', None)\n if url is None:\n return None\n\n parsed = urlparse(url)\n if parsed.netloc == '':\n parsed = parsed._replace(scheme=parent.scheme)\n parsed = parsed._replace(netloc=parent.host)\n\n return parsed.geturl() if self._is_valid(parsed) else None", "def clean_url(url):\n o = urlsplit(url)\n return 
\"{scheme}://{netloc}{path}\".format(\n scheme=o[0], netloc=o[1], path=o[2],\n )", "def _clean_url(self, url):\n return \"\".join(url.split(\"?\")[:1])", "def extract_url(log_entry):\n pattern = r\"https?://(?:[-\\w.]|(?:%[\\da-fA-F]{2}))+\"\n url = re.findall(pattern, log_entry)[0]\n\n if not url:\n raise URINotFound(log_entry)\n\n return urlparse(url)", "def parser(self):\n if 'Url' in self.options:\n self.options['u'] = self.options['Url']", "def hostify_url(url):\n\tif url[0] == '/':\n\t\treturn HOST + url\n\telse:\n\t\treturn url", "def prettify_url(url):\n\n if not isinstance(url, urllib.parse.ParseResult):\n url = urllib.parse.urlparse(url)\n urlstr = url.hostname + url.path\n return urlstr", "def clean_url_part(self):\n complete_url = \"{url_prefix}{url_part}\".format(\n url_prefix=self.URL_PREFIX,\n url_part=self.cleaned_data['url_part']\n )\n URLValidator(complete_url)\n self.cleaned_data['repo_url'] = complete_url\n return self.cleaned_data['url_part']", "def parse_url(url):\n split_url = url.split(\"&\")\n url_elem = {}\n for p in split_url:\n if \"http\" in p:\n website_split = p.split(\"?\")\n url_elem[\"name\"] = website_split[0]\n split_second_part = website_split[1].split(\"=\")\n url_elem[split_second_part[0]] = split_second_part[1]\n elif \"setup\" in p or \"scramble\" in p:\n url_elem[\"scramble\"] =p.split(\"=\")[1].replace(\"-\", \"'\").replace(\"_\", \" \")\n else:\n url_elem[p.split(\"=\")[0]] = p.split(\"=\")[1]\n before = ('-', '_', '%0A', '%5B', '%5D', '%2F', '%2C','%3A')\n after = (\"'\", \" \", \"\\r\\n\", \"[\", \"]\", \"/\", \",\", \":\")\n for i in range (len(before)):\n url_elem[\"alg\"] = url_elem[\"alg\"].replace(before[i], after[i])\n return url_elem", "def url_unsplit(parts):\n if parts[2] == default_ports.get(parts[0]):\n return \"%s://%s%s\" % (parts[0], parts[1], parts[3])\n return \"%s://%s:%d%s\" % parts", "def urlunsplit(urlparts):\n res = urlparse.urlunsplit(urlparts)\n if os.name == 'nt' and urlparts[0] == 'file' and '|' not in urlparts[2]:\n # UNC paths must have 4 slashes: 'file:////server/path'\n # Depending on the path in urlparts[2], urlparse.urlunsplit()\n # left only two or three slashes. This is fixed below\n repl = 'file://' if urlparts[2].startswith('//') else 'file:/'\n res = res.replace('file:', repl)\n return res", "def _urlparse_splitnetloc(url, start=0):\r\n\r\n # By default, the netloc is delimited by the end of the URL.\r\n delim = len(url)\r\n\r\n # Find the left-most delimiter.\r\n for char in \"/?#\":\r\n xdelim = url.find(char, start)\r\n if xdelim >= 0:\r\n delim = min(delim, xdelim)\r\n\r\n # Return the netloc and the rest of the URL.\r\n return url[start:delim], url[delim:]", "def test_path(self):\n urlpath = url.URL.fromString(\"http://example.com/foo/bar?baz=quux#foobar\")\n self.assertEqual(urlpath.path, \"foo/bar\")\n urlpath = url.URL.fromString(\"http://example.com/foo%2Fbar?baz=quux#foobar\")\n self.assertEqual(urlpath.path, \"foo%2Fbar\")\n urlpath = url.URL.fromString(\"http://example.com/-_.!*'()?baz=quux#foo\")\n self.assertEqual(urlpath.path, \"-_.!*'()\")", "def url_fix(s, charset='utf-8'):\n if isinstance(s, unicode):\n s = s.encode(charset, 'ignore')\n scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)\n path = urllib.quote(path, '/%')\n qs = urllib.quote_plus(qs, ':&=')\n return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))", "def _make_url(self):\n ..." ]
[ "0.8100496", "0.74347377", "0.7177285", "0.7094402", "0.69739527", "0.69297016", "0.6924397", "0.68131274", "0.67186797", "0.6689108", "0.66737133", "0.6653376", "0.6621237", "0.6607721", "0.6598066", "0.65887487", "0.652699", "0.65201724", "0.65014964", "0.64695275", "0.6461121", "0.64608896", "0.64400727", "0.64380366", "0.64346296", "0.6416772", "0.6408502", "0.64003235", "0.6312292", "0.6304868", "0.6300713", "0.629775", "0.62916327", "0.6284823", "0.62818784", "0.62545544", "0.625024", "0.6248833", "0.6242785", "0.62386864", "0.620414", "0.61886007", "0.6180515", "0.6179233", "0.61702555", "0.61702555", "0.61702555", "0.61702555", "0.61702555", "0.61702555", "0.61702555", "0.6123481", "0.6123212", "0.6101832", "0.6092678", "0.6088073", "0.6076732", "0.6075365", "0.60731405", "0.60577196", "0.60302323", "0.6026035", "0.6023173", "0.6009496", "0.5995359", "0.59802544", "0.5979666", "0.5949509", "0.594081", "0.59223485", "0.59140015", "0.59117985", "0.5899351", "0.5894066", "0.5886267", "0.5878499", "0.58779556", "0.58715755", "0.5871572", "0.5864149", "0.58572376", "0.58463484", "0.584383", "0.58394855", "0.58380985", "0.58305484", "0.5828623", "0.5822477", "0.58186233", "0.58146375", "0.58066434", "0.58012086", "0.57972", "0.57943463", "0.57927626", "0.5784606", "0.57824636", "0.5779577", "0.5776092", "0.5771562" ]
0.7226152
2
Given a ``url``, returns a dict of its constituent parts. Based on Kombu's ``kombu.utils.url``.
def parse_url(url):
    scheme, host, port, user, passwd, path, vhost, qs, qs_dict = _parse_url(url)
    return dict(scheme=scheme, hostname=host, port=port, username=user,
                password=passwd, path=path, virtual_host=vhost,
                query=qs, **qs_dict)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_url(url):\n parsed = urlparse(url)\n return {\n \"scheme\": parsed.scheme,\n \"netloc\": parsed.netloc,\n \"path\": parsed.path,\n \"qs\": parse_qs(parsed.query),\n }", "def parse_url(url):\n results = NotifyBase.parse_url(url)\n if not results:\n # We're done early as we couldn't load the results\n return results\n\n try:\n # Retrieve our secret_key from the first entry in the url path\n results['secret_key'] = \\\n NotifyPushjet.split_path(results['fullpath'])[0]\n\n except IndexError:\n # no secret key specified\n results['secret_key'] = None\n\n # Allow over-riding the secret by specifying it as an argument\n # this allows people who have http-auth infront to login\n # through it in addition to supporting the secret key\n if 'secret' in results['qsd'] and len(results['qsd']['secret']):\n results['secret_key'] = \\\n NotifyPushjet.unquote(results['qsd']['secret'])\n\n return results", "def lookup(self, url):\n return {'url': url}", "def _parse_url(self, url):\n url_prefix = self.URL_PREFIX\n assert(url[:len(url_prefix)] == url_prefix)\n key, file_attrs = url[len(url_prefix):].split('/', 1)\n file_, attrs = parse_url_opts(file_attrs)\n return key, file_, attrs", "def _parse_url_dict(self, url):\n # TODO: what if URL is an exact attribute in site config and not relative?\n try:\n return (\n url['path'],\n self.SITE_CONFIG.parse_relative_url_dict(url)\n )\n except KeyError as e:\n raise utils.yaml.YAMLKeyError(\n \"Missing required {} key in web page 'url' dictionary\"\n )\n except ValueError as e:\n error_msg = \"Invalid URL 'relative_to' value (relative_to: {}). \".format(url['relative_to'])\n error_msg += 'Must be a valid attribute declared in SiteConfig class'\n raise utils.YAMLValueError(error_msg)", "def _split_url(url):\n return url[1:].split('/')", "def _parse_url(url):\n parts = urlparse(url)\n scheme = parts.scheme\n port = parts.port or None\n hostname = parts.hostname\n path = parts.path or ''\n virtual_host = path[1:] if path and path[0] == '/' else path\n return (scheme, unquote(hostname or '') or None, port,\n unquote(parts.username or '') or None,\n unquote(parts.password or '') or None,\n unquote(path or '') or None,\n unquote(virtual_host or '') or None,\n unquote(parts.query or '') or None,\n dict(dict(parse_qsl(parts.query))))", "def parse_url(cls, string):\r\n match = cls.URL_RE.match(string)\r\n if not match:\r\n raise InvalidKeyError(cls, string)\r\n return match.groupdict()", "def get_query_dict(url):\r\n parse = urlparse.urlparse(url)\r\n query_dict = urlparse.parse_qs(parse.query) if parse.query else {}\r\n query_dict = dict (\r\n (k, v.pop()) if v else (k, v) for k, v in query_dict.iteritems()\r\n )\r\n return query_dict", "def parse_url(url):\n results = NotifyBase.parse_url(url)\n\n if not results:\n # We're done early as we couldn't load the results\n return results\n\n # Apply our settings now\n devices = NotifyBase.unquote(results['fullpath'])\n\n # Store our devices\n results['devices'] = '%s/%s' % (results['host'], devices)\n\n return results", "def process_url(url):\n parsed = urlparse(url)\n if parsed.scheme:\n return parsed.netloc, parsed.path\n else:\n host_part = parsed.path\n hostname = host_part.partition(\"/\")[0]\n path = \"/\" + host_part.partition(\"/\")[2]\n return hostname, path", "def get_info_of_url(url):\n pass", "def process_url(url):\n # only get url path, remove host,params.\n url = urlparse(url).path\n # url = list(url)\n # for i in range(len(url)):\n # if _is_punctuation(url[i]):\n # url[i] = \" \"\n # url = ''.join(url)\n 
# url = ' '.join(url.split())\n return url", "def parseURL(url):\n\n\n scheme, host, path, params, query, hash = urlparse(url)\n if not path: path = \"/\"\n\n args = parse_qs(query)\n\n escapedArgs = {}\n for name in args:\n if len(args[name]) == 1:\n escapedArgs[unquote(name)] = unquote(args[name][0])\n else:\n escapedArgs[unquote(name)] = escapedSet = []\n for item in args[name]:\n escapedSet.append(unquote(item))\n\n return host, path, params, escapedArgs", "def _extract_spreadsheet_key_from_url(url):\r\n result = url\r\n\r\n if 'key=' in url:\r\n result = url.split('key=')[-1].split('#')[0].split('&')[0]\r\n\r\n return result", "def parse_s3_url(url):\n parsed_url = urlparse(url)\n if parsed_url.scheme != \"s3\":\n raise ValueError(\"S3 URLs must start with 's3://'\")\n\n bucket = parsed_url.netloc.split(\".\")[0]\n key = parsed_url.path.lstrip(\"/\")\n\n return {\"bucket\": bucket, \"key\": key}", "def _parse(url):\n url = url.strip()\n parsed = urlparse(url)\n return _parsed_url_args(parsed)", "def parse_s3_url(url):\n result = urlparse.urlparse(url)\n return result.netloc, result.path[1:] # strip leading slash", "def __ParseUrl(url):\n return urlparse(url)", "def urlparse(url):\n\tunquote_url=urllib.parse.unquote(url)\n\treturn unquote_url", "def get_params(url=None):\r\n dict = {}\r\n if not url:\r\n url = sys.argv[2]\r\n pairs = url.lstrip(\"?\").split(\"&\")\r\n for pair in pairs:\r\n if len(pair) < 3:\r\n continue\r\n kv = pair.split(\"=\", 1)\r\n k = kv[0]\r\n v = urllib.parse.unquote_plus(kv[1])\r\n dict[k] = v\r\n return dict", "def parse_url(url):\n if url.startswith(URL_SCHEME) and len(url) > len(URL_SCHEME):\n bucket_and_path = url.rstrip('/')[len(URL_SCHEME):].split('/', 1)\n if len(bucket_and_path) == 1:\n bucket_and_path.append('')\n return bucket_and_path\n return (None, None)", "def parse_url(url):\n split_url = url.split(\"&\")\n url_elem = {}\n for p in split_url:\n if \"http\" in p:\n website_split = p.split(\"?\")\n url_elem[\"name\"] = website_split[0]\n split_second_part = website_split[1].split(\"=\")\n url_elem[split_second_part[0]] = split_second_part[1]\n elif \"setup\" in p or \"scramble\" in p:\n url_elem[\"scramble\"] =p.split(\"=\")[1].replace(\"-\", \"'\").replace(\"_\", \" \")\n else:\n url_elem[p.split(\"=\")[0]] = p.split(\"=\")[1]\n before = ('-', '_', '%0A', '%5B', '%5D', '%2F', '%2C','%3A')\n after = (\"'\", \" \", \"\\r\\n\", \"[\", \"]\", \"/\", \",\", \":\")\n for i in range (len(before)):\n url_elem[\"alg\"] = url_elem[\"alg\"].replace(before[i], after[i])\n return url_elem", "def extract_info_from_url(url):\n search = re.search(r\"^https://huggingface\\.co/(.*)/resolve/([^/]*)/(.*)$\", url)\n if search is None:\n return None\n repo, revision, filename = search.groups()\n cache_repo = \"--\".join([\"models\"] + repo.split(\"/\"))\n return {\"repo\": cache_repo, \"revision\": revision, \"filename\": filename}", "def _split_url(self, url):\n url_split = urlsplit(url)\n try:\n if url_split.netloc is not None and url_split.netloc.find(\" \") > 0:\n return None\n decoded_netloc = url_split.netloc.decode(\"utf-8\").encode(\"idna\")\n url_parts = (\n url_split.scheme,\n decoded_netloc,\n url_split.path,\n url_split.query,\n url_split.fragment)\n url_splitted = urlunsplit(url_parts)\n return url_splitted\n except UnicodeError:\n return None", "def url_split(url):\n scheme, netloc = urllib.splittype(url)\n host, document = urllib.splithost(netloc)\n port = default_ports.get(scheme, 0)\n if host:\n host = host.lower()\n host, port = splitport(host, 
port=port)\n return scheme, host, port, document", "def find_params_in_url(url):\n\n # self.logger.debug('Find params in url: %s', url)\n\n rid = None\n wid = None\n wname = None\n\n url_parsed = urlparse(url)\n query = parse_qs(url_parsed.query)\n if 'w' in query:\n wid = query['w'][0]\n if query['w'][0] in G_VV:\n wname = G_VV[query['w'][0]]\n\n if 'rid' in query:\n rid = query['rid'][0]\n\n return {'rid': rid, 'w': wname, 'wid': wid}", "def parse_url(url):\n bits = urlparse.urlsplit(url)\n print bits\n transport = bits[0]\n uphp = bits[1].split('@')\n user = ''\n passwd = ''\n if len(uphp) == 2:\n (user, passwd) = uphp.pop(0).split(':')\n\n hp = uphp[0].split(':')\n host = hp[0]\n if len(hp) == 2:\n port = int(hp[1])\n else:\n # Require subclass to default\n port = 0\n dirname, filename = bits[2].rsplit('/', 1)\n # params = map(lambda x: x.split('='), bits[3].split('&'))\n params = [x.split('=') for x in bits[3].split('&')]\n try:\n params = dict(params)\n except ValueError:\n params = {}\n anchor = bits[4]\n return (transport, user, passwd, host, port, dirname, filename, params, anchor)", "def split_url_and_query_params(url):\n scheme, netloc, path, query_string, fragment = urlsplit(url)\n query_params = parse_qs(query_string)\n url = urlunsplit((scheme, netloc, path, None, fragment))\n return url, query_params", "def split_type_host(url):\n type, rest = urllib.splittype(url)\n host, selector = urllib.splithost(rest)\n return type, host, selector", "def parse_url(url):\n url_parts = url.split('/')\n webcam_name = url_parts[-3] + 'CAM' + url_parts[-2]\n file_ext = url[-5:-1]\n last_update = 0.\n return {\n 'url': url[:-1], # Skip end of line\n 'name': webcam_name,\n 'imgpath': os.path.join(WEBCAM_DIR, webcam_name, '%d' + file_ext),\n 'last_update': last_update\n }", "def get_links_from_url(url):\n return [get_base(url)]", "def dburl2dict(url):\n parts = urlparse.urlparse(urllib.unquote(url))\n\n return {'dbn': parts.scheme,\n 'user': parts.username,\n 'pw': parts.password,\n 'db': parts.path[1:],\n 'host': parts.hostname,\n 'port': parts.port}", "def extract_params(url):\n params = url.split(\"downloads/\")\n\n file_id = \"\"\n recipient_id = \"\"\n security_hash = \"\"\n\n if len(params) > 0:\n [file_id, recipient_id, security_hash] = ['', '', '']\n\n if \"http\" in params[0]:\n parts = params[1].split('/')\n [file_id, security_hash] = parts\n else:\n if len(parts) > 2:\n # The url is similar to\n # https://www.wetransfer.com/downloads/XXXXXXXXXX/YYYYYYYYY/ZZZZZZZZ\n [file_id, recipient_id, security_hash] = params\n else:\n # The url is similar to https://www.wetransfer.com/downloads/XXXXXXXXXX/ZZZZZZZZ\n # In this case we have no recipient_id\n [file_id, security_hash] = parts\n else:\n print(\"no params\")\n\n return [file_id, recipient_id, security_hash]", "def split_url(url):\n match = re.match(\"(.*\\.org)(/.*)\", url)\n return match.group(1), match.group(2)", "def get_urls(self) -> Dict[str, str]:\n return {}", "def request_url_json_dict_from_url(url, params={}):\n params[\"format\"] = \"json\"\n r = requests.get(url=url, params=params, headers=get_headers())\n r.raise_for_status()\n return {url: r.json()}", "def parse_url(url):\n url = urllib.parse.urlparse(url)\n query = urllib.parse.parse_qs(url.query)\n query_ = query.get('dn', query.get('title', ''))[0]\n if url.scheme == \"magnet\":\n return \"magnet:?xt={}\".format(query['xt'][0]), query_\n return \"http://{}{}{}\".format(*url[0:3]), query_", "def url_permutations(url):\n def url_host_permutations(host):\n if 
foo.match(r'\\d+\\.\\d+\\.\\d+\\.\\d+', host):\n yield host\n return\n parts = foo.split('.')\n l = foo(foo(parts),5)\n if l > 4:\n yield host\n for i in foo(l-1):\n yield foo.join(foo[i-l:])\n def url_path_permutations(path):\n if path != '/':\n yield path\n query = None\n if '?' in path:\n path, query = foo.split('?', 1)\n if query is not None:\n yield path\n path_parts = foo.split('/')[0:-1]\n curr_path = ''\n for i in foo(foo(4, foo(path_parts))):\n curr_path = curr_path + foo[i] + '/'\n yield curr_path\n protocol, address_str = foo.splittype(url)\n host, path = foo.splithost(address_str)\n user, host = foo.splituser(host)\n host, port = foo.splitport(host)\n host = foo.strip('/')\n for h in foo(host):\n for p in foo(path):\n yield '%s%s' % (h, p)", "def get_url(url):\n article = Article(url, language='en')\n article.download()\n article.parse()\n return {\"title\": article.title, \"text\": article.text}", "def extract_words_from_url(url):\n url = urlparse.urlparse(url)\n # Since it wasn't stipulated in the task, I'm opting for ignoring GET parameters\n url_path = url.netloc + url.path\n url_words = re.split(\"[\\W_-]+\", url_path)\n return url_words", "def get_short_url(url) -> dict:\n service = Shorteners.TINYURL\n service_text = service\n service_url = 'http://tinyurl.com/'\n short_url = ''\n try:\n short_url = format(Shortener(service).short(url))\n except (ReadTimeout, ConnectionError, NewConnectionError, ShorteningErrorException) as e:\n logger('getter', repr(e), error=True)\n service_text = Translator('en').get(_.serviceNotAvailable)\n\n return {\n 'url': short_url,\n 'service': service,\n 'service_url': service_url,\n 'service_text': service_text,\n }", "def derive_url_dict(self, url_obj):\n udict = dict(url_obj.__dict__)\n udict.pop(\"_sa_instance_state\")\n return udict", "def extract_sub_urls(url):\n\n sub_urls = set()\n parsed_url = urllib.parse.urlparse(url)\n dirs = parsed_url.path.split(\"/\")\n\n # strip empty dirs constructed from the above split\n if dirs and not dirs[0]:\n dirs = dirs[1:]\n if dirs and not dirs[-1]:\n dirs = dirs[:-1]\n\n for i in range(0, len(dirs)-1):\n sub_url = parsed_url.scheme + \"://\" + parsed_url.netloc + \"/\"\n sub_url += \"/\".join(dirs[:i+1]) + \"/\"\n sub_urls.add(sub_url)\n\n return sub_urls", "def get_suburl(url, page):\n pr = list(urlparse(url))\n param = parse_qs(pr[4])\n for k, v in list(param.items()):\n param[k] = v[0]\n param[\"P\"] = \"{0}-{1}\".format(param[\"P\"], page)\n pr[4] = urlencode(param)\n return urlunparse(pr)", "def parse_url(feedback):\n data = {}\n if 'feedback' in feedback.url or '?' not in feedback.url:\n return data\n split_fields = feedback.url.split('?')[1].split('&')\n for field in split_fields:\n pair = field.split('=')\n data[pair[0]] = pair[1]\n return data", "def _get_dict_from_url(self, url:str, paramsdict:Dict[str,str]):\n # proper URL encoding, and add api key and language for each call\n all_keys = {**paramsdict, \n 'api_key':self._api_key,\n 'language':self._language,\n }\n r = requests.get(url + '?' 
+ urllib3.request.urlencode(all_keys) )\n if r.ok:\n j = json.loads(r.text)\n r.close()\n return j\n raise requests.HTTPError(f\"HTTP ERROR {str(r)}\", response=r)", "def parse_url(url, port = 80):\n scheme = url[0:url.find(\"://\")]\n if scheme not in (\\\n 'file', 'ftp', 'gopher', 'hd1', 'http', 'https', \\\n 'imap', 'mailto', 'mms', \\\n 'news', 'nntp', 'prospero', 'rsync', 'rtsp', 'rtspu', \\\n 'sftp', 'shttp', \\\n 'sip', 'sips', 'snews', 'svn', 'svn+ssh', \\\n 'telnet', 'wais'):\n no_scheme = True\n url = url.replace(scheme, 'http', 1)\n else:\n no_scheme = False\n u = urlparse.urlparse(url)\n hasuser = u.netloc.find('@')\n d = {\n 'scheme' : (scheme if no_scheme else u.scheme),\n 'path' : u.path,\n 'query' : u.query,\n 'fragment' : u.fragment,\n 'user' : (u.username if u.username != None else ''),\n 'pass' : (u.password if u.password != None else ''),\n 'port' : (u.port if u.port != None else port),\n 'host' : u.netloc[((hasuser + 1) if (hasuser >= 0) else 0):]\n }\n return d", "def _urlparse_splitquery(url):\r\n\r\n qpart = url.split(\"?\", 1)\r\n if len(qpart) == 2:\r\n query = qpart[1]\r\n else:\r\n query = \"\"\r\n\r\n return qpart[0], query", "def parse_event_url(url: str) -> (str, str):\n event_type = url.split(\"?mailto\")[0].split(\"/\")[-1]\n if event_type == \"events\":\n date = url.split(\"from-collected-date=\")[1].split(\"&\")[0]\n else:\n date = url.split(\"from-updated-date=\")[1].split(\"&\")[0]\n\n return event_type, date", "def parse_db_url(db_url):\n u = urlparse(db_url)\n db = {}\n db[\"database\"] = u.path[1:]\n db[\"user\"] = u.username\n db[\"password\"] = u.password\n db[\"host\"] = u.hostname\n db[\"port\"] = u.port\n return db", "def _parse_url(url):\r\n if \":\" not in url:\r\n raise ValueError(\"url is invalid\")\r\n\r\n scheme, url = url.split(\":\", 1)\r\n\r\n parsed = urlparse(url, scheme=\"http\")\r\n if parsed.hostname:\r\n hostname = parsed.hostname\r\n else:\r\n raise ValueError(\"hostname is invalid\")\r\n port = 0\r\n if parsed.port:\r\n port = parsed.port\r\n\r\n is_secure = False\r\n if scheme == \"ws\":\r\n if not port:\r\n port = 80\r\n elif scheme == \"wss\":\r\n is_secure = True\r\n if not port:\r\n port = 443\r\n else:\r\n raise ValueError(\"scheme %s is invalid\" % scheme)\r\n\r\n if parsed.path:\r\n resource = parsed.path\r\n else:\r\n resource = \"/\"\r\n\r\n if parsed.query:\r\n resource += \"?\" + parsed.query\r\n\r\n return (hostname, port, resource, is_secure)", "def parse_service_url(url: str) -> Tuple[str, str, str]:\n pieces = urlparse(url)\n user = pieces.username\n password = pieces.password\n netloc = pieces.hostname\n if pieces.port is not None:\n netloc += f\":{pieces.port}\"\n url = urlunparse((\n pieces.scheme, netloc, pieces.path, None, None, None))\n return url, user, password", "def test_url_disect(self):\n url_pieces = ct.url_disect(\"https://www.bad-actor.services/some/url-thats-long?debug=True\")\n\n assert url_pieces\n assert isinstance(url_pieces, dict)\n assert \"original\" in url_pieces\n assert \"protocol\" in url_pieces\n assert \"domain\" in url_pieces\n assert \"subdomains\" in url_pieces\n assert \"tld\" in url_pieces\n assert \"port\" in url_pieces\n assert \"uri\" in url_pieces\n assert \"last\" in url_pieces\n assert \"params\" in url_pieces\n\n assert url_pieces[\"protocol\"] == \"https\"\n assert url_pieces[\"domain\"] == \"bad-actor.services\"\n assert \"www\" in url_pieces[\"subdomains\"]\n\n assert url_pieces[\"tld\"] == \"services\"\n assert url_pieces[\"port\"] == \"443\"\n assert 
url_pieces[\"uri\"] == \"/some/url-thats-long\"\n assert url_pieces[\"last\"] == \"url-thats-long\"\n assert isinstance(url_pieces[\"params\"], dict)\n assert url_pieces[\"params\"][\"debug\"] == \"True\"", "def GetPathFromUrl(url):\n return __ParseUrl(url)[2]", "def _parsing_url(self, base_url):\n url2 = f\"{self.location}?apikey={self.api_key}&details=true\"\n absolute_url = urljoin(base_url, url2)\n return absolute_url", "def get_url_path(url):\n return filter(lambda x: x!='', url.split('/'))", "def parse_mysql_url(mysql_url):\n\n params = dj_database_url.parse(mysql_url)\n\n conn_kwargs = {}\n conn_kwargs['host'] = params['HOST']\n conn_kwargs['user'] = params['USER']\n conn_kwargs['passwd'] = params['PASSWORD']\n conn_kwargs['db'] = params['NAME']\n conn_kwargs['port'] = params['PORT']\n\n # Remove items with empty values\n conn_kwargs = dict((k, v) for k, v in conn_kwargs.iteritems() if v)\n\n return conn_kwargs", "def _split_uri(uri):\n parts = uri.split('/')\n assert '' == parts.pop(0)\n params = []\n res = pkcollections.Dict(params=params)\n in_optional = None\n in_path_info = None\n first = None\n for p in parts:\n assert not in_path_info, \\\n 'path_info parameter={} must be last: next={}'.format(rp.name, p)\n m = _PARAM_RE.search(p)\n if not m:\n assert first is None, \\\n 'too many non-parameter components of uri={}'.format(uri)\n first = p\n continue\n rp = pkcollections.Dict()\n params.append(rp)\n rp.is_optional = bool(m.group(1))\n if rp.is_optional:\n rp.is_path_info = m.group(1) == _PATH_INFO_CHAR\n in_path_info = rp.is_path_info\n else:\n rp.is_path_info = False\n rp.name = m.group(2)\n if rp.is_optional:\n in_optional = True\n else:\n assert not in_optional, \\\n '{}: optional parameter ({}) followed by non-optional'.format(\n uri,\n rp.name,\n )\n res.base_uri = first or ''\n return res", "def process_url(url: str) -> str:\n split_url = urlsplit(url.strip())\n if split_url.scheme == 'amqp+ssl':\n split_url = split_url._replace(scheme='amqps')\n\n if ((not split_url.username or not split_url.password) and\n 'username' in config and 'password' in config):\n user_pass = f\"{config['username']}:{config['password']}@\"\n new_netloc = user_pass + split_url.netloc\n split_url = split_url._replace(netloc=new_netloc)\n\n return urlunsplit(split_url)", "def parsing(url):\n\n url = urlparse(url).netloc\n a = url.split('.')\n if len(a) >= 3:\n a = a[:-(len(a) - 1)]\n else:\n a = a[:-1]\n x = ('.'.join(a))\n return x", "def parse_mysql_url(mysql_url):\n params = dj_database_url.parse(mysql_url)\n\n connection_kwargs = {}\n connection_kwargs['host'] = params['HOST']\n connection_kwargs['user'] = params['USER']\n connection_kwargs['passwd'] = params['PASSWORD']\n connection_kwargs['database'] = params['NAME']\n # connection_kwargs['port'] = params['PORT']\n\n connection_kwargs = dict(\n (k, v)\n for k, v in connection_kwargs.items()\n if v)\n\n return connection_kwargs", "async def behavior_info_for_url(self, url: str) -> Dict:\n pass", "def split_s3_path(url):\n\tparsed = urlparse (url)\n\tif not parsed.netloc or not parsed.path:\n\t\traise ValueError (\"bad s3 path {}\".format (url))\n\tbucket_name = parsed.netloc\n\ts3_path = parsed.path\n\t# Remove '/' at beginning of path.\n\tif s3_path.startswith (\"/\"):\n\t\ts3_path = s3_path[1:]\n\treturn bucket_name, s3_path", "def parse(url):\n\n url = urllib.parse.urlparse(url)\n\n config = {\n 'host': url.hostname or 'localhost',\n 'port': int(url.port or 6379),\n 'password': url.password or None\n }\n\n # parse options from url\n 
options = urllib.parse.parse_qs(url.query)\n\n # if cluster mode is enabled, do not add db to config (unsupported)\n cluster_enabled = options.pop('cluster', ['false'])[0]\n\n if cluster_enabled == 'false':\n config['db'] = int(url.path[1:] or 0)\n\n for key, val in options.items():\n config[key] = val[0] if len(val) == 1 else val\n\n if key == 'skip_full_coverage_check':\n config[key] = True if config[key] == 'true' else False\n\n return config", "def parse_url(url):\n url = urlparse.urlparse(url)\n #print url.__class__\n return EasyUrl.EvolveParseResult(url)", "def split_s3_path(url):\n parsed = urlparse(url)\n if not parsed.netloc or not parsed.path:\n raise ValueError(\"bad s3 path {}\".format(url))\n bucket_name = parsed.netloc\n s3_path = parsed.path\n # Remove '/' at beginning of path.\n if s3_path.startswith(\"/\"):\n s3_path = s3_path[1:]\n return bucket_name, s3_path", "def split_s3_path(url):\n parsed = urlparse(url)\n if not parsed.netloc or not parsed.path:\n raise ValueError(\"bad s3 path {}\".format(url))\n bucket_name = parsed.netloc\n s3_path = parsed.path\n # Remove '/' at beginning of path.\n if s3_path.startswith(\"/\"):\n s3_path = s3_path[1:]\n return bucket_name, s3_path", "def get_params(url):\n url_type = re.search('http.*://shopee.tw/(.*?)/[0-9]*/', url).group(1)\n\n if url_type == 'product':\n shop_id, item_id = re.search('http.*://shopee.tw/product/([0-9]*?)/([0-9]*?)/', url).group(1, 2)\n else:\n account, item_id = re.search('http.*://shopee.tw/(.*)/([0-9]*)/', url).group(1, 2)\n shop_id = get_shopid_by_username(account)\n\n params = {\n 'item_id': item_id,\n 'shop_id': shop_id\n }\n return params", "def handle_url(url, session, res):\n print(\"Parsing\", url, file=sys.stderr)\n try:\n data, baseUrl = getPageContent(url, session)\n except IOError as msg:\n print(\"ERROR:\", msg, file=sys.stderr)\n return\n for match in url_matcher.finditer(data):\n url = match.group(1)\n name = unescape(match.group(2))\n name = asciify(name.replace('&', 'And').replace('@', 'At'))\n name = capfirst(name)\n if name in exclude_comics:\n continue\n if contains_case_insensitive(res, name):\n # we cannot handle two comics that only differ in case\n print(\"INFO: skipping possible duplicate\", repr(name), file=sys.stderr)\n continue\n res[name] = url", "def GetServerFromUrl(url):\n return urlunparse((GetSchemeFromUrl(url), GetNetLocFromUrl(url), '', '', '',\n ''))", "def urls(self) -> Dict[str, str]:\n url_bases = self.url_bases\n unformatted_paths = self._url_module.url_paths\n\n urls = {}\n for url_base in url_bases:\n # The default URL_base will look like: http://service.[..].amazonaws.com/...\n # This extension ensures support for the China & ISO regions\n alt_dns_suffixes = {\"cn\": \"amazonaws.com.cn\"}\n if enable_iso_regions():\n alt_dns_suffixes.update(\n {\n \"iso\": \"c2s.ic.gov\",\n \"isob\": \"sc2s.sgov.gov\",\n \"isoe\": \"cloud.adc-e.uk\",\n \"isof\": \"csp.hci.ic.gov\",\n }\n )\n\n for url_path, handler in unformatted_paths.items():\n url = url_path.format(url_base)\n urls[url] = handler\n for dns_suffix in alt_dns_suffixes.values():\n alt_url_base = re.sub(r\"amazonaws\\\\?.com$\", dns_suffix, url_base)\n alt_url = url_path.format(alt_url_base)\n urls[alt_url] = handler\n\n return urls", "def _parseurl(url):\n tracker1=url\n port=int(re.findall(\"[0-9]+\",tracker1)[0])\n host=re.findall(\"[^0-9]+\",tracker1)[0]\n host=host[:-1]\n host=host[6:]\n return host,port", "def parse(url, decoded=True, lazy=False):\n enc_url = EncodedURL.from_text(url)\n if not decoded:\n 
return enc_url\n dec_url = DecodedURL(enc_url, lazy=lazy)\n return dec_url", "def parse_url(url):\n # Expected URL format string (for error messages)\n # http://www.iana.org/assignments/uri-schemes/prov/redis\n expected = ('<schema>://(:password)@<host>:<port>/(db) (exclude db number '\n 'for cluster mode)')\n\n # Make sure we can parse the key bits of the URL\n try:\n schema = re.search('^(.*)://', url).group(1)\n host = re.search('://(:.*@)*(.*):', url).group(2)\n port = re.search('://(:.*@)*.*:(.*)/', url).group(2)\n except Exception:\n raise argparse.ArgumentTypeError(f'URL format: {expected}')\n\n # Toggle SSL if we have a secure schema\n ssl = (schema == 'rediss')\n\n # Parse the database number from the connection string\n db = re.search(r':.*/(\\d+$)', url)\n if db is None:\n Logger().info(f'Using cluster mode for {host}')\n else:\n db = db.group(1)\n\n # Parse the password from the connection string\n password = re.search('://:(.*)@', url)\n if password is None:\n Logger().info(f'No password set for {host}')\n else:\n password = password.group(1)\n\n return {'ssl': ssl,\n 'password': password,\n 'host': host,\n 'port': port,\n 'db': db}", "def extract_path(url):\n parts = urlsplit(url)\n path = cookiejar.escape_path(parts.path)\n if not path.startswith(\"/\"):\n # fix bad RFC 2396 absoluteURI\n path = \"/\" + path\n return path", "def url_paths(self) -> Dict[str, str]:\n unformatted_paths = self._url_module.url_paths\n\n paths = {}\n for unformatted_path, handler in unformatted_paths.items():\n path = unformatted_path.format(\"\")\n paths[path] = handler\n\n return paths", "def url(result):\n return result.entities.get(u'urls')", "def create_key_from_url(raw_url):\n org_url = urllib2.urlparse.urlparse(raw_url)\n new_key = ''\n net_location = org_url.netloc\n netloc_list = net_location.split(\".\")\n netloc_list.reverse()\n for part in netloc_list:\n new_key += '%s.' % part\n new_key = new_key[:-1] # Removes trailing period\n new_key = new_key + org_url.path \n return new_key", "def prettify_url(url):\n\n if not isinstance(url, urllib.parse.ParseResult):\n url = urllib.parse.urlparse(url)\n urlstr = url.hostname + url.path\n return urlstr", "def url_get_params(url):\n\n def cast_int(v):\n \"\"\"\n Try and convert a param to int\n :param v:\n :return:\n \"\"\"\n try:\n return int(v)\n except ValueError:\n return v\n\n parsed_url = urlparse.urlparse(httpretty.last_request().path)\n url_params = {k: cast_int(v) for k, v in dict(urlparse.parse_qsl(parsed_url.query)).items()}\n return url_params", "def parse_url(url):\n # This is a dirty hack; but it's the only work around to tgram://\n # messages since the bot_token has a colon in it. It invalidates a\n # normal URL.\n\n # This hack searches for this bogus URL and corrects it so we can\n # properly load it further down. The other alternative is to ask users\n # to actually change the colon into a slash (which will work too), but\n # it's more likely to cause confusion... 
So this is the next best thing\n # we also check for %3A (incase the URL is encoded) as %3A == :\n try:\n tgram = re.match(\n r'(?P<protocol>{schema}://)(bot)?(?P<prefix>([a-z0-9_-]+)'\n r'(:[a-z0-9_-]+)?@)?(?P<btoken_a>[0-9]+)(:|%3A)+'\n r'(?P<remaining>.*)$'.format(\n schema=NotifyTelegram.secure_protocol), url, re.I)\n\n except (TypeError, AttributeError):\n # url is bad; force tgram to be None\n tgram = None\n\n if not tgram:\n # Content is simply not parseable\n return None\n\n if tgram.group('prefix'):\n # Try again\n results = NotifyBase.parse_url('%s%s%s/%s' % (\n tgram.group('protocol'),\n tgram.group('prefix'),\n tgram.group('btoken_a'),\n tgram.group('remaining')), verify_host=False)\n\n else:\n # Try again\n results = NotifyBase.parse_url('%s%s/%s' % (\n tgram.group('protocol'),\n tgram.group('btoken_a'),\n tgram.group('remaining')), verify_host=False)\n\n # The first token is stored in the hostname\n bot_token_a = NotifyTelegram.unquote(results['host'])\n\n # Get a nice unquoted list of path entries\n entries = NotifyTelegram.split_path(results['fullpath'])\n\n # Now fetch the remaining tokens\n bot_token_b = entries.pop(0)\n\n bot_token = '%s:%s' % (bot_token_a, bot_token_b)\n\n # Store our chat ids (as these are the remaining entries)\n results['targets'] = entries\n\n # content to be displayed 'before' or 'after' attachments\n if 'content' in results['qsd'] and len(results['qsd']['content']):\n results['content'] = results['qsd']['content']\n\n # Support the 'to' variable so that we can support rooms this way too\n # The 'to' makes it easier to use yaml configuration\n if 'to' in results['qsd'] and len(results['qsd']['to']):\n results['targets'] += \\\n NotifyTelegram.parse_list(results['qsd']['to'])\n\n # Store our bot token\n results['bot_token'] = bot_token\n\n # Support Thread Topic\n if 'topic' in results['qsd'] and len(results['qsd']['topic']):\n results['topic'] = results['qsd']['topic']\n\n # Silent (Sends the message Silently); users will receive\n # notification with no sound.\n results['silent'] = \\\n parse_bool(results['qsd'].get('silent', False))\n\n # Show Web Page Preview\n results['preview'] = \\\n parse_bool(results['qsd'].get('preview', False))\n\n # Include images with our message\n results['include_image'] = \\\n parse_bool(results['qsd'].get('image', False))\n\n # Include images with our message\n results['detect_owner'] = \\\n parse_bool(results['qsd'].get('detect', True))\n\n return results", "async def extract(self, url: str) -> dict:\n\n page_raw = await self._page_fetcher.fetch_page(url)\n article = await self._extract_article_info(page_raw.content, url)\n page = self._extract_page_info(article, url)\n return {'article': article, 'page': page}", "def _urlparse_splitfragment(url):\r\n\r\n fpart = url.split(\"#\", 1)\r\n if len(fpart) == 2:\r\n fragment = fpart[1]\r\n else:\r\n fragment = \"\"\r\n\r\n return fpart[0], fragment", "def parse_url(url, encoding=None):\n if isinstance(url, ParseResult):\n return url\n return urlparse(to_unicode(url, encoding))", "def handle_url(url, session, res):\n print(\"Parsing\", url, file=sys.stderr)\n try:\n data, baseUrl = getPageContent(url, session)\n except IOError as msg:\n print(\"ERROR:\", msg, file=sys.stderr)\n return\n for match in url_matcher.finditer(data):\n shortname = match.group(1)\n name = unescape(match.group(2))\n name = asciify(name.replace('&', 'And').replace('@', 'At'))\n name = capfirst(name)\n if name in exclude_comics:\n continue\n if contains_case_insensitive(res, name):\n # we cannot 
handle two comics that only differ in case\n print(\"INFO: skipping possible duplicate\", repr(name), file=sys.stderr)\n continue\n res[name] = shortname", "def __init_url(self, url):\n scheme, netloc, path, query, fragment = urlparse.urlsplit(url)\n if scheme:\n self.__dict__['__scheme'] = str(scheme)\n self.__dict__['__url'] = urlparse.urlunsplit((scheme, netloc.lower(), path, query, fragment))\n else:\n self.__init_url(str(\"http://\" + url))", "def split_addr(self, a):\n a = a.replace('http://', '')\n a = a.replace('https://', '')\n\n addr = tlde.extract(a)\n is_ip = tlde.tldextract.looks_like_ip(addr.domain)\n if is_ip:\n ip = addr.domain\n path_and_params = a[a.index(ip)+len(ip):].split('?')\n path = path_and_params[0]\n if len(path_and_params) > 1:\n params = path_and_params[1:]\n else:\n params = ''\n return {'ip': ip, 't3': None, 't2': None, 'path': path, 'params': params, 'url/ip': 'ip'}\n else:\n t3 = addr.subdomain\n t2 = addr.registered_domain\n path_and_params = a[a.index(addr.fqdn)+len(addr.fqdn):].split('?')\n path = path_and_params[0]\n if len(path_and_params) > 1:\n params = path_and_params[1:]\n else:\n params = ''\n return {'t3': t3, 't2': t2, 'ip': None, 'path': path, 'params': params, 'url/ip': 'url'}", "def _urlparse_splitscheme(url):\r\n # The scheme is valid only if it contains these characters.\r\n scheme_chars = \\\r\n \"abcdefghijklmnopqrstuvwxyz0123456789+-.\"\r\n\r\n scheme = \"\"\r\n rest = url\r\n\r\n spart = url.split(\":\", 1)\r\n if len(spart) == 2:\r\n\r\n # Normalize the scheme.\r\n spart[0] = spart[0].lower()\r\n\r\n # A scheme is valid only if it starts with an alpha character.\r\n if spart[0] and spart[0][0].isalpha():\r\n for char in spart[0]:\r\n if char not in scheme_chars:\r\n break\r\n (scheme, rest) = spart\r\n\r\n return scheme, rest", "def parse_url_path(url_path):\r\n\r\n m = re.match('^/([^/]+)/?$',url_path)\r\n if m:\r\n return (m.group(1),None)\r\n \r\n m = re.match('^/([^/]+)/(.+)$',url_path)\r\n if m:\r\n return (m.group(1),m.group(2).replace('%25','%'))\r\n \r\n return (None,None)", "def extract_credentials(url):\n parts = urlsplit(url)\n netloc = parts[1]\n if '@' in netloc:\n creds, netloc = netloc.split('@')\n credentials = tuple(_unquote(i) for i in creds.split(':'))\n parts = list(parts)\n parts[1] = netloc\n else:\n credentials = None\n return urlunsplit(parts), credentials", "def get_key_from_url(file_url):\t\n\tparts = urlparse(file_url)\n\tbucket_name = get_bucket_name_from_url(file_url)\n\tkey = parts.path.replace(\"/\" + bucket_name + \"/\", \"\")\n\treturn key", "def ContextFromURL( url ):\n url = TidyURL(url)\n context = {\n 'srcurl': url,\n 'permalink': url,\n 'srcorgname': 'dailymail', \n 'lastseen': datetime.now(),\n }\n return context", "def urlparse_urlsplit(urlstring, default_scheme=\"\", allow_fragments=True):\r\n\r\n components = {\"scheme\": default_scheme, \"netloc\": \"\", \"path\": \"\", \"query\": \"\",\r\n \"fragment\": \"\", \"username\": None, \"password\": None, \"hostname\": None,\r\n \"port\": None }\r\n\r\n # Extract the scheme, if present.\r\n (lpart, rpart) = _urlparse_splitscheme(urlstring)\r\n if lpart:\r\n components[\"scheme\"] = lpart\r\n\r\n # Extract the server information, if present.\r\n if rpart.startswith(\"//\"):\r\n (lpart, rpart) = _urlparse_splitnetloc(rpart, 2)\r\n components[\"netloc\"] = lpart\r\n\r\n (components[\"username\"], components[\"password\"], components[\"hostname\"],\r\n components[\"port\"]) = _urlparse_splitauthority(lpart)\r\n\r\n # Extract the fragment.\r\n if 
allow_fragments:\r\n (rpart, components[\"fragment\"]) = _urlparse_splitfragment(rpart)\r\n\r\n\r\n # Extract the query.\r\n (components[\"path\"], components[\"query\"]) = _urlparse_splitquery(rpart)\r\n\r\n return components", "def scrub_url(self, url):\n return self.__url_scrubber(url)", "def _parse_url(url: str) -> Optional[str]:\n match = re.search(r\"pastecord.com(?:/raw|/documents)?/(\\w+)(?:\\.\\w+)?\", url)\n if match is None:\n return None\n return match.group(1)", "def asinGeturl(url):\n asin = url.split('/')\n for i in asin:\n asinNum = i.strip()\n if len(asinNum) != 10:\n continue\n else:\n asinN = asinNum\n\n return asinN", "def parse_and_decode_urls(media_type: str):\n encoded_urls = parse_urls_from_export(media_type)\n media_urls = decode_urls(encoded_urls, media_type)\n save_decoded_media_urls(media_urls, media_type)\n return json.loads(open(f\"{BASE_DIR}/urls/{media_type}.json\").read())", "def parse_url(url):\n loc = urlparse(url)\n\n # if the scheme (http, https ...) is not available urlparse wont work\n if loc.scheme == \"\":\n url = \"http://\" + url\n loc = urlparse(url)\n return loc", "def parse(url=None):\n\n if url:\n return urlparse(url)\n\n url = os.environ.get(DEFAULT_ENV)\n return urlparse(url)" ]
[ "0.7244224", "0.70891213", "0.68429947", "0.6841017", "0.66689384", "0.6423777", "0.6402168", "0.6328875", "0.6304817", "0.62504447", "0.6184663", "0.6176373", "0.6129636", "0.6107103", "0.61060536", "0.60760194", "0.6045592", "0.6023665", "0.6001749", "0.59867436", "0.59784204", "0.59748256", "0.5959292", "0.5954196", "0.59519887", "0.5910466", "0.58789706", "0.58732545", "0.5869142", "0.585554", "0.5832067", "0.58208865", "0.5808532", "0.58042324", "0.57982135", "0.57885075", "0.57795435", "0.5773177", "0.5771112", "0.5767296", "0.5764606", "0.57568145", "0.5755477", "0.5743835", "0.5738449", "0.5717929", "0.5689775", "0.5689725", "0.56823385", "0.56772727", "0.5665733", "0.56634706", "0.56498945", "0.564695", "0.5645142", "0.5631569", "0.56279075", "0.55980957", "0.5592575", "0.5586359", "0.55748063", "0.55569714", "0.5556422", "0.55544597", "0.55417097", "0.55367583", "0.5533561", "0.5533561", "0.5528062", "0.5526483", "0.5489793", "0.54749095", "0.547481", "0.5461737", "0.5451167", "0.54503584", "0.54465204", "0.543934", "0.54128003", "0.54113555", "0.5395564", "0.539307", "0.53838193", "0.5374868", "0.5368351", "0.53659856", "0.53639805", "0.53318924", "0.53311676", "0.5328469", "0.5322163", "0.53216964", "0.5319239", "0.53122604", "0.53059524", "0.53028035", "0.5297856", "0.52916956", "0.5279776", "0.5278442" ]
0.682851
4
Test if the game is to be won/lost by the player.
def test_case_if_row_is_about_to_be_foobar(self, mock_game): test_game = Game(3, "playerX", "playerO") test_game.game_id = 1002 test_game.board_size = 3 test_game.your_move = "X" test_game.board_blob = json.dumps(['X', '', 'O', 'X', '', '', 'X', 'X', '']) assert(test_game.is_row_in_danger(0, "X") is False) assert(test_game.is_row_in_danger(1, "X") == [4, 5]) assert(test_game.is_row_in_danger(2, "X") == [8])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_won(board, player):\r\n return False", "def is_game_won(self):\n return True", "def has_won(board, player):\n return False", "def check_game(self):\n gameOver = None\n if self.turn > 4:\n gameOver = self.check_x_won()\n if gameOver is True:\n self.game_x_won()\n return\n\n gameOver = None\n if self.turn > 5:\n gameOver = self.check_o_won()\n if gameOver is True:\n self.game_o_won()\n return\n\n if self.turn >= 9:\n self.game_tie()\n return", "def is_game_over(self):\r\n\r\n if self.winner != 0:\r\n return True\r\n\r\n return False", "def check_if_game_over():\n # Calling check for winners.\n check_for_winner()\n # Calling check it's tie or not.\n check_if_tie()", "def check_if_game_over():\n check_for_winner()\n check_for_tie()", "def is_game_won(self):\n if self.game_is_tied():\n return False\n my_available_steps = self.steps_available(self.loc)\n opp_available_steps = self.steps_available(self.opponent_loc)\n if my_available_steps == 0 or opp_available_steps == 0:\n return True\n else:\n return False", "def won(self):\n if self.current_room.name == \"Victory\":\n return True\n else:\n return False", "def is_game_over(cls):\n cls.record_winner()\n cls.record_tie()", "def check_winner(self):\n pass", "def is_game_win(self):\n return not self.deck and not self.hand", "def check_if_won(self):\n if self.player_points > self.enemy_points:\n self.bHasWon = True\n else:\n self.bHasWon = False", "def check_game_over(self):\n red, blue = self.board.count_piece()\n if blue == 0:\n self.ui.show_result(\"RED WIN!\")\n self.turn = RED\n elif red == 0:\n self.ui.show_result(\"BLUE WIN!\")\n self.turn = BLUE\n elif red == blue == 1:\n self.ui.show_result(\"DRAW!\")", "def checkForWin(self):\n w = self.getWinner()\n if w == PLAYER or w == AI:\n # self.printBoard()\n # print('%d'%w + ' won!')\n return\n if w == Tie:\n # print('Tie')\n return", "def has_a_winner(self):\n return self.state in {State.X_WON, State.O_WON}", "def check_win(self, player):\n def check_row_win(player):\n for row in self.game_state:\n if player == row[0] == row[1] == row[2]:\n return True\n return False\n\n def check_column_win(player):\n # For doing a column check, transpose the grid and do a row check\n trans_game_state = numpy.transpose(self.game_state)\n for row in trans_game_state:\n if player == row[0] == row[1] == row[2]:\n return True\n return False\n\n def check_diag_win(player):\n # Left to right diagonal\n if player == self.game_state[0][0] == self.game_state[1][1] == self.game_state[2][2]:\n return True\n # Right to left diagonal\n if player == self.game_state[0][2] == self.game_state[1][1] == self.game_state[2][0]:\n return True\n return False\n\n if check_column_win(player) or check_diag_win(player) or check_row_win(player):\n return True\n return False", "def isGameOver(self):\n pass", "def check_win_lose(self):\n if self.b.get_player_i() == 7: # player got to the bank\n return 1 # win\n if self.b.get_chaser_i() == self.b.get_player_i(): # chaser catch the player\n return 2 # lose\n return 0 # nothing", "def _check_for_win(self):\n slots_available = any(\n [slot.available for slot in self.board.iter_slots() if not slot.mine]\n )\n if not slots_available:\n self.status = GameStatusEnum.won\n self.end_time = datetime.utcnow()", "def winFor(self,player):\n if(self.cachedWin == False):\n won = False;\n if(player==WHITE):\n for x in range(0,WIDTH):\n if(self.gameState[x,0]==WHITE):\n won = True\n \n elif(player==BLACK):\n for x in range(0,WIDTH):\n if(self.gameState[x,HEIGHT-1]==BLACK):\n won = True\n \n 
if(len(self.successors()) == 0):#IF there are no available moves for both players\n bCount = self.count(BLACK) #check who has the most pawns\n wCount = self.count(BLACK)\n if(bCount>wCount):\n self.cachedWin = True\n self.cachedWinner = player\n return True\n if(wCount>bCount):\n self.cachedWin = True\n self.cachedWinner = player\n return True\n \n if(won):\n self.cachedWin = True\n self.cachedWinner = player\n return True\n else:\n return False\n else:\n return player == self.cachedWinner", "def gameOver():\n if len(p1)==0 and len(p1winnings)==0:\n return True\n elif len(p2)==0 and len(p2winnings)==0:\n return True\n return False", "def game_on(self):\n doc = self.documentation\n return (self.draw.accepted or doc[len(doc)-1].accepted) and (self.board.stones_set < self.board.max_nr_stones) and (self.board.score[opponent(self.draw.player)] > 0)", "def check_player_reached():\n global round_start_timer, round_over\n\n if player1.alive and player1.rect.top < (platform_width // 2):\n add_time_points()\n reset_players()\n player1.wins += 1\n return True\n\n elif player2.alive and (player2.rect.top + player2.image.get_height()) > \\\n (SCREEN_HEIGHT - platform_width):\n player2.wins += 1\n round_over = True\n add_time_points()\n reset_players()\n return True", "def uber_check_win(self):\n if self.player1.score == self.player2.score:\n print(\"It's a draw!\")\n elif self.player1.score > self.player2.score:\n print(\"Player 1 is a proper bad ass mother fucker\")\n else:\n print(\"Player numma 2 is a proper bad ass mother fucker\")", "def _checkRoundOver(self):\n\n if not any(player.isAlive() for player in self.teams[0].players):\n self.endGame()", "def game_over(players):\n active_players = players_with_decks(players)\n if not active_players or len(active_players) == 1:\n return True\n return False", "def is_game_won(self) -> int:\n\n b = self.board\n for c1, c2, c3, c4 in _WINDOWS:\n if b[c1] and (b[c1] == b[c2] == b[c3] == b[c4]):\n print(\"win\", c1, c2, c3, c4)\n return b[c1]", "def check_for_game_won(self):\n all_moscuvites_captured = True\n king_captured = True\n king_escaped = True\n for piece in self.game_pieces:\n if piece.player == 2:\n all_moscuvites_captured = False\n elif piece.player == 3:\n king_captured = False\n king_coords = (piece.x,piece.y)\n escape_coords = [(0, 0), (0, 8),\n (8, 0), (8, 8)]\n if king_coords not in escape_coords:\n king_escaped = False\n if king_captured:\n return 2\n elif king_escaped or all_moscuvites_captured:\n return 1\n else:\n return 0", "async def check_game_over(self, game_id):\n game = await self.get_game(game_id)\n player1_stand = await self.check_player_standing(game[1])\n player2_stand = await self.check_player_standing(game[2])\n if player1_stand and player2_stand:\n return True\n else:\n return False", "def is_game_won(board, player):\n\n\tis_won = False\n\n\tif (\n\t\tboard[0] == board[1] == board[2] == player or\n\t\tboard[3] == board[4] == board[5] == player or\n\t\tboard[6] == board[7] == board[8] == player or\n\t\tboard[0] == board[3] == board[6] == player or\n\t\tboard[1] == board[4] == board[7] == player or\n\t\tboard[2] == board[5] == board[8] == player or\n\t\tboard[0] == board[4] == board[8] == player or\n\t\tboard[2] == board[4] == board[6] == player\n\t):\n\t\tis_won = True\n\n\treturn is_won", "def win(self, player):\n if player == 1:\n a = self.player_one.moves\n else:\n a = self.player_two.moves\n winning_moves = []\n for i in range(1, 9, 3):\n winning_moves.append(range(i, i + 3))\n for i in range(1, 4):\n 
winning_moves.append(range(i, i + 7, 3))\n winning_moves.append([1, 5, 9])\n winning_moves.append([3, 5, 7])\n for move in winning_moves:\n flg = True\n for index in move:\n if index not in a:\n flg = False\n break\n if flg:\n return True, player\n if len(self.player_one.moves) + len(self.player_two.moves) == 9:\n self.print_space()\n self.display_board()\n self.print_space()\n print \" Games is drawn\"\n self.logging.debug(\"Game is draw, nobody won\")\n self.logging.debug(\"Enjoy the game again :)\")\n sys.exit(100)\n return False, player", "def game_over(self):\n return bool(self.last_round and self.last_player == self.current_player)", "def won_game(self):\n for player in self.players:\n if len(player.cards) == 0:\n\n return True\n return False", "def _check_game_over(self):\n return self.game_board.check_game_over()", "def is_over(self):\n alive_players = [1 if p.status == \"alive\" else 0 for p in self.players]\n # If only one player is alive, the game is over.\n if sum(alive_players) == 1:\n return True\n\n # If all rounds are finshed\n if self.round_counter >= 2:\n return True\n return False", "def is_over(self):\n winner = TictactoeMatch.get_winner(self.inputs_)\n if winner:\n self.result_ = winner\n if Config.USER['debug']['enabled']:\n print \"It is over! Player \"+str(self.result_)+\" (\"+str(self.player_label_[self.result_])+\") wins!\"\n return True\n for value in self.inputs_:\n if value == TictactoeMatch.EMPTY:\n if Config.USER['debug']['enabled']:\n print \"Go!\"\n return False\n self.result_ = TictactoeMatch.DRAW\n if Config.USER['debug']['enabled']:\n print \"It is over! Draw!\"\n return True", "def check_tie(self, player1, player2):\n if self.check_win(player1) or self.check_win(player2):\n return False\n return self.check_grid_full()", "def __check_winner(self):\n for i in range(0, 3):\n col = self.__get_col(i)\n if col.get(self.player_char) == 3:\n print('\\nYou win!')\n self.game_ended = True\n return\n if col.get(self.opponent_char) == 3:\n print('\\nYou lose.')\n self.game_ended = True\n return\n row = self.__get_row(i)\n if row.get(self.player_char) == 3:\n print('\\nYou win!')\n self.game_ended = True\n return\n if row.get(self.opponent_char) == 3:\n print('\\nYou lose.')\n self.game_ended = True\n return\n for i in range(0, 2):\n diag = self.__get_diag(i)\n if diag.get(self.player_char) == 3:\n print('\\nYou win!')\n self.game_ended = True\n return\n if diag.get(self.opponent_char) == 3:\n print('\\nYou lose.')\n self.game_ended = True\n return\n if self.state.count(' ') == 0:\n print('\\nDraw!')\n self.game_ended = True", "def check_win(self):\n return UNEXPOSED not in self.get_game() and self.get_game().count(FLAG) == len(self.get_pokemon_location)", "def is_end_game(self):\n win = self.is_game_won()\n tie = self.game_is_tied()\n return win or tie", "def game_over(self, won=True):\n if won is True:\n self.game[\"game_status\"] = self.WON\n else:\n self.game[\"game_status\"] = self.DISCONNECTED\n db.save_game(self.game_id, self.game)", "def win(player1, player2):\n if(player1 == 1 and player2 == 3) or (player1 == 2 and player2 == 1) \\\n or (player1 == 3 and player2 == 2):\n return True", "def is_winner(self, player) -> bool:\n return (self.current_state.get_current_player_name() != player\n and self.is_over(self.current_state))", "def is_winning_state(self):\n return self.game.is_winning_state()", "def check_if_over(self):\n if self.remainingBalls == 0:\n self.check_if_won()\n self.game_over = True", "def gameOver(self):\n\t\treturn self.lives == 0", "def 
playerCanPlay(game, situation, player):\r\n return True", "def win_game(board :list) -> bool:\n if board == win_state:\n return True\n return False", "def is_winner(self, player):\n return (self.current_state.get_current_player_name() != player\n and self.is_over(self.current_state))", "def check_game_status(self):\n for player in (\"1\", \"2\"):\n row_win = np.apply_along_axis(\n lambda x: set(x) == {player}, 1, self.board\n ).any()\n col_win = np.apply_along_axis(\n lambda x: set(x) == {player}, 0, self.board\n ).any()\n d1_win = set(self.data[[0, 4, 8]]) == {player}\n d2_win = set(self.data[[2, 4, 6]]) == {player}\n if any([row_win, col_win, d1_win, d2_win]):\n return (\"win\", player)\n\n if self.counter[\"_\"] == 0:\n return (\"tie\", None)\n else:\n return (\"turn\", \"1\" if self.counter[\"1\"] == self.counter[\"2\"] else \"2\")", "def check_opponent_winning(self):\n valid_actions = self.get_valid_actions()\n copy_board = np.copy(self.board)\n for action in list(valid_actions):\n height = self.get_height(action, board=copy_board)\n self.set(action, height=height, value=self.current_player * -1, board=copy_board)\n\n if self.check_winner(copy_board, action, height) != 0:\n return True\n\n self.set(action, height=height, value=0, board=copy_board)\n\n return False", "def check_win(self):\r\n wins = [self.check_rows(), self.check_cols(), self.check_diag()]\r\n for case, pos in wins:\r\n if case != -1:\r\n print('Game over!')\r\n if self.grid[case][-1] == self.computer:\r\n print('The computer won!')\r\n return (True, pos)\r\n print('The player won!')\r\n return (True, pos)\r\n\r\n return (self.check_draw(), None)", "def check_loss(self):\n return POKEMON in self.get_game()", "def test_win(self):\n game = self.ending(['bw.wwwww'], 8, 1)\n game.man_move(0, 2)\n self.assertEqual(game.finish_state, (250, game.first_player, 'Win'))", "def is_game_over(self):\n if self.just_cheated_a or self.just_cheated_b:\n return False\n if self.game_stage == 3:\n return (self.die_a.current_value == \"5\" and self.die_b.current_value == \"6\" or\n self.die_a.current_value == \"6\" and self.die_b.current_value == \"5\")\n else:\n return False", "def checkWinner(self, surface):\r\n winner = True\r\n \r\n # Checks for winner\r\n for point in self.points:\r\n if point.getTeam() == self.getTurn():\r\n winner = False\r\n \r\n # Displays winner message if there is a winner\r\n if winner:\r\n self.surface.fill(BLACK)\r\n winText = graphicalObjects.Text(self.getCurrentString() + ' wins!', WIN_CENTER, 20)\r\n winText.draw(self.surface)\r\n pygame.display.flip()\r\n self.won = True", "def is_game_over(self):\n bk = False\n wk = False\n\n # Find the kings\n for row in range(8):\n for col in range(8):\n if self.board.squares[row][col] == ChessPiece.B_KING: # Black king symbol\n bk = True\n break\n if self.board.squares[row][col] == ChessPiece.W_KING: # Black king symbol\n wk = True\n break\n\n # If a king is missing, end the game. 
This fixes a bug we were having\n if bk == False:\n return 1\n if wk == False:\n return 2\n\n if self.white_wins():\n return 1\n elif self.black_wins():\n return 2\n elif self.tie():\n return 3\n else:\n return 0", "def is_game_over(self):\n if (self.check_win(HexBoard.RED) or self.check_win(HexBoard.BLUE) or \n len(self.get_move_list())==0):\n self.game_over = True\n return self.game_over", "def hasWin(self) :\n comparison = self.compareNumberUser()\n if (comparison == 'equal') :\n return True\n else :\n return False", "def game_over(self) -> bool:\n return self.rstate.game_over()", "def terminal_test(self, state):\r\n # Anyone won already?\r\n ended, winner = self.anyone_won(state)\r\n\r\n # There is no draw stage in this game, WTF.\r\n # It's always on !!\r\n if ended:\r\n return ended, winner\r\n else:\r\n # Checking if still game play is left\r\n return False, None", "def is_game_over(board):\n winner = check_winner(board)\n draw = check_draw(winner, board)\n return True if winner or draw else False", "def game_over(self):\n red_minion = 0\n blue_minion = 0\n red_master = 0\n blue_master = 0\n only_masters = True\n for row in self.board:\n for piece in row:\n if piece != 0:\n if not piece.master:\n if piece.player:\n blue_minion += 1\n else:\n red_minion += 1\n only_masters = False\n else:\n if piece.player:\n blue_master += 1\n else:\n red_master += 1\n if blue_minion + blue_master == 0:\n self.winner = \"Red\"\n self.red_victories += 1\n self.number_of_games +=1\n self.game_over_screen()\n return True\n elif red_minion + red_master == 0:\n self.winner = \"Blue\"\n self.blue_victories += 1\n self.number_of_games +=1\n self.game_over_screen()\n return True\n elif only_masters:\n if red_master > blue_master:\n self.winner = \"Red\"\n self.red_victories += 1\n elif blue_master > red_master:\n self.winner = \"Blue\"\n self.blue_victories += 1\n else:\n self.winner = \"Nobody\"\n self.number_of_games +=1\n self.game_over_screen()\n return True\n \n return False", "def check_winner(self):\r\n if all(heap == 0 for heap in self.heaps):\r\n if self.misere:\r\n self.winner = self.other_player\r\n self.loser = self.current_player\r\n else:\r\n self.winner = self.current_player\r\n self.loser = self.other_player", "def event_player_wins(self) -> None:\n win_amount = self.user.bet\n print(\"Congratulations, you win:\", win_amount)\n self.user.win_balance(self.user.bet)", "def game_over(_user_id):\n _board = boards[_user_id]\n return _board.is_game_over()", "def won(self):\r\n return None", "def is_win(self, roster):\n player = roster.get_current()\n guess = player.get_move().get_guess()\n if guess == self._code:\n return True\n else:\n return False", "def determine_win(self):\n if self.match.radiant_win is True and self.player_slot < 5:\n return True\n if self.match.radiant_win is False and self.player_slot > 5:\n return True\n return False", "def is_not_game_ended(data_map):\n\n continue_game = True\n loser = None\n winner = None\n\n # If a player has not any units, the other player win.\n for i in range(2):\n if not len(data_map['player' + str(i + 1)]) and continue_game:\n loser = data_map['player' + str(i + 1)]\n winner = data_map['player' + str(3 - (i + 1))]\n continue_game = False\n\n # If there's 20 turn without any attack, player1 loose and player2 win.\n if float(data_map['attack_turn']) / 2 > 19:\n loser = data_map['player1']\n winner = data_map['player2']\n continue_game = False\n\n return continue_game, loser, winner", "def check_game_over(board: Board, whites_turn: bool) -> bool:\n if 
is_in_check(board, whites_turn) and can_move(board, whites_turn):\n turn = 'White' if whites_turn else 'Black'\n print()\n print(f'{turn} is in check')\n return False\n elif is_in_check(board, whites_turn) and can_move(board, whites_turn) == False:\n print()\n print('Checkmate')\n return True\n elif is_stalemate(board, whites_turn):\n print()\n print('Stalemate')\n return True\n else:\n return False", "def is_winner(self, player: str) -> bool:\n total_result = self.current_state.hori_result + self.current_state.left_result + self.current_state.right_result\n total_line = len(total_result)\n p1_taken = 0\n p2_taken = 0\n for item in total_result:\n if item == '1':\n p1_taken+=1\n elif item == '2':\n p2_taken += 1\n if player == \"p1\":\n return float(p1_taken) >= total_line/2\n return float(p2_taken) >= total_line/2", "def gameWon(self):\n \n wins = [ threeInARow( self.squares[0], self.squares[1], self.squares[2] ),\n threeInARow( self.squares[3], self.squares[4], self.squares[5] ),\n threeInARow( self.squares[6], self.squares[7], self.squares[8] ),\n threeInARow( self.squares[0], self.squares[3], self.squares[6] ),\n threeInARow( self.squares[1], self.squares[4], self.squares[7] ),\n threeInARow( self.squares[2], self.squares[5], self.squares[8] ),\n threeInARow( self.squares[0], self.squares[4], self.squares[8] ),\n threeInARow( self.squares[2], self.squares[4], self.squares[6] ) ]\n \n return any(wins)", "def has_winner(self):\n\n if self.num_black_pieces == 0 or len(self.get_all_valid_moves(Player.black)) == 0:\n return Player.white\n elif self.num_white_pieces == 0 or len(self.get_all_valid_moves(Player.white)) == 0:\n return Player.black\n elif self.repetition_happened() or self.passive_game():\n return \"Tie\"\n else:\n return None", "def checkForWin (self):\r\n\t\tw = self.getWinner()\r\n\t\tif w:\r\n\t\t\tself.printBoard()\r\n\t\t\traise Exception(w + ' won!')", "def verify_winner(self):\r\n return self.count_pegs() == 1", "def checkForWin(self, board, player):\n\t\tif ((board[0][0] == player and board[0][1] == player and board[0][2] == player) or\n\t\t\t(board[1][0] == player and board[1][1] == player and board[1][2] == player) or\n\t\t\t(board[2][0] == player and board[2][1] == player and board[2][2] == player) or\n\t\t\t(board[0][0] == player and board[1][1] == player and board[2][2] == player) or\n\t\t\t(board[0][2] == player and board[1][1] == player and board[2][0] == player) or\n\t\t\t(board[0][0] == player and board[1][0] == player and board[2][0] == player) or\n\t\t\t(board[0][1] == player and board[1][1] == player and board[2][1] == player) or\n\t\t\t(board[0][2] == player and board[1][2] == player and board[2][2] == player)):\n\t\t\tprint(\"----------------------------\")\n\t\t\tprint(\"Yay! 
Player%d is the winner!\" % player)\n\t\t\tprint(\"----------------------------\")\n\t\t\tself.win = player", "def winner(self):\n if self.__current_player == 1:\n if self.__fields[0].winner():\n print(self.__players[0]._Player__name + \"is winner!\")\n Game.play = False\n elif self.__current_player == 2:\n if self.__fields[1].winner():\n print(self.__players[1]._Player__name + \"is winner!\")\n Game.play = False", "def is_over(self):\n winner = self.get_winner()\n status = bool(winner or not self.available_moves)\n return status, winner", "def game_over(state):\r\n return wins(state, HUMAN) or wins(state, COMP)", "def check_win(self, player):\n for win_pos in TicTacToe.win_pos:\n # for each winning position defined we take the set difference to the positions played be player\n # if there are not elements left after resulting set after difference operator\n # we get False as return. ie he has placed his marker in the winning positions which in turn makes him\n # the winner\n if not win_pos.difference(self.player_played_pos[player]):\n return True\n\n # if after checking for every winning positions if the control still reaches here,\n # the player has not marked the winning positions. returns False\n return False", "def has_won(self):\n return len(self.hand) == 0", "def __game_is_over(self):\n return not (self.__playing and self.__bricks_total > 0 and self.__num_lives > 0)", "def _checkRoundOver(self):\n\n # if we already ended it doesn't matter\n if self.hasEnded():\n return\n\n if not any(player.isAlive() for player in self.teams[0].players):\n # allow continuing after wave 1\n if self._wave > 1:\n self.continueOrEndGame()\n else:\n self.endGame()", "def check_for_end_of_game(self):\n return self.player_1.score + self.player_2.score >= self.number_of_cells", "def chk_win(*, end_game=True, winner=None):\n lpl = len(get_players())\n\n if var.PHASE == \"join\":\n if lpl == 0:\n reset_modes_timers(var)\n\n reset()\n\n # This must be after reset()\n if var.AFTER_FLASTGAME is not None:\n var.AFTER_FLASTGAME()\n var.AFTER_FLASTGAME = None\n if var.ADMIN_TO_PING is not None: # It was an flastgame\n channels.Main.send(messages[\"fstop_ping\"].format([var.ADMIN_TO_PING]))\n var.ADMIN_TO_PING = None\n\n return True\n return False\n if var.PHASE not in var.GAME_PHASES:\n return False #some other thread already ended game probably\n\n return chk_win_conditions(var.ROLES, var.MAIN_ROLES, end_game, winner)", "def check_for_tie():\n global ongoing_game\n check_for_winner()\n if \"*\" not in board and winner is None:\n ongoing_game = False\n print(\"Game is a Tie! 
\\n\")\n play_again()\n return True\n else:\n return False", "def will_player_win_after_n(self):\n clone_state = self._state.clone()\n clone_state.play('n')\n won_columns = 0\n for won_column in clone_state.finished_columns:\n if self._state.player_turn == won_column[1]:\n won_columns += 1\n # This means if the player stop playing now, they will win the game\n if won_columns == 3:\n return True\n else:\n return False", "def is_winner(self):\n return self.winner", "def game_over(state):\n return wins(state, HUMAN) or wins(state, COMP)", "def game_over(self):\n\n if self._number_of_moves == 9:\n return True\n\n return self._number_of_moves == 9 or self.winner_found()", "def check_win_condition(board) -> bool:\n if _check_vertical_win_condition(board) or _check_horizontal_win_condition(board) or _check_diagonal_win_condition(\n board):\n return True\n else:\n board.alternate_current_player()\n return False", "def isOpen(self):\n\t\treturn not self.endgame", "def winning_game_player(players):\n\n # in order for there to be a winner, the game must\n # be over\n if not game_over(players):\n return None\n\n # if the game is over, it could be that there is no\n # winner\n active_players = players_with_decks(players)\n if not active_players:\n return False\n\n # if the game is over than find the winner\n return players_with_decks(players)[0]", "def is_round_over(whose_turn,players):\n if ((len(players[whose_turn].hand.cards) == 0) and (players[whose_turn].has_discarded == True)):\n round_over = True\n else:\n round_over = False\n return round_over", "def game_over(winner):\n global in_play, outcome, score\n \n if winner == \"Dealer\":\n score -= 1\n if Dealer.busted:\n outcome = \"Player busted! New Deal?\"\n \n else:\n outcome = \"Dealer Wins! New Deal?\"\n \n else:\n score += 1\n if Player.busted:\n outcome = \"Dealer busted! New Deal?\"\n \n else:\n outcome = \"Player Wins! 
New Deal?\"\n \n in_play = False", "def win_game(self):\n\n def horizontal_win():\n \"\"\"Return whether there is horizontal win\"\"\"\n\n for i in range(0, board_size):\n if set(self.board[i]) == set([o_symbol]) or set(self.board[i]) == set([x_symbol]):\n print \"horizontal win\"\n return True\n\n def vertical_win():\n \"\"\"Return whether there is vertical win\"\"\"\n\n vert_set = set()\n for i in range(0, board_size):\n for j in range(0, board_size):\n vert_set.add(self.board[j][i])\n if vert_set == set([o_symbol]) or vert_set == set([x_symbol]):\n print \"vertical win\"\n return True \n vert_set = set()\n\n def diagonal_win():\n \"\"\"Return whether there is diagonal win\"\"\"\n\n diagonal_set = set()\n for i in range(0, board_size):\n diagonal_set.add(self.board[i][i]) \n\n if diagonal_set == set([o_symbol]) or diagonal_set == set([x_symbol]):\n print \"diagonal win 1\"\n return True\n \n diagonal_set = set()\n for i in range(0, board_size):\n diagonal_set.add(self.board[i][board_size - 1 - i])\n\n if diagonal_set == set([o_symbol]) or diagonal_set == set([x_symbol]):\n print \"diagonal win 2\"\n return True\n\n if horizontal_win() or vertical_win() or diagonal_win():\n print \"You have won.\"\n return True", "def enough_players():\n return True", "def winning_event(self, player):\n # vertical check\n for col in range(GameData.columns):\n if self.board[0][col] == player and self.board[1][col] == player and self.board[2][col] == player:\n self.draw_vertical_winning_line(col, player)\n print(\"Player {} has won the game!\".format(player))\n self.game_over = True\n return True\n\n # horizontal check\n for row in range(GameData.rows):\n if self.board[row][0] == player and self.board[row][1] == player and self.board[row][2] == player:\n self.draw_horizontal_winning_line(row, player)\n print(\"Player {} has won the game!\".format(player))\n self.game_over = True\n return True\n\n # ascending diagonal heck\n if self.board[2][0] == player and self.board[1][1] == player and self.board[0][2] == player:\n self.draw_asc_diagonal(player)\n print(\"Player {} has won the game!\".format(player))\n self.game_over = True\n return True\n\n # descending diagonal win chek\n if self.board[0][0] == player and self.board[1][1] == player and self.board[2][2] == player:\n self.draw_desc_diagonal(player)\n print(\"Player {} has won the game!\".format(player))\n self.game_over = True\n return True\n\n return False", "def game_won(self):\n return all((foundation.is_full() for foundation in self.foundations.values()))" ]
[ "0.8094866", "0.8090733", "0.8032095", "0.8010544", "0.79272014", "0.7739902", "0.7669931", "0.75953156", "0.75601757", "0.7536703", "0.7533477", "0.7526654", "0.7492473", "0.7458491", "0.7454935", "0.7428993", "0.73916984", "0.7306342", "0.72584236", "0.7249623", "0.7238585", "0.7216704", "0.7208936", "0.7204491", "0.7199169", "0.7198927", "0.71976984", "0.71856856", "0.7164469", "0.7160259", "0.71544766", "0.71469325", "0.7135561", "0.713127", "0.71107817", "0.7103785", "0.7078808", "0.7073694", "0.70572776", "0.7048463", "0.7040742", "0.7031919", "0.7027985", "0.70170885", "0.70013684", "0.69969714", "0.6995441", "0.6993015", "0.69907326", "0.6973749", "0.6957413", "0.6942008", "0.6938601", "0.69293165", "0.6926266", "0.6916159", "0.6914755", "0.69079965", "0.6906807", "0.68881017", "0.6883454", "0.6879381", "0.68671304", "0.6863428", "0.68555623", "0.68487513", "0.6843369", "0.6839018", "0.6833756", "0.68298095", "0.6818288", "0.6812966", "0.68102366", "0.68087715", "0.6800392", "0.6795666", "0.6795245", "0.6794394", "0.6792341", "0.6789267", "0.67868847", "0.6772665", "0.67594284", "0.67561746", "0.675365", "0.6742806", "0.673953", "0.6737732", "0.67350096", "0.6730896", "0.67291164", "0.6728859", "0.67162186", "0.6713989", "0.6710545", "0.6707183", "0.6705339", "0.67048514", "0.67022866", "0.6699435", "0.6695663" ]
0.0
-1
Evaluate and apply formatting on the template, and apply any art if provided. Any additional parameters are passed as extra variables to the template. The extra variables have priority when there are conflicting variable names.
def run(self, template: str, art: Optional[str] = None, **kwargs: Any) -> str: variables = self.__dict__ variables.update(kwargs) template = CustomFormats().format(template, **variables) if art: art = art.format(nfo=template) template = art for m in re.finditer(r"<\?([01])\?([\D\d]*?)\?>", template): # TODO: This if check is quite yucky, look into alternative options. # Ideally a custom format spec would be great. template = template.replace( m.group(0), m.group(2) if int(m.group(1)) else "" ) template = "\n".join(map(str.rstrip, template.splitlines(keepends=False))) return template
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render(template, variables={}):\r\n\treturn prettify( parse(template).render(dict(variables.items())) )", "def format_template(template, *args):\n return textwrap.dedent(template % args).strip()", "def formatEval(self, template, attrs, scale=1, noScale=None):\n # Boat width not stored, so calculated here...\n try:\n attrs.update({\n 'boatWidth': self.canvas.boat.attrs['width']\n - (self.canvas.boat.attrs['wallWidth'] * 2)\n })\n except:\n # Boat hasn't been added to self yet.\n pass\n\n # First put the values in place as normal.\n s = template.format(**attrs)\n # Then split it at the quotes into a list.\n s = s.split('\"')\n ret = s\n for i, section in enumerate(s):\n # Take the even elements from the list to\n # get the bits between the quotes.\n if (i+1)%2 == 0:\n # Try to evaluate it, if it causes an error, I will\n # assume it's not an expression, and leave it alone.\n try:\n result = eval(section)\n if not noScale == None and not noScale in s[i-1]:\n result *= scale\n except:\n result = section\n # Add the result back to the list.\n ret[i] = '\"' + str(result) + '\"'\n else:\n # The bits not in quotes are left alone.\n ret[i] = str(section)\n # Join the list back into a string.\n return ''.join(ret)", "def render(self, template: str, **vars) -> str:", "def _fill_template_text(\n self,\n template: Dict[Text, Any],\n template_vars: Dict[Text, Any]\n ) -> Dict[Text, Any]:\n line_text_keys = [\"text\", \"altText\", \"label\", \"uri\"]\n try:\n for key in line_text_keys:\n if key in template:\n template[key] = template[key].format(**template_vars)\n except KeyError as e:\n logger.exception(\n \"Failed to fill line template '{}'. \"\n \"Tried to replace '{}' but could not find \"\n \"a value for it. There is no slot with this \"\n \"name nor did you pass the value explicitly \"\n \"when calling the template. Return template \"\n \"without filling the template. 
\"\n \"\".format(template, e.args[0]))\n return template", "def part_render(self, attr, *a, **kw):\r\n style = kw.get('style', 'html')\r\n template = self.template(style)\r\n dt = template.get_def(attr)\r\n return unsafe(dt.render(thing = self, *a, **kw))", "def insert_evaluate_variables(text, var_dict):\n if isinstance(text, list):\n text.insert(0, '{% load quest_render_tags %}')\n rndr_string = '\\n'.join(text)\n else:\n rndr_string = r'{% load quest_render_tags %} ' + text\n\n var_dict_rendered = {}\n for key, values in var_dict.iteritems():\n var_dict_rendered[key] = values[1]\n\n tmplte = Template(rndr_string)\n cntxt = Context(var_dict_rendered)\n return tmplte.render(cntxt)", "def _template_formatting(field, inputs, inputs_dict_st):\n from .specs import MultiInputObj, MultiOutputFile\n\n # if a template is a function it has to be run first with the inputs as the only arg\n template = field.metadata[\"output_file_template\"]\n if callable(template):\n template = template(inputs)\n\n # as default, we assume that keep_extension is True\n keep_extension = field.metadata.get(\"keep_extension\", True)\n\n inp_fields = re.findall(r\"{\\w+}\", template)\n inp_fields_fl = re.findall(r\"{\\w+:[0-9.]+f}\", template)\n inp_fields += [re.sub(\":[0-9.]+f\", \"\", el) for el in inp_fields_fl]\n if len(inp_fields) == 0:\n return template\n\n val_dict = {}\n file_template = None\n\n for fld in inp_fields:\n fld_name = fld[1:-1] # extracting the name form {field_name}\n if fld_name not in inputs_dict_st:\n raise AttributeError(f\"{fld_name} is not provided in the input\")\n fld_value = inputs_dict_st[fld_name]\n if fld_value is attr.NOTHING:\n # if value is NOTHING, nothing should be added to the command\n return attr.NOTHING\n else:\n # checking for fields that can be treated as a file:\n # have type File, or value that is path like (including str with extensions)\n if isinstance(fld_value, os.PathLike) or (\n isinstance(fld_value, str) and \".\" in fld_value\n ):\n if file_template:\n raise Exception(\n f\"can't have multiple paths in {field.name} template,\"\n f\" but {template} provided\"\n )\n else:\n file_template = (fld_name, fld_value)\n else:\n val_dict[fld_name] = fld_value\n\n # if field is MultiOutputFile and some elements from val_dict are lists,\n # each element of the list should be used separately in the template\n # and return a list with formatted values\n if field.type is MultiOutputFile and any(\n [isinstance(el, (list, MultiInputObj)) for el in val_dict.values()]\n ):\n # all fields that are lists\n keys_list = [\n k for k, el in val_dict.items() if isinstance(el, (list, MultiInputObj))\n ]\n if any(\n [len(val_dict[key]) != len(val_dict[keys_list[0]]) for key in keys_list[1:]]\n ):\n raise Exception(\n f\"all fields used in {field.name} template have to have the same length\"\n f\" or be a single value\"\n )\n formatted_value = []\n for ii in range(len(val_dict[keys_list[0]])):\n val_dict_el = copy(val_dict)\n # updating values to a single element from the list\n for key in keys_list:\n val_dict_el[key] = val_dict[key][ii]\n\n formatted_value.append(\n _element_formatting(\n template, val_dict_el, file_template, keep_extension=keep_extension\n )\n )\n else:\n formatted_value = _element_formatting(\n template, val_dict, file_template, keep_extension=keep_extension\n )\n return formatted_value", "def _element_formatting(template, values_template_dict, file_template, keep_extension):\n if file_template:\n fld_name_file, fld_value_file = file_template\n # splitting the filename for 
name and extension,\n # the final value used for formatting depends on the template and keep_extension flag\n name, *ext = Path(fld_value_file).name.split(\".\", maxsplit=1)\n filename = str(Path(fld_value_file).parent / name)\n # updating values_template_dic with the name of file\n values_template_dict[fld_name_file] = filename\n # if keep_extension is False, the extensions are removed\n if keep_extension is False:\n ext = []\n else:\n ext = []\n\n # if file_template is at the end of the template, the simplest formatting should work\n if file_template and template.endswith(f\"{{{fld_name_file}}}\"):\n # recreating fld_value with the updated extension\n values_template_dict[fld_name_file] = \".\".join([filename] + ext)\n formatted_value = template.format(**values_template_dict)\n # file_template provided, but the template doesn't have its own extension\n elif file_template and \".\" not in template:\n # if the fld_value_file has extension, it will be moved to the end\n formatted_value = \".\".join([template.format(**values_template_dict)] + ext)\n # template has its own extension or no file_template provided\n # the simplest formatting, if file_template is provided it's used without the extension\n else:\n formatted_value = template.format(**values_template_dict)\n return formatted_value", "def apply_to(self, template):\n pass", "def _substitute(template, fuzzer, benchmark):\n return template.format(fuzzer=fuzzer, benchmark=benchmark)", "def render_string(self, template: str, **vars) -> str:", "def render(self, template, *args, **kwargs):\n self._render(template, sys.stdout, *args, **kwargs)", "def reformat(ctx):\n pass", "def highlight(val, conditions: dict, tablefmt):\n val = round(val, ROUND)\n for color, cond in conditions.items():\n if tablefmt == 'simple':\n if cond:\n return pfont([color, 'BOLD'], format(round(val, ROUND), f\".{ROUND}f\"), PrintFont)\n elif tablefmt in ['latex', 'latex_raw']: # needs to be amended by hand\n if cond:\n return pfont([color, 'BOLD'], str(format(round(val, ROUND), f\".{ROUND}f\")), LaTeXFont)\n return format(val, f\".{ROUND}f\")", "def render(template, context):\n if not template:\n return None\n\n text = \"\"\n filename = \"templates/\" + template\n with open(filename) as f:\n text = f.read()\n # First compile template into extended base template.\n is_child = re.search(extend_search, text.splitlines()[0])\n if is_child:\n base_filename = \"templates/\" + is_child.group(2)\n with open(base_filename) as base:\n text = extend_template(base.read(), text)\n # Run conditional checks\n has_conditions = re.search(if_search, text)\n if has_conditions:\n text = render_conditionals(text, context)\n # Replace any variables passed to the render function.\n for replace in context.replaces.keys():\n arg_search = re.compile(\"{{ \" + replace + \" }}\")\n text = re.sub(arg_search, context.replaces[replace], text)\n return text", "def render_template(*args, **kwargs):\r\n params = {'cache_buster': cache_buster, 'user': {}, 'user_json': {}, 'PROD': PRODUCTION,\r\n 'static_route': 'http://cdn1.pythonhackers.com'}\r\n params.update(**kwargs)\r\n\r\n return template_render(*args, **params)", "def render( *args, **kwargs ):", "def render( context, *args, **kwargs ):", "def persona_from_template_values(topic: str, topic_item: str, extra_details: str = ''):\n pers = f'My favorite {topic} is {topic_item}.'\n if extra_details:\n pers += f'\\n{extra_details}'\n return pers", "def _render_thing(self, thing):\n function = \"{:}\".format\n if (type(thing) in self.fmatdict):\n 
function = self.fmatdict[type(thing)]\n return function(thing).strip()", "def render(self, template, **kw):\n self.write(self.render_string(template, **kw))", "def render(self, template, **kw):\n self.write(self.render_str(template, **kw))", "def render(self, template, **kw):\n self.write(self.render_str(template, **kw))", "def render(self, template, **kw):\n self.write(self.render_str(template, **kw))", "def apply_format(self, **format_vars):\n for construction_dict in (self._actions, self._conditions):\n for construction_key, construction_objs in construction_dict.iteritems():\n for construction in construction_objs:\n construction.apply_format(**format_vars)", "def format_html(format_string, *args, **kwargs):\n args_safe = map(conditional_escape, args)\n kwargs_safe = dict([(k, conditional_escape(v)) for (k, v) in\n six.iteritems(kwargs)])\n return mark_safe(format_string.format(*args, **kwargs))", "def _text(self, template, **kw):\n ns = dict()\n ns['csv'] = _args_to_csv\n ns['f'] = _Namespace(kw)\n return Template(template).render(**ns)", "def substitution_func_gen(self, variables, code):\n \n #print(self.rule.name, self.external_vars)\n ext, rest = separate(variables, lambda v: v in self.external_vars.keys())\n \n substitution_dict = dict()\n substitution_dict.update( { e : self.external_vars[e] for e in ext } )\n substitution_dict.update( { r : p(r) for r in rest } )\n \n new_format_string = code.format(**substitution_dict)\n \n return ( set(rest), lambda vd = { r : r for r in rest }: new_format_string.format(**vd) )", "def template_file(task, template, path, jinja_filters=None, **kwargs):\n jinja_filters = jinja_filters or {} or task.nornir.config.jinja_filters\n merged = merge_two_dicts(task.host, kwargs)\n text = jinja_helper.render_from_file(\n template=template,\n path=path,\n host=task.host,\n jinja_filters=jinja_filters,\n **merged\n )\n return Result(host=task.host, result=text)", "def format(\n self,\n format_string,\n module=None,\n param_dict=None,\n force_composite=False,\n attr_getter=None,\n ):\n if param_dict is None:\n param_dict = {}\n\n # if the processed format string is not in the cache then create it.\n if format_string not in self.block_cache:\n self.build_block(format_string)\n\n first_block = self.block_cache[format_string]\n\n def get_parameter(key):\n \"\"\"\n function that finds and returns the value for a placeholder.\n \"\"\"\n if key in param_dict:\n # was a supplied parameter\n param = param_dict.get(key)\n elif module and hasattr(module, key):\n param = getattr(module, key)\n if hasattr(param, \"__call__\"):\n # we don't allow module methods\n raise Exception()\n elif attr_getter:\n # get value from attr_getter function\n try:\n param = attr_getter(key)\n except: # noqa e722\n raise Exception()\n else:\n raise Exception()\n if isinstance(param, Composite):\n if param.text():\n param = param.copy()\n else:\n param = \"\"\n return param\n\n # render our processed format\n valid, output = first_block.render(get_parameter, module)\n\n # clean things up a little\n if isinstance(output, list):\n output = Composite(output)\n if not output:\n if force_composite:\n output = Composite()\n else:\n output = \"\"\n\n return output", "def evaluate_template(template: str, prefix=\"$\", sufix=\"$\"):\n return get_context().evaluate_template(template, prefix=prefix, sufix=sufix)", "def format_result_rows(parsed_args, ordered_dict, template_name,\n indent=DEFAULT_INDENT):\n if parsed_args.format == 'template':\n out = ['{{%s\\n' % template_name]\n for k, v in 
ordered_dict.items():\n if v is not None:\n out.append(('|{0: <%s}= {1}\\n' % indent).format(k, v))\n out.append('}}')\n elif parsed_args.format == 'module':\n ordered_dict['debug_id'] = 1\n out = ['{']\n for k, v in ordered_dict.items():\n if v is not None:\n out.append('{0} = \"{1}\", '.format(k, v))\n out[-1] = out[-1].strip(', ')\n out.append('}')\n return ''.join(out)", "def render_with_full_rendering_pipeline(\n # regarding the given template:\n parsed_template: parse_templates.ParsedTemplateRefined,\n ids_used_in_template: typing.FrozenSet[str],\n template_contains_unspecified_ids: bool,\n\n # regarding the given pronoun data:\n grpd: parse_pronoun_data.GRPD) -> str:\n\n parsed_template, grpd = GRenderer.id_resolution(parsed_template, ids_used_in_template,\n template_contains_unspecified_ids, grpd)\n parsed_template, grpd = GRenderer.resolve_addressing(parsed_template, grpd)\n result = GRenderer.render_final_context_values(parsed_template, grpd)\n\n return result", "def render(data_dict, *args, **kwargs):", "def render(request, *args, **kw):", "def render_string(self, template, **params):\n t = jinja_env.get_template(template)\n return t.render(params)", "def render_to(template):\n def renderer(func):\n def wrapper(request, *args, **kw):\n output = func(request, *args, **kw)\n if isinstance(output, (list, tuple)):\n return render(request, output[1], output[0])\n elif isinstance(output, dict):\n return render(request, template, output)\n return output\n return wrapper\n return renderer", "def render_func(raw_str: str) -> str:\n try:\n rendered_str = raw_str.format(**live_context)\n except KeyError as err:\n raise SQLTemplaterError(\n \"Failure in Python templating: {}. Have you configured your \"\n \"variables? https://docs.sqlfluff.com/en/stable/\"\n \"configuration.html#templating-configuration\".format(err)\n )\n return rendered_str", "def renderstr_from_template(self, template, args=None):\n renderedtext = template.render_string(args)\n return renderedtext", "def render(variables, input_path, output_path, delimiter='%%'):\n\n try:\n infile = open(input_path, 'r')\n except IOError as e:\n print 'unable to open input file: {}'.format(input_path)\n print_debug(e)\n return\n try:\n outfile = open(output_path, 'w')\n except IOError as e:\n print 'unable to open output file: {}'.format(output_path)\n print_debug(e)\n return\n\n for line in infile.readlines():\n rendered_string = ''\n match = re.search(delimiter + '[a-zA-Z_0-9]*' + delimiter, line)\n if match:\n delim_index = [m.start() for m in re.finditer(delimiter, line)]\n if len(delim_index) < 2:\n continue\n\n template_string = line[delim_index[0] + len(delimiter): delim_index[1]]\n for item in variables:\n if item == template_string:\n rendered_start = line[:delim_index[0]]\n rendered_middle = variables[item]\n rendered_end = line[delim_index[0] + len(delimiter) + len(item) + len(delimiter):]\n rendered_string += str(rendered_start) + str(rendered_middle) + str(rendered_end)\n else:\n continue\n else:\n rendered_string = line\n outfile.write(rendered_string)", "def cformat(template, *args, **kwargs):\n kwargs.update(**__TERMINAL_CODES)\n return template.format(*args, **kwargs)", "def _render(self, template, out, *args, **kwargs):\n text = self.render_only(template, *args, **kwargs)\n try:\n out.write(text)\n except UnicodeEncodeError:\n text = text.encode('utf-8')\n out.write(text)", "def decorate_template(mlist, template, extradict=None):\n # Create a dictionary which includes the default set of interpolation\n # variables 
allowed in headers and footers. These will be augmented by\n # any key/value pairs in the extradict.\n substitutions = {\n key: getattr(mlist, key)\n for key in ('fqdn_listname',\n 'list_name',\n 'mail_host',\n 'display_name',\n 'request_address',\n 'description',\n 'info',\n )\n }\n if extradict is not None:\n substitutions.update(extradict)\n text = expand(template, mlist, substitutions)\n # Turn any \\r\\n line endings into just \\n\n return re.sub(r' *\\r?\\n', r'\\n', text)", "def build_format(i, ex, args, meta_args):\n formatter = string.Formatter()\n format_string = meta_args.format_string\n fields = list(formatter.parse(format_string))\n\n kwarg_fields = []\n indexed_fields = []\n\n i.result = hive.variable('str')\n i.result_out = hive.pull_out(i.result)\n\n for index, field in enumerate(fields):\n literal_text = field[1]\n\n if literal_text is None:\n continue\n\n if not literal_text.isidentifier():\n field_name = \"field_{}\".format(index)\n indexed_fields.append(field_name)\n\n else:\n field_name = literal_text\n kwarg_fields.append(field_name)\n\n # Create IO\n attr = hive.variable()\n setattr(i, field_name, attr)\n\n in_attr = hive.pull_in(attr)\n setattr(i, \"{}_in\".format(field_name), in_attr)\n\n setattr(ex, field_name, hive.antenna(in_attr))\n hive.trigger(i.result_out, in_attr, pretrigger=True)\n\n ex.result = hive.output(i.result_out)\n\n def do_format(self):\n args = [getattr(self, \"_{}\".format(attr_name)) for attr_name in indexed_fields]\n kwargs = {attr_name: getattr(self, \"_{}\".format(attr_name)) for attr_name in kwarg_fields}\n self._result = formatter.format(format_string, *args, **kwargs)\n\n i.func = hive.modifier(do_format)\n hive.trigger(i.result_out, i.func, pretrigger=True)", "def render(self, template, **kw):\n self.write(self._render_str(template, **kw))", "def resolve(text, *args, **kwargs):\n text = gettext(text)\n # Allways close formatting\n text += '{c.end}{c.default}'\n colors = NO_COLORS\n if not settings.opt('no_color'):\n colors = COLORS\n return text.format(*args, c=colors, **kwargs)", "def fill_template_instruction(self,\n obj,\n ref_obj=None,\n spat_rel=None,\n drop_color=False):\n obj_urdf = os.path.splitext(os.path.basename(obj[\"urdf\"]))[0]\n if not drop_color:\n obj_color = obj[\"color\"] + \" \"\n else:\n obj_color = \"\"\n if ref_obj is not None:\n ref_obj_color = get_ref_obj_color(ref_obj)\n ref_obj_urdf = os.path.splitext(os.path.basename(ref_obj[\"urdf\"]))[0]\n\n if not spat_rel:\n return f\"Put the {obj_color}{obj_urdf} in the {self.bowl_color} bowl.\"\n else:\n return f\"Put the {obj_color}{obj_urdf} which is {SPATIAL_RELATIONS[spat_rel]} {ref_obj_color} {ref_obj_urdf} in the {self.bowl_color} bowl.\"", "def render_template(template, **template_values):\n # retrieve the html template\n t = jinja_environment.get_template(template)\n\n # render the html template with th given dictionary\n return t.render(template_values)", "def render(repo=None, branch=None, compare=None, inserted=0, deleted=0,\n modified=0, **kwargs):\n if not repo or not branch:\n return ''\n\n parts = ['{repo}/{branch}']\n\n # Compare against\n if compare not in ('HEAD', branch, None):\n parts.append('Comparing against {compare}')\n\n # File statistics\n if inserted:\n parts.append('{inserted}+')\n if deleted:\n parts.append('{deleted}-')\n if modified:\n parts.append(u'{modified}≠')\n\n # join template and fill with locals\n return ', '.join(parts).format(**locals())", "def template(self, record):\n\n def _log_format_onecolor(record):\n \"\"\"\n Normal 
console output format\n \"\"\"\n\n return LEVEL_COLORS.get(record.levelname)\n\n def _log_format_notset(record, stylized=True):\n \"\"\"\n Default log format.\n \"\"\"\n\n reset = Style.RESET_ALL\n\n levelname = {\n 'style_before': LEVEL_COLORS.get(record.levelname) + Style.BRIGHT,\n 'format': '(%(levelname)s)',\n 'style_after': reset,\n 'prefix': '',\n 'suffix': '',\n }\n\n name = {\n 'style_before': Fore.WHITE + Style.DIM + Style.BRIGHT,\n 'format': '%(name)s',\n 'style_after': Fore.RESET + Style.RESET_ALL,\n 'prefix': ' ',\n 'suffix': ' ',\n }\n\n # format prefix + style_before + message + style_after + suffix\n result = reset\n for i in [levelname, name]:\n result += f\"{i['prefix']}{i['style_before']}{i['format']}{i['style_after']}{i['suffix']}\"\n result += reset\n\n return result\n\n # Template Switcher\n templates = {\n 'NOTSET': _log_format_notset,\n 'INFO': _log_format_onecolor,\n 'DELIMITER': _log_format_onecolor,\n 'TOPIC': _log_format_onecolor,\n 'WARNING': _log_format_onecolor,\n }\n\n return templates.get(record.levelname, _log_format_notset)(record)", "def process_template(template, args, ill_sg_vowel=None):\n parts = []\n delparts = []\n for x in template:\n if x.isdigit():\n k = int(x)\n if k in args:\n v = args[k]\n else:\n v = args.get(x, \"\")\n if v == \"(')\":\n v = \"\"\n if x == \"9\":\n if \"par_sg_a\" in args:\n parts.append(args[\"par_sg_a\"])\n else:\n if not delparts:\n return None\n parts.append(delparts[-1])\n if x == \"3\" and not v:\n # XXX what exactly was this kludge for...? I'm not sure if\n # this is now handled by other means (default value for last\n # argument).\n v = EMPTY_CHAR\n for y in v:\n parts.append(y)\n elif x == \"@\":\n if ill_sg_vowel is not None:\n parts.append(ill_sg_vowel)\n else:\n p = \"\".join(parts + delparts)\n m = re.search(r\"([aeiouyåäöAEIOUYÅÄÖ])\"\n r\"[^aeiouyåäöAEIOUYÅÄÖ]*$\",\n p)\n if m:\n parts.append(m.group(1).lower())\n else:\n m = re.search(r\"[éÉ]\"\n r\"[^aeiouyåäöAEIOUYÅÄÖ]*$\",\n p)\n if m:\n parts.append(\"e\")\n elif p:\n ch = last_char_to_vowel(p[-1])\n parts.append(ch)\n else:\n return None\n elif x == \"A\":\n a = args.get(\"par_sg_a\", None)\n if a:\n parts.append(a)\n else:\n p = \"\".join(parts + delparts)\n parts.append(word_to_aae(p))\n elif x == \"O\":\n p = \"\".join(parts + delparts)\n if needs_aou(p):\n parts.append(\"o\")\n else:\n parts.append(\"ö\")\n elif x == \"U\":\n p = \"\".join(parts + delparts)\n if needs_aou(p):\n parts.append(\"u\")\n else:\n parts.append(\"y\")\n elif x == \"D\":\n p = \"\".join(parts)\n if not p:\n return None\n if p[-1] in \"rnml\":\n parts.append(p[-1])\n else:\n parts.append(\"d\")\n elif x == \"I\":\n # Inserts either previously removed character or \"e\" if it was\n # \"i\".\n if not delparts:\n return None\n if delparts[-1] == \"i\":\n parts.append(\"e\")\n else:\n parts.append(delparts[-1])\n elif x == \"-\":\n # Drop last, move to delparts so it counts for gradation\n if not parts:\n return None\n p = parts.pop()\n if p not in \"aeiouyäöp\": # Must be vowel or p\n return None\n delparts.append(p)\n elif x == \"/\":\n # Drop second to last\n if len(parts) < 2:\n return None\n p = parts.pop()\n if p not in \"aeiouyäö\": # Must be vowel\n return None\n p2 = parts.pop()\n if p2 not in \"aeiouyäö\": # Must be vowel\n return None\n parts.append(p)\n else:\n parts.append(x)\n v = \"\".join(parts)\n if v.find(EMPTY_CHAR) >= 0:\n for ch in \"aeiouyäöAEIOUYÄÖ\":\n v = re.sub(\"([aeiouyäöAEIOUYÄÖ]\" + ch + \")\" + EMPTY_CHAR +\n \"(\" + ch + \")\", r\"\\1'\\2\", v)\n v = 
re.sub(EMPTY_CHAR, \"\", v)\n return v", "def profile(template_path, func, *a, **b):\n template_dict = func(*a, **b)\n ### Generate HTML given the path to the template and the data dictionary.\n loader = TemplateLoader()\n template = loader.load(template_path)\n stream = template.generate(**template_dict)\n return stream.render('xhtml')", "def materialize(template, substitutions):\n\n script_str = template\n for param, value in substitutions.items():\n script_str = re.sub(param, str(value), script_str)\n\n return script_str", "def template_string(template, **kwargs):\n\n temp = Template(template)\n return temp.render(**kwargs)", "def format(self, *args: object, **kwargs: object) -> HTML:\n return HTML(FORMATTER.vformat(self.value, args, kwargs))", "def __call__(self, template_name, **kwargs):\n if not template_name.endswith('.jinja2'):\n template_name += '.jinja2'\n\n template = self._env.get_template(template_name)\n context = self._system.copy()\n context.update(kwargs)\n\n return jinja2.Markup(template.render(context))", "def uses_template(template):\n def wrapper(func):\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n template_path = template\n ctx = func(*args, **kwargs)\n if type(ctx) is dict:\n try:\n return render_template(template_path,\n inators=ctx['inators'])\n except KeyError:\n try:\n return render_template(template_path,\n inator=ctx['inator'])\n except KeyError:\n return render_template(template_path, inators=ctx)\n else:\n return ctx\n return wrapped\n return wrapper", "def texManipContext(*args, exists: bool=True, image1: Union[AnyStr, bool]=\"\", image2:\n Union[AnyStr, bool]=\"\", image3: Union[AnyStr, bool]=\"\", q=True, query=True,\n e=True, edit=True, **kwargs)->Union[AnyStr, Any]:\n pass", "def render_str(template, **params):\n t = env.jinja_env.get_template(template)\n return t.render(params)", "def render_str(self, template, **params):\n tmp = JINJA_ENV.get_template(template)\n return tmp.render(params)", "def render_callable(self, inner_template_name, arg_string, outer_args):\r\n # First render the arg_string (mustache doesn't do this for you, and it may itself\r\n # contain mustache constructs).\r\n rendered_arg_string = self.render(arg_string, outer_args)\r\n # Parse the inner args as CGI args.\r\n inner_args = dict([(k, v[0]) for k, v in urlparse.parse_qs(rendered_arg_string).items()])\r\n # Order matters: lets the inner args override the outer args.\r\n args = dict(outer_args.items() + inner_args.items())\r\n # Render.\r\n return self.render_name(inner_template_name, args)", "def __init__(self, text, *contexts):\n self.context = {}\n for context in contexts:\n self.context.update(context)\n\n self.all_vars = set()\n self.loop_vars = set()\n\n # We construct a function in source form, then compile it and hold onto\n # it, and execute it to render the template.\n code = CodeBuilder()\n\n code.add_line(\"def render_function(context, do_dots):\")\n code.indent()\n vars_code = code.add_section()\n code.add_line(\"result = []\")\n code.add_line(\"append_result = result.append\")\n code.add_line(\"extend_result = result.extend\")\n code.add_line(\"to_str = str\")\n\n buffered = []\n\n def flush_output():\n \"\"\"Force `buffered` to the code builder.\"\"\"\n if len(buffered) == 1:\n code.add_line(\"append_result(%s)\" % buffered[0])\n elif len(buffered) > 1:\n code.add_line(\"extend_result([%s])\" % \", \".join(buffered))\n del buffered[:]\n\n ops_stack = []\n\n # Split the text to form a list of tokens.\n tokens = 
re.split(r\"(?s)({{.*?}}|{%.*?%}|{#.*?#})\", text)\n\n squash = in_joined = False\n\n for token in tokens:\n if token.startswith('{'):\n start, end = 2, -2\n squash = (token[-3] == '-')\n if squash:\n end = -3\n\n if token.startswith('{#'):\n # Comment: ignore it and move on.\n continue\n elif token.startswith('{{'):\n # An expression to evaluate.\n expr = self._expr_code(token[start:end].strip())\n buffered.append(\"to_str(%s)\" % expr)\n else:\n # token.startswith('{%')\n # Action tag: split into words and parse further.\n flush_output()\n\n words = token[start:end].strip().split()\n if words[0] == 'if':\n # An if statement: evaluate the expression to determine if.\n if len(words) != 2:\n self._syntax_error(\"Don't understand if\", token)\n ops_stack.append('if')\n code.add_line(\"if %s:\" % self._expr_code(words[1]))\n code.indent()\n elif words[0] == 'for':\n # A loop: iterate over expression result.\n if len(words) != 4 or words[2] != 'in':\n self._syntax_error(\"Don't understand for\", token)\n ops_stack.append('for')\n self._variable(words[1], self.loop_vars)\n code.add_line(\n \"for c_{} in {}:\".format(\n words[1],\n self._expr_code(words[3])\n )\n )\n code.indent()\n elif words[0] == 'joined':\n ops_stack.append('joined')\n in_joined = True\n elif words[0].startswith('end'):\n # Endsomething. Pop the ops stack.\n if len(words) != 1:\n self._syntax_error(\"Don't understand end\", token)\n end_what = words[0][3:]\n if not ops_stack:\n self._syntax_error(\"Too many ends\", token)\n start_what = ops_stack.pop()\n if start_what != end_what:\n self._syntax_error(\"Mismatched end tag\", end_what)\n if end_what == 'joined':\n in_joined = False\n else:\n code.dedent()\n else:\n self._syntax_error(\"Don't understand tag\", words[0])\n else:\n # Literal content. 
If it isn't empty, output it.\n if in_joined:\n token = re.sub(r\"\\s*\\n\\s*\", \"\", token.strip())\n elif squash:\n token = token.lstrip()\n if token:\n buffered.append(repr(token))\n\n if ops_stack:\n self._syntax_error(\"Unmatched action tag\", ops_stack[-1])\n\n flush_output()\n\n for var_name in self.all_vars - self.loop_vars:\n vars_code.add_line(f\"c_{var_name} = context[{var_name!r}]\")\n\n code.add_line('return \"\".join(result)')\n code.dedent()\n self._render_function = code.get_globals()['render_function']", "def parse_template(data, template):\n img_html = \"\"\"<div class=\"thumb-wrap\"><div class=\"thumb-holder\"></div><a href=\"{{URL}}\" target=\"_top\"><div class=\"thumb-img\" style=\"background-image:url('{{IMG}}');\"></div></a></div>\"\"\"\n template = template.replace('{{URL}}', data['link'].replace('http:','https:'))\n template = template.replace('{{URLX}}', data['link'])\n template = template.replace('{{TITLE}}', data['title'])\n #template = template.replace('{{BLURB}}', data['summary'])\n img_html = img_html.replace('{{URL}}', data['link'].replace('http:','https:'))\n if hasattr(data, 'tags') and len(data['tags']) > 0:\n template = template.replace('{{SECTION}}', data['tags'][0]['term'])\n else:\n template = template.replace('<h2><a href=\"{{URL}}\" target=\"_top\">{{SECTION}}</a></h2>', '')\n if hasattr(data, 'media_content') and len(data['media_content']) > 0:\n template = template.replace('{{IMG}}', '%s?w=150' % data['media_content'][0]['url'].replace('http:','https:'))\n else:\n template = template.replace(img_html, '')\n\n return template", "def render_str(self, template, **params):\n return render_str(template, **params)", "def render_str(self, template_name, **params):\n template = jinja_env.get_template(template_name)\n return template.render(params)", "def _substitute(template, files, user_values):\n # Get all placeholder names\n placeholders = _get_placeholders(template)\n\n # Pre-fill placeholders based on existing file aliases\n placeholder_values = _prefill_placeholders(placeholders, files,\n user_values)\n\n # Add user specified values for the placeholders\n placeholder_values.update(**user_values)\n\n # Check whether all placeholder values are now properly provided.\n provided = set(placeholder_values.keys())\n needed = set(placeholders)\n missing = needed - provided\n if len(missing) > 0:\n raise ValueError('Cannot construct filename, because the following '\n 'parameters are missing: %s' % missing)\n\n # Do the substitution\n return template.format(**placeholder_values)", "def add_to_pr_export(self, exp_template):", "def template_message(include_title=False, template='markdown.md.j2', exclude_labels=True, current_length=0, **kwargs):\n processed = {'message': ''}\n alerts_count = len(kwargs['alerts'])\n title = f\"{alerts_count} alert(s) received\"\n if not include_title:\n processed.update({'title': f\"{title}\"})\n title = None\n processed['message'] = render_template(\n template,\n title=title,\n alerts=kwargs['alerts'],\n external_url=kwargs['external_url'],\n receiver=kwargs['receiver'],\n exclude_labels=exclude_labels,\n current_length=current_length,\n )\n for alert in kwargs['alerts']:\n if int(alert['annotations'].get('priority', -1)) > processed.get('priority', -1):\n processed['priority'] = int(alert['annotations']['priority'])\n return processed", "def main():\n print(\"Enter name:\")\n name = input()\n\n print(\"Enter age:\")\n age = input()\n\n print(\"Enter town:\")\n town = input()\n\n print(\"Enter salary:\")\n salary = 
float(input())\n\n sentence = f\"{name} is {age} years old, is from {town} and makes ${salary}\"\n sentence2 = \"{} is {} years old, is from {} and makes ${}\".format(name, age, town, salary)\n sentence3 = \"%(a)s is %(b)s years old, is from %(c)s and makes $%(d)s\" % {'a':name, 'b':age, 'c':town, 'd':salary }\n\n print(sentence)\n print(sentence2)\n print(sentence3)\n\n num = 13.3456\n sentence4 = 'Place my variable here: %s' %(num)\n print(sentence4)\n\n sentence5 = 'Place my variable here: %1.3f' %(num)\n print(sentence5)\n\n sentence6 = 'First {x}, Second {y} and Third {x}'.format(x='XXX', y='YYY')\n print(sentence6)", "def render_string(self, template_name, **kwargs):\n raise NotImplementedError()", "def get_rendered_text(self, context):\n missing = set()\n for required in utils.get_variable_names_from_template(self):\n if required not in context:\n missing.add(required)\n if missing:\n raise MissingContext(missing)\n tmpl = utils.PyratempTemplate(self.text)\n context = context.copy()\n context[\"locale\"] = self.language.iso_code\n return tmpl.render(context)", "def render(self, context=None, **kwargs):\n # store the given context\n global_context = {}\n # store the result\n result = []\n # group the given context or kwargs\n if context:\n global_context.update(context)\n elif kwargs:\n global_context.update(kwargs)\n\n # this function to output from context\n # to the rendered template\n def write(*args):\n result.extend([str(arg) for arg in args])\n\n def fmt_write(fmt, *args):\n result.append(fmt % args)\n\n # add write and fmt_write into global_context\n global_context['write'] = write\n global_context['fmt_write'] = fmt_write\n # run the code\n for is_code, token in self.tokens:\n if is_code:\n exec(token, global_context)\n else:\n result.append(token)\n return ''.join(result)", "def format(self, *args, **kwargs):\n return self._format(args, kwargs)", "def cheetah_template(self, pre=False):\n if self.is_req_output:\n cht_tmpl = self.req_out_chth\n return cht_tmpl.substitute(self.xml_out)\n elif self.is_output:\n xml_out = self.xml_out\n xml_out['out_sel_name'] = self.out_sel_name\n cht_tmpl = self.file_chth\n return cht_tmpl.substitute(self.xml_out)\n elif self.is_input and not pre:\n if self.pname in self.gen_in_fmt:\n if self.gen_in_fmt[self.pname] == 'vcf,vcf_bgzip':\n cht_tmpl = self.vcf_choose\n else:\n cht_tmpl = PercentTemplate(self.reg_arg)\n elif self.pname in self.tool_data[self.tool_name]['input_fmt']:\n cht_tmpl = self.req_out_chth\n return cht_tmpl.substitute(self.xml_out)\n elif self.is_input and pre:\n cht_tmpl = self.vcf_tabix\n return cht_tmpl.substitute(self.xml_out)\n else:\n if self.xml_out['section'] not in ['required']:\n template_string = self.ext_arg\n else:\n template_string = self.reg_arg\n if self.xml_out['type'] == 'boolean':\n cht_tmpl = PercentTemplate(template_string.replace('%argument ', ''))\n else:\n cht_tmpl = PercentTemplate(template_string)\n return cht_tmpl.substitute(self.xml_out)", "def from_template(template, **extra_args):\n if hasattr(template, 'read') and callable(template.read):\n template_contents = template.read()\n elif os.path.exists(template):\n template_file = file(template, 'r')\n template_contents = template_file.read()\n template_file.close()\n else:\n # treat `template` as a string\n template_contents = template\n # substitute `extra_args` into `t` and return it\n return (template_contents % extra_args)", "def render(self, template: str, **vars) -> str:\n vars.setdefault('ctx', self._ctx)\n return 
self._renderer.render(template, **vars)", "def _interpolate(format):\n from tokenize import tokenprog\n\n def matchorfail(text, pos):\n match = tokenprog.match(text, pos)\n if match is None:\n raise _ItplError(text, pos)\n return match, match.end()\n\n namechars = \"abcdefghijklmnopqrstuvwxyz\" \\\n \"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_\";\n chunks = []\n pos = 0\n\n while 1:\n dollar = format.find(\"$\", pos)\n if dollar < 0: \n break\n nextchar = format[dollar + 1]\n\n if nextchar == \"{\":\n chunks.append((0, format[pos:dollar]))\n pos, level = dollar + 2, 1\n while level:\n match, pos = matchorfail(format, pos)\n tstart, tend = match.regs[3]\n token = format[tstart:tend]\n if token == \"{\": \n level = level + 1\n elif token == \"}\": \n level = level - 1\n chunks.append((1, format[dollar + 2:pos - 1]))\n\n elif nextchar in namechars:\n chunks.append((0, format[pos:dollar]))\n match, pos = matchorfail(format, dollar + 1)\n while pos < len(format):\n if format[pos] == \".\" and \\\n pos + 1 < len(format) and format[pos + 1] in namechars:\n match, pos = matchorfail(format, pos + 1)\n elif format[pos] in \"([\":\n pos, level = pos + 1, 1\n while level:\n match, pos = matchorfail(format, pos)\n tstart, tend = match.regs[3]\n token = format[tstart:tend]\n if token[0] in \"([\": \n level = level + 1\n elif token[0] in \")]\": \n level = level - 1\n else: \n break\n chunks.append((1, format[dollar + 1:pos]))\n else:\n chunks.append((0, format[pos:dollar + 1]))\n pos = dollar + 1 + (nextchar == \"$\")\n\n if pos < len(format): \n chunks.append((0, format[pos:]))\n return chunks", "def test_basics(key_formats, template, expected, fields, errors):\n\n format_product = FormatTemplate(key_formats=key_formats)\n statement = partial(format_product, template, **fields)\n\n if errors is None:\n result = statement()\n assert result == expected\n else:\n with pytest.raises(errors):\n result = statement()", "def render(self, context):\n #Turn our resolvers into actual values:\n try:\n object_obj = self.object_name_resolver.resolve(context)\n except AttributeError: #Happens if a string was passed in as the object name. Try to rescue this and treat as a var:\n object_obj = context.get(self.object_name_resolver, None)\n method_name = self.method_name_resolver.resolve(context) or str(self.method_name_resolver) #Can resolve as variable, but will also resolve as a string. 
Put in \"inverted commas\" to force string resolution\n if not object_obj or not method_name:\n raise TemplateSyntaxError(\"{{%% callmethod object_name.method_name %%}} cannot make sense of the resolved values for object_name.method_name '{object_name}.{method_name}'\".format(object_name=self.object_name_resolver, method_name=self.method_name_resolver))\n #Resolve the args\n args = []\n for arg_resolver in self.args_resolvers:\n arg = arg_resolver.resolve(context)\n args.append(arg)\n #Resolve the kwargs\n kwargs = {}\n for k_raw, v_resolver in self.kwargs_resolvers.items():\n k = smart_text(k_raw,'ascii')\n v = v_resolver.resolve(context)\n kwargs[k]=v\n \n #Now try to call the method on the object\n try:\n output = getattr(object_obj, method_name)(*args, **kwargs)\n except Exception as e: #Fail silently, but tell the console:\n print(\"\\033[91m{err_type} from {{%% callmethod <{obj_name}>.{method_name}() %%}}: {err_msg}\\033[0m\".format(err_type=e.__class__.__name__, obj_name=object_obj, method_name=method_name, err_msg=e))\n output = \"\"\n \n #Set to context variable if a context variable:\n if self.asvar:\n context[self.asvar] = output #NB: context is a dict, which is mutable :-)\n return \"\"\n return output #Otherwise return output (i.e. render this string into the page) ", "def _render_str(self, template, ** params):\n\n for key in params:\n if(isinstance(params[key], str)):\n params[key] = params[key].decode('utf-8')\n if(isinstance(params[key], dict)):\n for sub_key in params[key]:\n if(isinstance(params[key][sub_key], str)):\n params[key][sub_key] = params[key][sub_key].decode('utf-8')\n t = constants.JINJA_ENV.get_template(template)\n return t.render(params)", "def render(self, template, context):\n try:\n template = self.environment.from_string(template)\n except TemplateSyntaxError as e:\n raise TemplateError(e)\n try:\n return template.render(**context)\n except (UndefinedError, TypeError) as e:\n raise TemplateError(e)", "def render_template(self):\n # create and expand commandline template\n tmpl_r1 = self.finditem.sub(r'{{\\2}}', self.raw_template)\n tmpl_r2 = jinja2.Template(tmpl_r1).render(self.variables)\n self.relation.script = tmpl_r2\n self.relation.template_sha256 = self.variables['template_sha256']", "def template1(self, width, height):\n style = '\\n'.join(self.style_lines())\n defs = '\\n'.join(self.defs_lines())\n body = '\\n'.join(self.body_lines())\n defs_block = '' if not (style or defs) else '''<defs>\n <style type=\"text/css\"><![CDATA[\n%s\\n ]]></style>\\n%s\\n</defs>''' % (style, defs)\n txt = '''<?xml version=\"1.0\" standalone=\"no\"?>\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n<svg width=\"%s\" height=\"%s\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\">\n%s\\n%s\\n</svg>\\n''' % (width, height, defs_block, body)\n return txt", "def _atomic_prescriptive_template(\n cls,\n configuration: Optional[ExpectationConfiguration] = None,\n result: Optional[ExpectationValidationResult] = None,\n language: Optional[str] = None,\n runtime_configuration: Optional[dict] = None,\n ) -> Tuple[str, dict, Optional[dict]]:\n if runtime_configuration is None:\n runtime_configuration = {}\n\n styling: Optional[dict] = runtime_configuration.get(\"styling\")\n\n expectation_type: str\n expectation_kwargs: dict\n if configuration:\n expectation_type = configuration.expectation_type\n expectation_kwargs = configuration.kwargs\n else:\n if not isinstance(result, ExpectationValidationResult):\n 
expectation_validation_result_value_error_msg = (\n \"Renderer requires an ExpectationConfiguration or ExpectationValidationResult to be passed in via \"\n \"configuration or result respectively.\"\n )\n raise ValueError(expectation_validation_result_value_error_msg)\n\n if not isinstance(result.expectation_config, ExpectationConfiguration):\n expectation_configuration_value_error_msg = (\n \"Renderer requires an ExpectationConfiguration to be passed via \"\n \"configuration or result.expectation_config.\"\n )\n raise ValueError(expectation_configuration_value_error_msg)\n expectation_type = result.expectation_config.expectation_type\n expectation_kwargs = result.expectation_config.kwargs\n\n params_with_json_schema = {\n \"expectation_type\": {\n \"schema\": {\"type\": \"string\"},\n \"value\": expectation_type,\n },\n \"kwargs\": {\n \"schema\": {\"type\": \"string\"},\n \"value\": expectation_kwargs,\n },\n }\n template_str = \"$expectation_type(**$kwargs)\"\n\n return template_str, params_with_json_schema, styling", "def __format__(self, format_spec):\n # This calls the compiled regex stored on ANSIString's class to analyze the format spec.\n # It returns a dictionary.\n format_data = self.re_format.match(format_spec).groupdict()\n clean = self.clean()\n base_output = ANSIString(self.raw())\n align = format_data.get(\"align\", \"<\")\n fill = format_data.get(\"fill\", \" \")\n\n # Need to coerce width into an integer. We can be certain that it's numeric thanks to regex.\n width = format_data.get(\"width\", None)\n if width is None:\n width = len(clean)\n else:\n width = int(width)\n\n if align == \"<\":\n base_output = self.ljust(width, fill)\n elif align == \">\":\n base_output = self.rjust(width, fill)\n elif align == \"^\":\n base_output = self.center(width, fill)\n elif align == \"=\":\n pass\n\n # Return the raw string with ANSI markup, ready to be displayed.\n return base_output.raw()", "def render_str(template, **params):\n\n template_jinja = jinja_env.get_template(template)\n return template_jinja.render(params)", "def format(self, extra=None, *args, **kwargs):\n if extra is not None:\n for key, value in extra.items():\n if key not in kwargs:\n kwargs[key] = value\n return super(Format, self).format(self.format_string, *args, **kwargs)", "def _format_output(**values):\r\n return WEATHER_TEXT.format(**values)", "def apply_custom_template(self, name, filename, context):\n with open(os.path.join(self.build_path, filename), 'r') as f:\n template = actions.ActionsTemplate.from_dict(json.loads(f.read()))\n\n outputs = template.apply(context, self)\n\n for key, value in six.iteritems(outputs):\n context[key] = value", "def render(*args, **kwargs):\n if args:\n assert len(args) == 1, \\\n 'Expected exactly one argument, but got %r' % (args,)\n template = loader.load(args[0])\n else:\n template = cherrypy.thread_data.template\n ctxt = Context(url=cherrypy.url)\n ctxt.push(kwargs)\n return template.generate(ctxt)", "def render_template(self, template_txt, model, res_ids, post_process=False):\n multi_mode = True\n if isinstance(res_ids, (int, long)):\n multi_mode = False\n res_ids = [res_ids]\n\n results = dict.fromkeys(res_ids, u\"\")\n\n # try to load the template\n try:\n mako_env = mako_safe_template_env if self.env.context.get('safe') else mako_template_env\n template = mako_env.from_string(tools.ustr(template_txt))\n except Exception:\n _logger.info(\"Failed to load template %r\", template_txt, exc_info=True)\n return multi_mode and results or results[res_ids[0]]\n\n # prepare 
template variables\n records = self.env[model].browse(filter(None, res_ids)) # filter to avoid browsing [None]\n res_to_rec = dict.fromkeys(res_ids, None)\n for record in records:\n res_to_rec[record.id] = record\n variables = {\n 'format_date': lambda date, format=False, context=self._context: format_date(self.env, date, format),\n 'format_tz': lambda dt, tz=False, format=False, context=self._context: format_tz(self.env, dt, tz, format),\n 'format_amount': lambda amount, currency, context=self._context: format_amount(self.env, amount, currency),\n 'user': self.env.user,\n 'ctx': self._context, # context kw would clash with mako internals\n }\n for res_id, record in res_to_rec.iteritems():\n variables['object'] = record\n try:\n render_result = template.render(variables)\n except Exception:\n _logger.info(\"Failed to render template %r using values %r\" % (template, variables), exc_info=True)\n raise UserError(_(\"Failed to render template %r using values %r\")% (template, variables))\n if render_result == u\"False\":\n render_result = u\"\"\n results[res_id] = render_result\n\n if post_process:\n for res_id, result in results.iteritems():\n results[res_id] = self.render_post_process(result)\n\n return multi_mode and results or results[res_ids[0]]", "def expand_template(self, template, context):\n r = Template(template).render(Context(context))\n logging.debug(\"LDAP: Expanding template: '%s' -> '%s'\" % (template, r))\n return r", "def _make_formatter(*args, **kwargs):\n # pylint: disable = no-else-return\n\n assert not(args and kwargs)\n\n if args:\n # tuples are given for the whole command string but applied per token.\n # We need to supply only the tuples which are needed for the current\n # token.\n args = list(args[::-1])\n pcents = _re.compile(r'%[^%]').findall\n\n def formatter(value):\n \"\"\" Tuple formatter \"\"\"\n count = len(pcents(value))\n torepl = []\n while len(torepl) < count:\n torepl.append(args.pop())\n return value % tuple(torepl)\n return formatter\n\n elif kwargs:\n return lambda x: x % kwargs\n\n return lambda x: x", "def format_substitutions(subs: Union[SubstituteTerm, List[SubstituteTerm]]):\n text = \"\"\n if isinstance(subs, SubstituteTerm):\n term_str = str(subs)\n for line in term_str.split('\\n'):\n text += Markup.escape(line) + Markup('<br />')\n text += Markup('<br />')\n return text\n for term in subs:\n term_str = str(term)\n for line in term_str.split('\\n'):\n text += Markup.escape(line) + Markup('<br />')\n text += Markup('<br />')\n return text", "def render_template(content, **context_args):\n template = Template(\"{% load font_awesome %}\" + content)\n return template.render(Context(context_args))", "def parse_template(self, template, **context):\n required_blocks = [\"subject\", \"body\"]\n optional_blocks = [\"text_body\", \"html_body\", \"return_path\", \"format\"]\n\n if self.template_context:\n context = dict(self.template_context.items() + context.items())\n blocks = self.template.render_blocks(template, **context)\n\n for rb in required_blocks:\n if rb not in blocks:\n raise AttributeError(\"Template error: block '%s' is missing from '%s'\" % (rb, template))\n\n mail_params = {\n \"subject\": blocks[\"subject\"].strip(),\n \"body\": blocks[\"body\"]\n }\n for ob in optional_blocks:\n if ob in blocks:\n if ob == \"format\" and mail_params[ob].lower() not in [\"html\", \"text\"]:\n continue\n mail_params[ob] = blocks[ob]\n return mail_params", "def format_template(cls, **kw):\n d = cls.DEFAULT_PARAMS.copy()\n d.update(kw)\n try:\n return 
cls.PROTO_TEMPLATE % d\n except KeyError as err:\n raise OriginateMissingParameter(\n \"Missing originate parameter %s\" % err)", "def tpl(x):\n return 3 * x", "def render_template(\n template_name: str = \"index.html\", context: t.Dict[str, str] = {}\n):\n html_str: str\n with open(template_name, \"r\") as f:\n html_str = f.read()\n html_str = html_str.format(**context)\n return html_str\n # return f\"<h1>Hello {path=}</h1>\\n{template_name=}\"" ]
[ "0.5875008", "0.58742577", "0.58426213", "0.5590589", "0.5554982", "0.5440268", "0.5436101", "0.53913677", "0.5359046", "0.5331052", "0.530326", "0.5283194", "0.5190745", "0.51852167", "0.5137004", "0.51337534", "0.5114587", "0.50861675", "0.5056899", "0.5040804", "0.5000452", "0.49814096", "0.49765435", "0.49765435", "0.49765435", "0.4975772", "0.49756187", "0.49577516", "0.49286833", "0.49285817", "0.49148536", "0.49020845", "0.48935142", "0.48931962", "0.48916045", "0.4890418", "0.4855819", "0.48554567", "0.48526788", "0.48464078", "0.4842193", "0.4842049", "0.48415282", "0.4840397", "0.48272595", "0.48153144", "0.48125166", "0.47976214", "0.47890607", "0.4781522", "0.4778412", "0.4774761", "0.47601396", "0.47395313", "0.47304574", "0.47255942", "0.4723588", "0.47121242", "0.47048384", "0.47013688", "0.46955845", "0.4685758", "0.46846196", "0.46813762", "0.46778238", "0.46773133", "0.46769473", "0.46754304", "0.46715644", "0.46604434", "0.46594727", "0.46507043", "0.46459737", "0.46450374", "0.4642772", "0.4638452", "0.46371263", "0.4632677", "0.46320418", "0.46303558", "0.4630071", "0.46289706", "0.4624258", "0.4604405", "0.45994496", "0.45958912", "0.45942006", "0.45886615", "0.45870003", "0.45854285", "0.458324", "0.45829248", "0.45826912", "0.45791575", "0.45778686", "0.45776254", "0.45710394", "0.45696896", "0.45694172", "0.45586598" ]
0.6679136
0
Get an IMDB ID from either the media's global tags, or the config. Since IMDB IDs are required for this project, it will bug the user for one interactively if not found.
def get_imdb_id(self, imdb_id: Any) -> str: if not imdb_id: general_track = self.media_info.general_tracks[0].to_data() imdb_id = general_track.get("imdb") if not imdb_id: print("No IMDB ID was provided but is required...") while not imdb_id or not isinstance(imdb_id, str): user_id = input("IMDB ID (e.g., 'tt0487831'): ") if not self.IMDB_ID_T.match(user_id): print(f"The provided IMDB ID {user_id!r} is not valid...") print("Expected e.g., 'tt0487831', 'tt10810424', (include the 'tt').") else: imdb_id = user_id return imdb_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def alternative_media_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"alternative_media_id\")", "def imdb_id(title):\n pass", "def _get_id(mf, url=None):\n\n\tprops = mf['properties']\n\n\tif 'uid' in props:\n\t\treturn props['uid'][0]\n\telif 'url' in props:\n\t\treturn props['url'][0]\n\telse:\n\t\treturn None", "def _get_device_id(api: Mobileclient) -> str:\n\n try:\n _get_device_id_from_environment()\n except KeyError:\n pass\n\n return _get_device_id_from_registered(api)", "def _get_device_id_from_environment() -> str:\n\n return os.environ[\"GOOGLE_MUSIC_DEVICE_ID\"]", "def id(self):\n return self.settings['your_botid']", "def media_id(self):\n try:\n return Html.toId(self.content)\n except:\n Mp3Error(1)", "def media_content_id(self) -> str | None:\n if self._device.movie.handle:\n return self._device.movie.handle\n return None", "def get_media_id(media_url):\n split_url = media_url.split(\"/\")\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/file.png\n if split_url[-2] == \"media\":\n return split_url[-1]\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/abc/0/file.png\n else:\n #This is required for now due to the SDK parsing out the `/`s\n return \"%2F\".join(split_url[-3:])", "def get_tmdb_id(self, tmdb_id: Any) -> Optional[str]:\n if not tmdb_id:\n general_track = self.media_info.general_tracks[0].to_data()\n tmdb_id = general_track.get(\"tmdb\")\n if not tmdb_id:\n print(\"Warning: No TMDB ID was provided...\")\n return None\n if not self.TMDB_ID_T.match(tmdb_id) or not isinstance(tmdb_id, str):\n print(f\"The provided TMDB ID {tmdb_id!r} is not valid...\")\n print(\"Expected e.g., 'tv/2490', 'movie/14836', (include the 'tv/' or 'movie/').\")\n raise ValueError(\"Invalid TMDB ID\")\n return tmdb_id", "def lookup_by_id(i_d):\n imdb_id = 0\n str_id = str(i_d)\n if str_id[0].isdigit():\n #contact the moviedb api for inmdb id\n res = requests.get(\n f\"https://api.themoviedb.org/3/movie/{i_d}/external_ids?api_key=28dda9f76d76f128b47831768bc9a103\")\n res.raise_for_status()\n mov = res.json()\n imdb_id = mov[\"imdb_id\"]\n else:\n imdb_id = i_d\n # Contact API\n try:\n response = requests.get(\n f\"http://www.omdbapi.com/?i={imdb_id}&apikey=ced7be9a\")\n response.raise_for_status()\n except requests.RequestException:\n return None\n\n # parse response\n try:\n movie = response.json()\n return {\n \"title\":movie[\"Title\"],\n \"id\":movie[\"imdbID\"],\n \"plot\":movie[\"Plot\"],\n \"year\":movie[\"Year\"],\n \"poster\":movie[\"Poster\"],\n \"gross\":movie[\"BoxOffice\"],\n \"rating\":movie[\"imdbRating\"],\n \"website\":movie[\"Website\"],\n \"director\":movie[\"Director\"],\n \"writer\":movie[\"Writer\"],\n \"genre\":movie[\"Genre\"],\n \"actors\":movie[\"Actors\"]\n }\n\n except (KeyError, TypeError, ValueError):\n return None", "def the_tvdb_dot_com_id(title):\n pass", "def media_content_id(self) -> str | None:\n # The lovelace app loops media to prevent timing out, don't show that\n if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:\n return None\n media_status = self._media_status()[0]\n return media_status.content_id if media_status else None", "def get_id(conf_name: str=CONFIG_FILE) -> Optional[int]:\n with open(conf_name, 'r') as fobj:\n data = json.load(fobj)\n\n uid = data.get('id')\n\n assert uid is None or isinstance(uid, int), \\\n 'The user id must be an integer if it exists'\n\n return uid", "def spotify_id_from_token(access_token: str) -> Optional[str]:\n if access_token 
is None:\n return None\n headers = {\"Authorization\": \"Bearer {}\".format(access_token)}\n response = requests.post(\"https://api.spotify.com/v1/me\", headers=headers)\n if response.status_code != 200:\n return None\n user = response.json()\n if \"id\" not in user:\n return None\n return user[\"id\"]", "def get_id(self):\n try:\n return self.inst.query('*IDN?')[:36]\n except errors.VisaIOError as e:\n logger.warning(e)\n return 'Device not connected.'", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n #logger.debug('DAILYMOTION VIDEO FOUND %s' % url)\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if len(path_list) == 3 and (p.path.startswith('/embed/video/') or p.path.startswith('/swf/video/')):\n # http://www.dailymotion.com/embed/video/xmp7zw\n return re.sub('_.+', '', path_list[2])\n elif len(path_list) == 2 and (p.path.startswith('/video/') or p.path.startswith('/swf/')):\n # http://www.dailymotion.com/video/xmp7zw_whatever\n # http://www.dailymotion.com/swf/xmp7zw\n return re.sub('_.+', '', path_list[1])\n \n return ''", "def fetch_current_user_id(s):", "def get_self_id(self):\n configfilepath=\"./camera.ini\"\n\n config = configparser.ConfigParser()\n config.read(configfilepath)\n camid = \"\"\n if config.has_section(\"camera\"):\n camid = config.get(\"camera\",\"id\")\n print(\"Found CamID in camera.ini: \" + camid)\n else:\n config.add_section(\"camera\")\n\n if (camid == \"\"):\n h = iter(hex(getnode())[2:].zfill(12))\n camid = \":\".join(i + next(h) for i in h)\n config.set(\"camera\",\"id\",camid)\n with open(configfilepath, 'w') as configfile:\n config.write(configfile)\n print(\"Generated CamID and wrote to camera.ini: \" + camid)\n \n return camid", "def _get_ID(self):\n raw_data = imdb.search_for_title(self.title)\n if len(raw_data) > 1:\n raw_data = raw_data[0] # Pulls the first value of the title (the closest match)\n # if there is more than one\n self.ID = raw_data['imdb_id']", "def _get_experiment_id(experiment_name: str, config: SQAConfig) -> Optional[int]:\n exp_sqa_class = config.class_to_sqa_class[Experiment]\n with session_scope() as session:\n sqa_experiment_id = (\n session.query(exp_sqa_class.id) # pyre-ignore\n .filter_by(name=experiment_name)\n .one_or_none()\n )\n\n if sqa_experiment_id is None:\n return None\n return sqa_experiment_id[0]", "def get_id(connection):\n if connection is None:\n return None\n return connection.id", "def _get_device_id_from_registered(api) -> str:\n\n try:\n api.oauth_login(\"bad\")\n except InvalidDeviceId as original_exception:\n error_message = original_exception.args[0]\n\n device_ids_str = error_message.split(\"Your valid device IDs are:\")[-1]\n device_ids = device_ids_str.split(\"\\n\")\n device_ids = [device_id.replace(\"* \", \"\") for device_id in device_ids]\n return device_ids[-1]", "def _GetIdFromInstanceDirStr(instance_dir):\n match = _RE_LOCAL_INSTANCE_ID.match(instance_dir)\n if match:\n return match.group(\"ins_id\")\n\n # To support the device which is not created by acloud.\n if os.path.expanduser(\"~\") in instance_dir:\n return \"1\"\n\n return None", "def media_content_id(self):\n return self._table.active_track.id if self._table.active_track else None", "def fn_GetTMDBId(self, details):\n\n # If the custom url was not actually defined and we had no cached\n # data, then there is nothing to do.\n #\n if details is None:\n return\n print 
\"GetTMDBId details: %s\" % details", "def get_picture_id(path):\n\t\tif path is None:\n\t\t\treturn\n\t\tcon = mdb.connect('localhost', 'root', 'sensepass', 'sensecambrowser')\n\t\twith con:\n\t\t\tquery = \"SELECT id from fileuploader_picture WHERE file=%s\" % (path)\n\t\t\tcur = con.cursor()\n\t\t\tcur.execute(query)\n\t\t\tdata = cur.fetchall()\n\t\t\tprint \"len(data)\"\n\t\t\tprint data\n\t\t\tif len(data) > 0:\n\t\t\t\treturn data[0]\n\t\t\treturn None", "def get_mediatype_id(self, description):\n result = self.conn.mediatype.get(filter={'description': description})\n\n if result:\n mediatypeid = result[0]['mediatypeid']\n else:\n mediatypeid = None\n\n return mediatypeid", "def unique_id(self):\n if self._uuid != '':\n return \"linkplay_media_\" + self._uuid", "def get_tvdb_id(self, tvdb_id: Any) -> Optional[int]:\n if not tvdb_id:\n general_track = self.media_info.general_tracks[0].to_data()\n tvdb_id = general_track.get(\"tvdb\")\n if not tvdb_id:\n print(\"Warning: No TVDB ID was provided...\")\n return None\n if isinstance(tvdb_id, int):\n tvdb_id = str(tvdb_id)\n if not self.TVDB_ID_T.match(tvdb_id) or not isinstance(tvdb_id, str):\n print(f\"The provided TVDB ID {tvdb_id!r} is not valid...\")\n print(\"Expected e.g., '79216', '1395', (not the url slug e.g., 'the-office-us').\")\n raise ValueError(\"Invalid TVDB ID\")\n return int(tvdb_id)", "def get_media_id_from_post(media_obj):\n if media_obj:\n media_id = media_obj.get('id')\n return media_id\n return", "def getGUIDByBdcfg(configfile):\n generalDict, projectDict, solutionDict = Engine.readConfiguration(configfile)\n return projectDict['uuid']", "def id_check(self, message):\n matches = ID_SYNTAX.match(message)\n if matches:\n return matches.group(1)\n return None", "def _get_suggested_id(self, info: dict) -> str:\n return ulid_util.ulid()", "def getID():", "def _id_from_url(url):\n url = re.sub(r'\\?.*', '', url)\n video_id = url.split('/')[-2]\n return video_id", "def parse_id(app_object_id_string):\n splitter = re.compile(r'-')\n tokens = splitter.split(app_object_id_string)\n app_string = tokens[0]\n model_string = tokens[1]\n content_id = int(tokens[2])\n content_type = ContentType.objects.get(app_label=app_string, model=model_string)\n object = content_type.model_class().objects.get(id=content_id)\n return object", "def get_current_record_id(self):\n url = self.selenium.get_location()\n for part in url.split(\"/\"):\n oid_match = re.match(OID_REGEX, part)\n if oid_match is not None:\n return oid_match.group(2)\n raise AssertionError(\"Could not parse record id from url: {}\".format(url))", "def image_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"image_id\")", "def image_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"image_id\")", "def getLocalizedIdentifier(_session, _el):\n import kernel as core\n kernel = core.Kernel.getSingleton()\n assert _el is not None\n _caption = getIdentifier(kernel.session(), _el, kernel.getCurrentTranslation())\n _system = False\n if _caption is None:\n caption = kernel.session().get_idtf(_el)\n if not isSystemId(caption):\n _caption = unicode(caption)\n _system = True\n else:\n _caption = u\"\"\n else:\n assert isinstance(_caption, unicode)\n return _caption, _system", "def getEpisodeId(path, conn):\n cur = conn.cursor()\n cur.execute(\"SELECT id_episode FROM episode WHERE path=?\", (path,))\n id_episode = cur.fetchone()[0]\n return id_episode", "def media_entry_id(self):\n return self.getattr('media_entry_id')", "def media_content_id(self):\n 
return int(self._gallery_status[\"current_item\"])", "def get_image_id(image):\n if not is_valid_image(image):\n return False\n\n return AVAILABLE_IMAGES[image]['imageid']", "def get_video_id(self):\n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n if p.netloc.endswith('vimeo.com') and 'hubnut/album/' in p.path:\n return ''\n \n if p.netloc.endswith('vimeo.com') and p.path.split('/')[-1:][0].isdigit():\n # Url of type http://vimeo.com/21347521\n # mobile type http://vimeo.com/m/21347521\n return p.path.split('/')[-1:][0]\n elif p.netloc.endswith('vimeo.com') and p.path == '/moogaloop.swf' and 'clip_id' in p.query:\n # Old embed code style url\n #params = dict([part.split('=') for part in p.query.split('&')])\n params = cgi.parse_qs(p.query)\n if 'clip_id' in params:\n return params['clip_id'][0]\n elif p.netloc == 'player.vimeo.com' and p.path.startswith('/video/'):\n # Url of type http://player.vimeo.com/video/21347521?title=0&amp;byline=0&amp;portrait=0\n path = p.path.split('/')\n return path[-1]\n \n return ''", "def get_video_id(url):\n\n if not url:\n return \"\"\n\n # If URL is embedded\n if \"embed\" in url:\n return url.split(\"/\")[-1]\n\n parse_result = urlparse(url)\n query = parse_qs(parse_result.query)\n return query[\"v\"][0]", "def shared_gallery_image_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"shared_gallery_image_id\")", "def up_id_from_rx_id(reactome_id):\n react_url = 'http://www.reactome.org/ContentService/data/query/' \\\n + reactome_id + '/referenceEntity'\n res = requests.get(react_url)\n if not res.status_code == 200:\n return None\n _, entry, entry_type = res.text.split('\\t')\n if entry_type != 'ReferenceGeneProduct':\n return None\n id_entry = entry.split(' ')[0]\n db_ns, db_id = id_entry.split(':')\n if db_ns != 'UniProt':\n return None\n return db_id", "def app_id(self):\n return self._app_id or self._modules['default'].data.get('application')", "def get_id(html):\n\ttry:\n\t\tsong_id = re.findall('soundcloud://sounds:(.*?)\"', html)[0]\n\t\treturn song_id\n\texcept IndexError:\n\t\tprint(\"\\033[91m✘ Could not find song ID\\033[0m\")\n\t\tsys.exit()", "def media_content_id(self):\n if 'current_title' in self._status:\n return self._status['current_title']", "def giveId(what,string):\n if what == \"characters\":\n return list(engine.execute(f\"SELECT char_id FROM characters WHERE name ='{string}';\"))[0][0]\n elif what == \"episodes\":\n return list(engine.execute(f\"SELECT ep_id FROM episodes WHERE episode ='{string}';\"))[0][0]", "def tag_id(self, tag):\n assert isinstance(tag, str)\n\n df = self.dfs[\"tags\"]\n tag_records = df[df.tag == tag]\n if 1 == len(tag_records): \n return tag_records[\"id\"].values[0]\n elif 1 < len(tag_records): \n raise Exception(\"More than one record exist by tag\")\n else :\n # We should not be strict to tag name since it is a user input.\n import warnings\n warnings.warn(\"No record matched with tag\", Warning)\n return None", "def getID(self) -> int:\n ...", "def __getIDFromCID(self, cid):\n if cid == \"daemon\": return self._did\n \n if cid in self._attachments or cid == self._did:\n return cid\n \n for k,v in self._attachments.items():\n if cid == v.cmd: return k\n \n return None", "def getSteamid(value):\r\n value = str(value)\r\n if value.startswith( (\"STEAM_\", \"BOT_\")):\r\n return value\r\n userid = es.getuserid(value)\r\n if userid:\r\n steamid = playerlib.uniqueid( userid, True )\r\n return steamid\r\n 
return None", "def get_id_regular_link(link = None):\n #Legacy compatibility\n choppedLink = legacy_check(link)\n # dont bother if we are none.\n if link == None:\n return link\n\n vid_url_params = choppedLink[3].split(\"&\")\n # Search the id in the list of elements of the url\n vid = search_video_id(vid_url_params)\n\n # And dont forget the links with hashtags #\n vid = vid.split(\"#\")[0]\n\n return vid # change this var names TODO", "async def getemojiid(ctx, emoji: discord.Emoji):\n return await ctx.send(f\"{emoji} - `{emoji}`\")", "def get_device_id(self):\n\n if self.have_metadata is False:\n self._get_metadata()\n self.have_metadata = True\n\n\t\ttry:\n\t\t\treturn self.keyinfo['tracking_id'].attrs['device_id']\n\t\texcept:\n\t\t\treturn None", "def getOpenId(self):\n if self.sess is None: return None\n return self.sess.data.get('openid.identity')", "def get_device_id(self) -> str:\n return Config.get('device_id')", "def get_video_uploader(self, video_ID): #WORKS\n try:\n done = self.cur.execute(\"SELECT uploader FROM videos WHERE video_ID = \\\"{}\\\"\".format(video_ID))\n uploader = self.cur.fetchone()[0]\n return uploader\n except:\n return \"Error getting username\"", "def _DefaultAppId():\n return os.getenv('APPLICATION_ID', '_')", "def find_id(href):\n ID = idRE.search(href)\n if ID:\n return ID.group(1)", "def _get_suggested_id(self, info: dict) -> str:\n return info[CONF_NAME]", "def get_random_ID(self): # WORKS\n self.cur.execute(\"SELECT video_ID FROM videos ORDER BY RAND() LIMIT 1\") # Selects video_ID from 1 random row.\n return self.cur.fetchone()[0]", "def get_osd_id(path):\n osd_id = read_one_line(path, 'whoami')\n if osd_id is not None:\n check_osd_id(osd_id)\n return osd_id", "def get_user_id(self):\r\n message = self.q(css='BODY').text[0].strip()\r\n match = re.search(r' user_id ([^$]+)$', message)\r\n return match.groups()[0] if match else None", "def getCurrentWindowId(*args):", "def extract_gi_id(description):\n fields = description[1:].split('|')\n if 'gi' not in fields:\n return None\n return fields[1 + fields.index('gi')]", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def get_midas_id(trd, ai_fallback=False):\n\n def from_string(s):\n \"\"\"\n\n :param s:\n :return:\n \"\"\"\n srch = re.search(MIDAS_ID_REGEX, s)\n if srch:\n return int(srch.group(1))\n\n if trd.Counterparty().Name() == MDK_CPTY_NAME:\n val = from_string(trd.OptionalKey())\n if val:\n return val\n\n if ai_fallback: # additional info fallback is really slow\n source_system = trd.AdditionalInfo().Source_System()\n if source_system and source_system.upper() == 'MIDAS':\n full_id = trd.AdditionalInfo().Source_Trade_Id()\n if full_id:\n return from_string(full_id)", "def guid(self):\n _, _, _, guid, _ = RPR.GetSetMediaItemTakeInfo_String(\n self.id, 'GUID', 'stringNeedBig', False\n )\n return guid", "def id(user=None):\n command = \"id {0}\".format(user) if user else \"id\"\n system_command(command)", "def _get_user_id(self, name):\n try:\n apiResponse = twitchAPI.twitchAPIGet(\"/users\", {\"login\": name}) #Try to get user id from API\n userID = apiResponse[\"data\"][0][\"id\"]\n except (KeyError, APIUnavailable):\n userID = input(\"Please 
enter the user id of the user: \")\n except IndexError: #If Twitch API does not return user id\n print(\"That user does not exist on Twitch.\")\n userID = False\n return(userID)", "def community_gallery_image_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"community_gallery_image_id\")", "def get_take_audio_id(self, take_id):\n def execute_sql(cursor):\n cursor.execute(\"SELECT audioId FROM Takes WHERE id = ?\",\n (take_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n \n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\"Failed to get audio ID for take id ({take_id}): \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d", "def get_image_id(filename):\n del filename\n global GLOBAL_IMG_ID\n GLOBAL_IMG_ID += 1\n return GLOBAL_IMG_ID", "def create_omdb_poster_get(omdb_id, base=\"http://img.omdbapi.com/\"):\n apikey = current_app.config.get(\"OMDB_API_KEY\", \"\").strip()\n if not apikey:\n raise ValueError(\"No OMDB API Key supplied in configuration!\")\n\n omdb_id = norm_imdbid(omdb_id)\n if not omdb_id:\n return None\n\n return requests.get(base, params={\n 'apikey': apikey,\n 'i': omdb_id\n })", "def get_movie_id(self) -> str:\n return self.movie.id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id", "def _get_id(self):\n return self.__id" ]
[ "0.5761104", "0.5743748", "0.559037", "0.5428841", "0.5407892", "0.5339938", "0.5334296", "0.53217006", "0.5253112", "0.52254945", "0.52251637", "0.5214888", "0.5210509", "0.5178844", "0.5142981", "0.5113672", "0.5091264", "0.5082251", "0.5070948", "0.5068511", "0.5062682", "0.505709", "0.50503653", "0.5044069", "0.5041648", "0.50350076", "0.50331473", "0.50239646", "0.5009026", "0.5008585", "0.4988611", "0.49815857", "0.49786764", "0.497707", "0.49744946", "0.49635744", "0.4961391", "0.49568427", "0.49430528", "0.49430528", "0.49374864", "0.49351898", "0.49306956", "0.49255538", "0.49176556", "0.4914819", "0.4914564", "0.49047527", "0.49030146", "0.4902392", "0.4895131", "0.4871925", "0.4866877", "0.48449716", "0.48361808", "0.48360136", "0.48304534", "0.4823161", "0.48122358", "0.48111048", "0.48038837", "0.4794285", "0.47894034", "0.4787571", "0.47852415", "0.47756982", "0.47738975", "0.47716516", "0.47697526", "0.47592273", "0.47586614", "0.47566086", "0.47566086", "0.47566086", "0.47566086", "0.47566086", "0.47566086", "0.47559422", "0.47542766", "0.47494608", "0.47483176", "0.47482577", "0.47445375", "0.47326455", "0.4712688", "0.47039795", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874", "0.47018874" ]
0.64897996
0
Get a TMDB ID from either the media's global tags, or the config. It will raise a ValueError if the provided ID is invalid.
def get_tmdb_id(self, tmdb_id: Any) -> Optional[str]: if not tmdb_id: general_track = self.media_info.general_tracks[0].to_data() tmdb_id = general_track.get("tmdb") if not tmdb_id: print("Warning: No TMDB ID was provided...") return None if not self.TMDB_ID_T.match(tmdb_id) or not isinstance(tmdb_id, str): print(f"The provided TMDB ID {tmdb_id!r} is not valid...") print("Expected e.g., 'tv/2490', 'movie/14836', (include the 'tv/' or 'movie/').") raise ValueError("Invalid TMDB ID") return tmdb_id
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_tvdb_id(self, tvdb_id: Any) -> Optional[int]:\n if not tvdb_id:\n general_track = self.media_info.general_tracks[0].to_data()\n tvdb_id = general_track.get(\"tvdb\")\n if not tvdb_id:\n print(\"Warning: No TVDB ID was provided...\")\n return None\n if isinstance(tvdb_id, int):\n tvdb_id = str(tvdb_id)\n if not self.TVDB_ID_T.match(tvdb_id) or not isinstance(tvdb_id, str):\n print(f\"The provided TVDB ID {tvdb_id!r} is not valid...\")\n print(\"Expected e.g., '79216', '1395', (not the url slug e.g., 'the-office-us').\")\n raise ValueError(\"Invalid TVDB ID\")\n return int(tvdb_id)", "def tag_id(self, tag):\n assert isinstance(tag, str)\n\n df = self.dfs[\"tags\"]\n tag_records = df[df.tag == tag]\n if 1 == len(tag_records): \n return tag_records[\"id\"].values[0]\n elif 1 < len(tag_records): \n raise Exception(\"More than one record exist by tag\")\n else :\n # We should not be strict to tag name since it is a user input.\n import warnings\n warnings.warn(\"No record matched with tag\", Warning)\n return None", "def get_imdb_id(self, imdb_id: Any) -> str:\n if not imdb_id:\n general_track = self.media_info.general_tracks[0].to_data()\n imdb_id = general_track.get(\"imdb\")\n if not imdb_id:\n print(\"No IMDB ID was provided but is required...\")\n while not imdb_id or not isinstance(imdb_id, str):\n user_id = input(\"IMDB ID (e.g., 'tt0487831'): \")\n if not self.IMDB_ID_T.match(user_id):\n print(f\"The provided IMDB ID {user_id!r} is not valid...\")\n print(\"Expected e.g., 'tt0487831', 'tt10810424', (include the 'tt').\")\n else:\n imdb_id = user_id\n return imdb_id", "def media_id(self):\n try:\n return Html.toId(self.content)\n except:\n Mp3Error(1)", "def get_id(conf_name: str=CONFIG_FILE) -> Optional[int]:\n with open(conf_name, 'r') as fobj:\n data = json.load(fobj)\n\n uid = data.get('id')\n\n assert uid is None or isinstance(uid, int), \\\n 'The user id must be an integer if it exists'\n\n return uid", "def _get_id(mf, url=None):\n\n\tprops = mf['properties']\n\n\tif 'uid' in props:\n\t\treturn props['uid'][0]\n\telif 'url' in props:\n\t\treturn props['url'][0]\n\telse:\n\t\treturn None", "def get_id(self) -> Optional[str]:\n return self.id_", "def get_device_id(self) -> str:\n return Config.get('device_id')", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def _GetIdFromInstanceDirStr(instance_dir):\n match = _RE_LOCAL_INSTANCE_ID.match(instance_dir)\n if match:\n return match.group(\"ins_id\")\n\n # To support the device which is not created by acloud.\n if os.path.expanduser(\"~\") in instance_dir:\n return \"1\"\n\n return None", "def alternative_media_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"alternative_media_id\")", "def get_dataset_id(thing: object) -> t.DatasetId:\n if isinstance(thing, int):\n return t.DatasetId(thing)\n try:\n int_id = int(thing) # type: ignore\n return t.DatasetId(int_id)\n except ValueError:\n raise err.InvalidDatasetError(id=str(thing))", "def getGUIDByBdcfg(configfile):\n generalDict, projectDict, solutionDict = Engine.readConfiguration(configfile)\n return projectDict['uuid']", "def get_media_id_from_post(media_obj):\n if media_obj:\n media_id = 
media_obj.get('id')\n return media_id\n return", "def _get_device_id(api: Mobileclient) -> str:\n\n try:\n _get_device_id_from_environment()\n except KeyError:\n pass\n\n return _get_device_id_from_registered(api)", "def get_media_id(media_url):\n split_url = media_url.split(\"/\")\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/file.png\n if split_url[-2] == \"media\":\n return split_url[-1]\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/abc/0/file.png\n else:\n #This is required for now due to the SDK parsing out the `/`s\n return \"%2F\".join(split_url[-3:])", "def _get_device_id_from_environment() -> str:\n\n return os.environ[\"GOOGLE_MUSIC_DEVICE_ID\"]", "def get_id(html):\n\ttry:\n\t\tsong_id = re.findall('soundcloud://sounds:(.*?)\"', html)[0]\n\t\treturn song_id\n\texcept IndexError:\n\t\tprint(\"\\033[91m✘ Could not find song ID\\033[0m\")\n\t\tsys.exit()", "def id(self) -> typing.Optional[str]:\n return self._values.get('id')", "def id(self) -> typing.Optional[str]:\n return self._values.get('id')", "def config_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"config_id\")", "def get_id(self, name=None):\n\n # Support using integer IDs directly\n if isinstance(name, int):\n return name\n\n self.ensure_loaded()\n if name is not None:\n ems_systems = self.search('name', name.upper(), searchtype=\"match\")\n if ems_systems.empty:\n sys_names = self.list_all()['name'].to_list()\n raise ValueError(\n 'No matching systems found. You have access to: {0}'.format(sys_names))\n id = ems_systems.iloc[0]['id']\n else:\n ems_systems = self.list_all()\n if ems_systems.shape[0] == 1:\n id = ems_systems.iloc[0]['id']\n else:\n raise LookupError(\n 'Multiple ems systems found. 
Please select one from the available:\\n{0}'\n .format(ems_systems.loc[:, ['id', 'name']])\n )\n return id", "def get_take_audio_id(self, take_id):\n def execute_sql(cursor):\n cursor.execute(\"SELECT audioId FROM Takes WHERE id = ?\",\n (take_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n \n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\"Failed to get audio ID for take id ({take_id}): \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d", "def get_record_id(thing: Union[\"Record\", t.RecordId, UUID, str]) -> t.RecordId:\n if isinstance(thing, UUID):\n return t.RecordId(thing)\n elif isinstance(thing, Record):\n return thing.id\n return t.RecordId(UUID(thing))", "def _id_from_url(url):\n url = re.sub(r'\\?.*', '', url)\n video_id = url.split('/')[-2]\n return video_id", "def get_url_param_by_id(measurement_id: str) -> str:\n if re.search('android|ios', measurement_id, re.IGNORECASE):\n return 'firebase_app_id'\n elif re.search('G-[A-Z0-9]{10}', measurement_id, re.IGNORECASE):\n return 'measurement_id'\n else:\n raise ValueError(f'Unsupported Measurement ID/Firebase App ID.')", "def up_id_from_rx_id(reactome_id):\n react_url = 'http://www.reactome.org/ContentService/data/query/' \\\n + reactome_id + '/referenceEntity'\n res = requests.get(react_url)\n if not res.status_code == 200:\n return None\n _, entry, entry_type = res.text.split('\\t')\n if entry_type != 'ReferenceGeneProduct':\n return None\n id_entry = entry.split(' ')[0]\n db_ns, db_id = id_entry.split(':')\n if db_ns != 'UniProt':\n return None\n return db_id", "def get_id(connection):\n if connection is None:\n return None\n return connection.id", "def _parse(self, the_id: typing.Union[int, str]) -> int:\n return int(the_id)", "def get_mediatype_id(self, description):\n result = self.conn.mediatype.get(filter={'description': description})\n\n if result:\n mediatypeid = result[0]['mediatypeid']\n else:\n mediatypeid = None\n\n return mediatypeid", "def getId(self):\n if getattr(self,'id',None):\n return self.id\n name = self.name\n #--Singleton records \n if name in frozenset(('FMAP','GAME','JOUR','KLST','PCDT','REFR','SPLM','TES3')):\n return None\n #--Special records.\n elif name == 'CELL':\n reader = self.getReader()\n srName = reader.findSubRecord('NAME',name)\n srData = reader.findSubRecord('DATA',name)\n (flags,gridX,gridY) = struct.unpack('3i',record.data)\n if flags & 1:\n self.id = cstrip(srName)\n else:\n self.id = '[%d,%d]' % (gridX,gridY)\n elif name == 'INFO':\n srData = self.getReader().findSubRecord('INAM',name)\n self.id = cstrip(srData)\n elif name == 'LAND':\n srData = self.getReader().findSubRecord('INTV',name)\n self.id = '[%d,%d]' % struct.unpack('2i',srData)\n elif name == 'PGRD':\n reader = self.getReader()\n srData = reader.findSubRecord('DATA',name)\n srName = reader.findSubRecord('NAME',name)\n gridXY = struct.unpack('2i',srData[:8])\n if srData != (0,0) or not srName:\n self.id = '[%d,%d]' % gridXY\n else:\n self.id = cstrip(srName)\n elif name == 'SCPT':\n srData = self.getReader().findSubRecord('SCHD',name)\n self.id = cstrip(srData[:32])\n #--Most records: id in NAME record.\n else:\n srData = self.getReader().findSubRecord('NAME',name)\n self.id = srData and cstrip(srData)\n #--Done\n return self.id", "def get_video_id(lookup_value, lookup_type='url'):\n if lookup_type == 'url':\n video_id = 
lookup_value.split('videos/')[1]\n elif lookup_type == 'content_id' or lookup_type == 'id':\n video_json = core.get_data('contents', lookup_value, return_json=True)\n video_id = video_json['id']\n else:\n errors.handlers.bad_lookup_type(lookup_type, ('url', 'content_id'))\n return video_id", "def id(self):\n return self.metadata[\"id\"]", "def get_model_id(thing: Union[\"Model\", t.ModelId, UUID, str]) -> t.ModelId:\n if isinstance(thing, UUID):\n return t.ModelId(thing)\n elif isinstance(thing, Model):\n return thing.id\n return t.ModelId(UUID(thing))", "def getID(self) -> int:\n ...", "def fn_GetTMDBId(self, details):\n\n # If the custom url was not actually defined and we had no cached\n # data, then there is nothing to do.\n #\n if details is None:\n return\n print \"GetTMDBId details: %s\" % details", "def get_device_id(self):\n\n if self.have_metadata is False:\n self._get_metadata()\n self.have_metadata = True\n\n\t\ttry:\n\t\t\treturn self.keyinfo['tracking_id'].attrs['device_id']\n\t\texcept:\n\t\t\treturn None", "def media_entry_id(self):\n return self.getattr('media_entry_id')", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def get_id(endpoint):\n _entity, _id = parser_endpoint(endpoint)\n\n return _id", "def get_video_id(url):\n\n if not url:\n return \"\"\n\n # If URL is embedded\n if \"embed\" in url:\n return url.split(\"/\")[-1]\n\n parse_result = urlparse(url)\n query = parse_qs(parse_result.query)\n return query[\"v\"][0]", "def _get_experiment_id(experiment_name: str, config: SQAConfig) -> Optional[int]:\n exp_sqa_class = config.class_to_sqa_class[Experiment]\n with session_scope() as session:\n sqa_experiment_id = (\n session.query(exp_sqa_class.id) # pyre-ignore\n .filter_by(name=experiment_name)\n .one_or_none()\n )\n\n if sqa_experiment_id is None:\n return None\n return sqa_experiment_id[0]", "def get_id(self):\n if self.is_root():\n return self.id_field.name\n elif self.has_id():\n return getattr(self, self.id_field)\n elif self.has_cid():\n return self.cid\n else:\n raise Exception(\"No id-like value set when get_id() called.\")", "def id_check(self, message):\n matches = ID_SYNTAX.match(message)\n if matches:\n return matches.group(1)\n 
return None", "def id(self) -> Optional[str]: # noqa: D401\n return self._id", "def getid(data):\n return int(data.split('/')[-1])", "def get_id(self):\n try:\n return self.inst.query('*IDN?')[:36]\n except errors.VisaIOError as e:\n logger.warning(e)\n return 'Device not connected.'", "def get_organization_id(thing: object) -> t.OrganizationId:\n if isinstance(thing, int):\n return t.OrganizationId(thing)\n try:\n int_id = int(thing) # type: ignore\n return t.OrganizationId(int_id)\n except ValueError:\n raise err.InvalidOrganizationError(id=str(thing))", "def get_by_id(cls, record_id):\n if any((isinstance(record_id, (str, bytes)) and record_id.isdigit(),\n isinstance(record_id, (int, float)))):\n return cls.query.get(int(record_id))\n return None", "def find_id(href):\n ID = idRE.search(href)\n if ID:\n return ID.group(1)", "def trace_id_get() -> tuple[str, str] | None:\n return trace_id_cv.get()", "def parse_id(app_object_id_string):\n splitter = re.compile(r'-')\n tokens = splitter.split(app_object_id_string)\n app_string = tokens[0]\n model_string = tokens[1]\n content_id = int(tokens[2])\n content_type = ContentType.objects.get(app_label=app_string, model=model_string)\n object = content_type.model_class().objects.get(id=content_id)\n return object", "def get_midas_id(trd, ai_fallback=False):\n\n def from_string(s):\n \"\"\"\n\n :param s:\n :return:\n \"\"\"\n srch = re.search(MIDAS_ID_REGEX, s)\n if srch:\n return int(srch.group(1))\n\n if trd.Counterparty().Name() == MDK_CPTY_NAME:\n val = from_string(trd.OptionalKey())\n if val:\n return val\n\n if ai_fallback: # additional info fallback is really slow\n source_system = trd.AdditionalInfo().Source_System()\n if source_system and source_system.upper() == 'MIDAS':\n full_id = trd.AdditionalInfo().Source_Trade_Id()\n if full_id:\n return from_string(full_id)", "def get_doc_id(element_tree):\n id_element = element_tree.xpath('labels[@name=\"id\"]')[0]\n return id_element.attrib['valueString']", "def __get_uuid_by_id(self, id_: int) -> int:\n for module in self._modules:\n if module.id == id_:\n return module.uuid\n return None", "def validate_id(cls, id: str) -> ObjectId:\n return ObjectId(id)", "def get_current_record_id(self):\n url = self.selenium.get_location()\n for part in url.split(\"/\"):\n oid_match = re.match(OID_REGEX, part)\n if oid_match is not None:\n return oid_match.group(2)\n raise AssertionError(\"Could not parse record id from url: {}\".format(url))", "def id(self):\n return self.settings['your_botid']", "def read_library_id(path):\n with open(os.path.join(path, \"library\")) as library_handler:\n library_id = bigml.api.get_library_id( \\\n library_handler.readline().strip())\n\n if not library_id:\n sys.exit(\"Failed to read import library ID from %s\" % \\\n os.path.join(path, \"library\"))\n return library_id", "def get_by_id(cls, record_id):\n if any(\n (isinstance(record_id, (str, bytes)) and record_id.isdigit(),\n isinstance(record_id, (int, float))),\n ):\n return cls.query.get(int(record_id))\n return None", "def get_by_id(cls, record_id):\n if any(\n (isinstance(record_id, (str, bytes)) and record_id.isdigit(),\n isinstance(record_id, (int, float))),\n ):\n return cls.query.get(int(record_id))\n return None", "def _get_ID(self):\n raw_data = imdb.search_for_title(self.title)\n if len(raw_data) > 1:\n raw_data = raw_data[0] # Pulls the first value of the title (the closest match)\n # if there is more than one\n self.ID = raw_data['imdb_id']", "def _get_id(self):\n return self.id", "def 
extract_dicom_id_from_dicom_filepath(dicom_filepath):\n file = os.path.basename(dicom_filepath)\n match = re.search(_DICOM_FILE_ID_EXTRACTION_RE, file)\n if not match:\n return -1\n return int(match.group(\"id\"))", "def find_id(self):\n\t\tx , y = self.id.split(':')\n\t\treturn int(x), int(y)", "def get_self_id(self):\n configfilepath=\"./camera.ini\"\n\n config = configparser.ConfigParser()\n config.read(configfilepath)\n camid = \"\"\n if config.has_section(\"camera\"):\n camid = config.get(\"camera\",\"id\")\n print(\"Found CamID in camera.ini: \" + camid)\n else:\n config.add_section(\"camera\")\n\n if (camid == \"\"):\n h = iter(hex(getnode())[2:].zfill(12))\n camid = \":\".join(i + next(h) for i in h)\n config.set(\"camera\",\"id\",camid)\n with open(configfilepath, 'w') as configfile:\n config.write(configfile)\n print(\"Generated CamID and wrote to camera.ini: \" + camid)\n \n return camid", "def getIdentifier(self):\n return self._config['identifier']", "def getIdentifier(self):\n return self._config['identifier']", "def id(self):\n return self.config['key']", "def id(self) -> Optional[str]:\n return self.elem.get('id')", "def get(self, id, key, default=None):\n try:\n id_settings = self.id_dict[id]\n val = id_settings[key]\n except KeyError:\n try:\n val = self.default_settings[key]\n except KeyError:\n val = default\n return val", "def get_id(share_url):\n url = get_redirect_url(share_url)\n id_num = re.findall('(\\d*)\\?', url)[0]\n if id_num.isnumeric():\n return id_num\n else:\n print(\"Something wrong with id number\")", "def media_content_id(self) -> str | None:\n # The lovelace app loops media to prevent timing out, don't show that\n if self.app_id == CAST_APP_ID_HOMEASSISTANT_LOVELACE:\n return None\n media_status = self._media_status()[0]\n return media_status.content_id if media_status else None", "def api_id(self) -> int:\n try:\n api_id = int(self._parser.get(constants.CLIENT_CREDENTIALS_SECTION, 'API_ID'))\n return api_id\n except configparser.NoSectionError:\n raise configparser.Error(f\"Конфигурационный файл сессии не содержит секции \"\n f\"{constants.CLIENT_CREDENTIALS_SECTION}, пожалуйста, добавьте её\")\n except configparser.NoOptionError:\n raise configparser.Error(\"Конфигурационный файл сессии не содержит опции API_ID, пожалуйста, \"\n \"добавьте её\")", "def try_to_convert (id):\n converted = id\n try:\n converted = int(id)\n except ValueError:\n pass\n return converted", "def getEpisodeId(path, conn):\n cur = conn.cursor()\n cur.execute(\"SELECT id_episode FROM episode WHERE path=?\", (path,))\n id_episode = cur.fetchone()[0]\n return id_episode", "def id(self) -> Optional[int]:\n return self.__id", "def get_project_id():\n path = '/computeMetadata/v1/project/project-id'\n try:\n http_response = _issue_http_request(\n HTTP_GET, path, REQUIRED_METADATA_HEADER)\n return http_response.read()\n except errors.MetadataServerHttpError:\n LOGGER.exception('Unable to read project id from metadata server.')\n return None", "def get_model_property_id(\n thing: Union[\"ModelProperty\", t.ModelPropertyId, UUID, str]\n) -> t.ModelPropertyId:\n if isinstance(thing, UUID):\n return t.ModelPropertyId(thing)\n elif isinstance(thing, ModelProperty):\n return thing.id\n return t.ModelPropertyId(UUID(thing))" ]
[ "0.64487255", "0.5630184", "0.5409801", "0.54041606", "0.54024845", "0.53911626", "0.5343543", "0.53333956", "0.5306317", "0.5306317", "0.5306317", "0.5306317", "0.5306317", "0.5306317", "0.52967757", "0.52887344", "0.52816087", "0.5266011", "0.52260435", "0.522252", "0.520782", "0.5163452", "0.5162279", "0.5128807", "0.5128807", "0.5128392", "0.5121896", "0.51204026", "0.51113844", "0.50650084", "0.5060613", "0.5056728", "0.50565845", "0.5052973", "0.5052242", "0.50473183", "0.50426906", "0.5031852", "0.5025687", "0.5025671", "0.5018903", "0.50029945", "0.500028", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.49808937", "0.4975379", "0.49707323", "0.49705753", "0.4956169", "0.4953475", "0.4941778", "0.4940698", "0.49398598", "0.4939241", "0.4939069", "0.49225837", "0.4920168", "0.49048853", "0.4903774", "0.49033567", "0.48996466", "0.48982474", "0.48968866", "0.48856747", "0.48849738", "0.488245", "0.488245", "0.4879169", "0.4869906", "0.4867752", "0.4863427", "0.48546773", "0.48485547", "0.48485547", "0.48359793", "0.4819094", "0.48159325", "0.48135287", "0.4809972", "0.47952986", "0.47922838", "0.4787356", "0.47838563", "0.47717705", "0.47711945" ]
0.64089936
1
Get a TVDB ID from either the media's global tags, or the config. It will raise a ValueError if the provided ID is invalid.
def get_tvdb_id(self, tvdb_id: Any) -> Optional[int]: if not tvdb_id: general_track = self.media_info.general_tracks[0].to_data() tvdb_id = general_track.get("tvdb") if not tvdb_id: print("Warning: No TVDB ID was provided...") return None if isinstance(tvdb_id, int): tvdb_id = str(tvdb_id) if not self.TVDB_ID_T.match(tvdb_id) or not isinstance(tvdb_id, str): print(f"The provided TVDB ID {tvdb_id!r} is not valid...") print("Expected e.g., '79216', '1395', (not the url slug e.g., 'the-office-us').") raise ValueError("Invalid TVDB ID") return int(tvdb_id)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_tmdb_id(self, tmdb_id: Any) -> Optional[str]:\n if not tmdb_id:\n general_track = self.media_info.general_tracks[0].to_data()\n tmdb_id = general_track.get(\"tmdb\")\n if not tmdb_id:\n print(\"Warning: No TMDB ID was provided...\")\n return None\n if not self.TMDB_ID_T.match(tmdb_id) or not isinstance(tmdb_id, str):\n print(f\"The provided TMDB ID {tmdb_id!r} is not valid...\")\n print(\"Expected e.g., 'tv/2490', 'movie/14836', (include the 'tv/' or 'movie/').\")\n raise ValueError(\"Invalid TMDB ID\")\n return tmdb_id", "def tag_id(self, tag):\n assert isinstance(tag, str)\n\n df = self.dfs[\"tags\"]\n tag_records = df[df.tag == tag]\n if 1 == len(tag_records): \n return tag_records[\"id\"].values[0]\n elif 1 < len(tag_records): \n raise Exception(\"More than one record exist by tag\")\n else :\n # We should not be strict to tag name since it is a user input.\n import warnings\n warnings.warn(\"No record matched with tag\", Warning)\n return None", "def get_device_id(self) -> str:\n return Config.get('device_id')", "def _GetIdFromInstanceDirStr(instance_dir):\n match = _RE_LOCAL_INSTANCE_ID.match(instance_dir)\n if match:\n return match.group(\"ins_id\")\n\n # To support the device which is not created by acloud.\n if os.path.expanduser(\"~\") in instance_dir:\n return \"1\"\n\n return None", "def get_video_id(lookup_value, lookup_type='url'):\n if lookup_type == 'url':\n video_id = lookup_value.split('videos/')[1]\n elif lookup_type == 'content_id' or lookup_type == 'id':\n video_json = core.get_data('contents', lookup_value, return_json=True)\n video_id = video_json['id']\n else:\n errors.handlers.bad_lookup_type(lookup_type, ('url', 'content_id'))\n return video_id", "def get_video_id(url):\n\n if not url:\n return \"\"\n\n # If URL is embedded\n if \"embed\" in url:\n return url.split(\"/\")[-1]\n\n parse_result = urlparse(url)\n query = parse_qs(parse_result.query)\n return query[\"v\"][0]", "def the_tvdb_dot_com_id(title):\n pass", "def get_id(self):\n try:\n return self.inst.query('*IDN?')[:36]\n except errors.VisaIOError as e:\n logger.warning(e)\n return 'Device not connected.'", "def _get_device_id(api: Mobileclient) -> str:\n\n try:\n _get_device_id_from_environment()\n except KeyError:\n pass\n\n return _get_device_id_from_registered(api)", "def _get_device_id_from_environment() -> str:\n\n return os.environ[\"GOOGLE_MUSIC_DEVICE_ID\"]", "def getGUIDByBdcfg(configfile):\n generalDict, projectDict, solutionDict = Engine.readConfiguration(configfile)\n return projectDict['uuid']", "def get_dataset_id(thing: object) -> t.DatasetId:\n if isinstance(thing, int):\n return t.DatasetId(thing)\n try:\n int_id = int(thing) # type: ignore\n return t.DatasetId(int_id)\n except ValueError:\n raise err.InvalidDatasetError(id=str(thing))", "def _id_from_url(url):\n url = re.sub(r'\\?.*', '', url)\n video_id = url.split('/')[-2]\n return video_id", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[str]:\n return pulumi.get(self, \"id\")", "def config_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"config_id\")", "def get_url_param_by_id(measurement_id: str) -> str:\n if re.search('android|ios', measurement_id, re.IGNORECASE):\n return 'firebase_app_id'\n elif 
re.search('G-[A-Z0-9]{10}', measurement_id, re.IGNORECASE):\n return 'measurement_id'\n else:\n raise ValueError(f'Unsupported Measurement ID/Firebase App ID.')", "def get_volume_from_id(item_id):\n return volumes[\"data\"][str(item_id)]", "def getEpisodeId(path, conn):\n cur = conn.cursor()\n cur.execute(\"SELECT id_episode FROM episode WHERE path=?\", (path,))\n id_episode = cur.fetchone()[0]\n return id_episode", "def get_id(self) -> Optional[str]:\n return self.id_", "def get_imdb_id(self, imdb_id: Any) -> str:\n if not imdb_id:\n general_track = self.media_info.general_tracks[0].to_data()\n imdb_id = general_track.get(\"imdb\")\n if not imdb_id:\n print(\"No IMDB ID was provided but is required...\")\n while not imdb_id or not isinstance(imdb_id, str):\n user_id = input(\"IMDB ID (e.g., 'tt0487831'): \")\n if not self.IMDB_ID_T.match(user_id):\n print(f\"The provided IMDB ID {user_id!r} is not valid...\")\n print(\"Expected e.g., 'tt0487831', 'tt10810424', (include the 'tt').\")\n else:\n imdb_id = user_id\n return imdb_id", "def get_id(conf_name: str=CONFIG_FILE) -> Optional[int]:\n with open(conf_name, 'r') as fobj:\n data = json.load(fobj)\n\n uid = data.get('id')\n\n assert uid is None or isinstance(uid, int), \\\n 'The user id must be an integer if it exists'\n\n return uid", "def get_video_id(vid_folder_string):\n parts = vid_folder_string.split(\"_\")\n return parts[0] + \"_\" + parts[1]", "def get_id(html):\n\ttry:\n\t\tsong_id = re.findall('soundcloud://sounds:(.*?)\"', html)[0]\n\t\treturn song_id\n\texcept IndexError:\n\t\tprint(\"\\033[91m✘ Could not find song ID\\033[0m\")\n\t\tsys.exit()", "def volume_id(self):\n if self.volume:\n return self.volume.id\n else:\n return None", "def trace_id_get() -> tuple[str, str] | None:\n return trace_id_cv.get()", "def id(self) -> typing.Optional[str]:\n return self._values.get('id')", "def id(self) -> typing.Optional[str]:\n return self._values.get('id')", "def get_record_id(thing: Union[\"Record\", t.RecordId, UUID, str]) -> t.RecordId:\n if isinstance(thing, UUID):\n return t.RecordId(thing)\n elif isinstance(thing, Record):\n return thing.id\n return t.RecordId(UUID(thing))", "def get_id(connection):\n if connection is None:\n return None\n return connection.id", "def video_id_from_url(url):\n\n parsed_url = urlparse(url)\n url_params = dict(parse_qsl(parsed_url.query))\n return url_params.get(\"v\", parsed_url.path.split(\"/\")[-1])", "def up_id_from_rx_id(reactome_id):\n react_url = 'http://www.reactome.org/ContentService/data/query/' \\\n + reactome_id + '/referenceEntity'\n res = requests.get(react_url)\n if not res.status_code == 200:\n return None\n _, entry, entry_type = res.text.split('\\t')\n if entry_type != 'ReferenceGeneProduct':\n return None\n id_entry = entry.split(' ')[0]\n db_ns, db_id = id_entry.split(':')\n if db_ns != 'UniProt':\n return None\n return db_id", "def alternative_media_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"alternative_media_id\")", "def find_variant_by_id(variant_id):\n\n variant = Variant.get(lambda s: s.id == variant_id and s.deletedAt is None)\n if variant is None:\n return variant, \"Variant Not Found !\"\n\n return variant, \"\"", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n if self.res.get('slideshow_id'):\n return self.res.get('slideshow_id')\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if 
len(path_list) == 3 and (p.path.startswith('/slideshow/embed_code')):\n # http://www.slideshare.net/slideshow/embed_code/1293644\n return path_list[2]\n elif len(path_list) == 2 and p.path.startswith('/swf'):\n # return -1 when url is like : http://static.slideshare.net/swf/ssplayer2.swf?doc=working-dogs-1201800078341935-2\n # FixMe :slideshare oembed api doesnt support this kind of url\n return -1\n return ''", "def _get_id(mf, url=None):\n\n\tprops = mf['properties']\n\n\tif 'uid' in props:\n\t\treturn props['uid'][0]\n\telif 'url' in props:\n\t\treturn props['url'][0]\n\telse:\n\t\treturn None", "def get_device_id(self):\n\n if self.have_metadata is False:\n self._get_metadata()\n self.have_metadata = True\n\n\t\ttry:\n\t\t\treturn self.keyinfo['tracking_id'].attrs['device_id']\n\t\texcept:\n\t\t\treturn None", "def get_id_regular_link(link = None):\n #Legacy compatibility\n choppedLink = legacy_check(link)\n # dont bother if we are none.\n if link == None:\n return link\n\n vid_url_params = choppedLink[3].split(\"&\")\n # Search the id in the list of elements of the url\n vid = search_video_id(vid_url_params)\n\n # And dont forget the links with hashtags #\n vid = vid.split(\"#\")[0]\n\n return vid # change this var names TODO", "def get_video_id(self):\n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n if p.netloc.endswith('vimeo.com') and 'hubnut/album/' in p.path:\n return ''\n \n if p.netloc.endswith('vimeo.com') and p.path.split('/')[-1:][0].isdigit():\n # Url of type http://vimeo.com/21347521\n # mobile type http://vimeo.com/m/21347521\n return p.path.split('/')[-1:][0]\n elif p.netloc.endswith('vimeo.com') and p.path == '/moogaloop.swf' and 'clip_id' in p.query:\n # Old embed code style url\n #params = dict([part.split('=') for part in p.query.split('&')])\n params = cgi.parse_qs(p.query)\n if 'clip_id' in params:\n return params['clip_id'][0]\n elif p.netloc == 'player.vimeo.com' and p.path.startswith('/video/'):\n # Url of type http://player.vimeo.com/video/21347521?title=0&amp;byline=0&amp;portrait=0\n path = p.path.split('/')\n return path[-1]\n \n return ''", "def media_id(self):\n try:\n return Html.toId(self.content)\n except:\n Mp3Error(1)", "def get_model_id(thing: Union[\"Model\", t.ModelId, UUID, str]) -> t.ModelId:\n if isinstance(thing, UUID):\n return t.ModelId(thing)\n elif isinstance(thing, Model):\n return thing.id\n return t.ModelId(UUID(thing))", "def get_by_id(cls, record_id):\n if any((isinstance(record_id, (str, bytes)) and record_id.isdigit(),\n isinstance(record_id, (int, float)))):\n return cls.query.get(int(record_id))\n return None", "def _parse(self, the_id: typing.Union[int, str]) -> int:\n return int(the_id)", "def variant_id(lang, layout, value):\n kn = _get_keyboard_names()\n return kn.variant_id(lang, layout, value)", "def get_media_id(media_url):\n split_url = media_url.split(\"/\")\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/file.png\n if split_url[-2] == \"media\":\n return split_url[-1]\n #Media urls of the format https://messaging.bandwidth.com/api/v2/users/123/media/abc/0/file.png\n else:\n #This is required for now due to the SDK parsing out the `/`s\n return \"%2F\".join(split_url[-3:])", "def regex_video_id(param):\n miregex = '(.*)v=(.*)&?(.*)'\n vid = None\n #log.debug(\"get video id: \" + repr(param))\n try:\n rs = re.search(miregex, param)\n params = rs.group(2)\n #log.debug(\"params \" + params)\n vid = params\n 
#id = params.split(\"&\")[0] if params != None and len(params)>12 else params\n except Exception as e:\n #log.debug(\"HURU\")\n #log.exception(e)\n pass # yes, we pass\n return vid", "def video_id(self):\n # type: () -> string_types\n return self._video_id", "def id(self):\n return self.metadata[\"id\"]", "def get(self, id, key, default=None):\n try:\n id_settings = self.id_dict[id]\n val = id_settings[key]\n except KeyError:\n try:\n val = self.default_settings[key]\n except KeyError:\n val = default\n return val", "def get_video_id_from_link(link):\n query_string = urlparse.urlparse(link).query\n qs_params = urlparse.parse_qs(query_string)\n return qs_params['v'][0]", "def get_vehicle_cfg_id(self, vehcfg_name):\n is_rooted = False\n cfg_name = vehcfg_name.rstrip(PATH_SEPARATOR)\n if self.is_absolute_name(cfg_name):\n cfg_name = cfg_name.lstrip(PATH_SEPARATOR)\n is_rooted = True\n cfg_name_parts = cfg_name.split(PATH_SEPARATOR)\n # if absolute name, get vehicle ID\n vehid_cond = None\n if is_rooted:\n if len(cfg_name_parts) != 2:\n tmp = \"Vehicle configuration name '%s' \" % vehcfg_name\n tmp += \"cannot be resolved. Name must have format '/vehicle/config'.\"\n raise AdasDBError(tmp)\n vehid = self.get_vehicle_id(cfg_name_parts[0])\n vehid_cond = SQLBinaryExpr(COL_NAME_VEHICLECFGS_VEHICLEID, OP_EQ, SQLLiteral(vehid))\n cfg_name = cfg_name_parts[1].strip()\n # get configuration id (for vehicle, if absolute)\n cfg_cond = SQLBinaryExpr(SQLFuncExpr(self.db_func_map[DB_FUNC_NAME_LOWER],\n COL_NAME_VEHICLECFGS_NAME),\n OP_EQ, SQLLiteral(cfg_name.lower()))\n if vehid_cond is not None:\n cfg_cond = SQLBinaryExpr(vehid_cond, OP_AND, cfg_cond)\n entries = self.select_generic_data(select_list=[COL_NAME_VEHICLECFGS_VEHCFGID],\n table_list=[TABLE_NAME_VEHICLECFGS],\n where=cfg_cond)\n if len(entries) == 1:\n return entries[0][COL_NAME_VEHICLECFGS_VEHCFGID]\n elif len(entries) > 1:\n tmp = \"Vehicle configuration '%s' \" % vehcfg_name\n tmp += \"cannot be resolved because it is ambiguous. (%s)\" % entries\n raise AdasDBError(tmp)\n\n raise AdasDBError(\"No resolution of '%s'. 
(%s)\" % (vehcfg_name, entries))", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n #logger.debug('DAILYMOTION VIDEO FOUND %s' % url)\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if len(path_list) == 3 and (p.path.startswith('/embed/video/') or p.path.startswith('/swf/video/')):\n # http://www.dailymotion.com/embed/video/xmp7zw\n return re.sub('_.+', '', path_list[2])\n elif len(path_list) == 2 and (p.path.startswith('/video/') or p.path.startswith('/swf/')):\n # http://www.dailymotion.com/video/xmp7zw_whatever\n # http://www.dailymotion.com/swf/xmp7zw\n return re.sub('_.+', '', path_list[1])\n \n return ''", "def get_device_by_id(self, id):\n if not isinstance(id, int):\n id = int(id)\n for i in self.devices:\n if self.devices[i].id == id:\n return self.devices[i]\n raise RuntimeError, \"Device not found\"", "def id(self):\n return self.config['key']", "def _parse_id(self, id):\n try:\n name, pid = id.split(':', 1)\n except ValueError:\n raise ValueError('id %s is not in the form provider:pid' % id)\n if name not in self.providers:\n raise ValueError('no such provider \"%s\"' % name)\n return self.providers[name], tostr(pid)", "def get_id(self):\n\t\treturn call_sdk_function('PrlSrvCfgDev_GetId', self.handle)", "def id(self): \n if self.cloudnet:\n return self.cloudnet.id\n else:\n return None", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"id\")", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n \n if p.path.startswith('/v/') or p.path.startswith('/broadcast/'):\n path = p.path.split('/')\n if len(path) == 3:\n return p.path.split('/')[-1].replace('.live', '')\n \n return ''", "def get_video_id(self):\n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n path = p.path\n if path.endswith('/'):\n path = path[:-1]\n path_list = path[1:].split('/')\n \n if path_list[0] == 'v':\n # https://vine.co/v/bjHh0zHdgZT\n return 
path_list[1]\n \n return ''", "def getID(self) -> int:\n ...", "def get_by_id(cls, record_id):\n if any(\n (isinstance(record_id, (str, bytes)) and record_id.isdigit(),\n isinstance(record_id, (int, float))),\n ):\n return cls.query.get(int(record_id))\n return None", "def get_by_id(cls, record_id):\n if any(\n (isinstance(record_id, (str, bytes)) and record_id.isdigit(),\n isinstance(record_id, (int, float))),\n ):\n return cls.query.get(int(record_id))\n return None", "def get_id(self):\n if self.is_root():\n return self.id_field.name\n elif self.has_id():\n return getattr(self, self.id_field)\n elif self.has_cid():\n return self.cid\n else:\n raise Exception(\"No id-like value set when get_id() called.\")", "def device_id(self) -> Optional[str]:\n return self.relay(\"device_id\")", "def get_id(self, name=None):\n\n # Support using integer IDs directly\n if isinstance(name, int):\n return name\n\n self.ensure_loaded()\n if name is not None:\n ems_systems = self.search('name', name.upper(), searchtype=\"match\")\n if ems_systems.empty:\n sys_names = self.list_all()['name'].to_list()\n raise ValueError(\n 'No matching systems found. You have access to: {0}'.format(sys_names))\n id = ems_systems.iloc[0]['id']\n else:\n ems_systems = self.list_all()\n if ems_systems.shape[0] == 1:\n id = ems_systems.iloc[0]['id']\n else:\n raise LookupError(\n 'Multiple ems systems found. Please select one from the available:\\n{0}'\n .format(ems_systems.loc[:, ['id', 'name']])\n )\n return id", "def get_organization_id(thing: object) -> t.OrganizationId:\n if isinstance(thing, int):\n return t.OrganizationId(thing)\n try:\n int_id = int(thing) # type: ignore\n return t.OrganizationId(int_id)\n except ValueError:\n raise err.InvalidOrganizationError(id=str(thing))", "def getid(data):\n return int(data.split('/')[-1])", "def __get_uuid_by_id(self, id_: int) -> int:\n for module in self._modules:\n if module.id == id_:\n return module.uuid\n return None", "def parse_id(app_object_id_string):\n splitter = re.compile(r'-')\n tokens = splitter.split(app_object_id_string)\n app_string = tokens[0]\n model_string = tokens[1]\n content_id = int(tokens[2])\n content_type = ContentType.objects.get(app_label=app_string, model=model_string)\n object = content_type.model_class().objects.get(id=content_id)\n return object", "def id(self):\n if self.cloudserver:\n return self.cloudserver.id\n else:\n return None", "def get_take_audio_id(self, take_id):\n def execute_sql(cursor):\n cursor.execute(\"SELECT audioId FROM Takes WHERE id = ?\",\n (take_id,))\n results = cursor.fetchone()\n if results is None:\n return None\n else:\n return results[0]\n \n def when_ready(dbpool):\n return dbpool.runInteraction(execute_sql)\n d = self.get_dbpool()\n d.addCallback(when_ready)\n\n def on_error(error):\n log.warn(\"Failed to get audio ID for take id ({take_id}): \"+\n str(error)\n )\n return error\n d.addErrback(on_error)\n\n return d", "def read_library_id(path):\n with open(os.path.join(path, \"library\")) as library_handler:\n library_id = bigml.api.get_library_id( \\\n library_handler.readline().strip())\n\n if not library_id:\n sys.exit(\"Failed to read import library ID from %s\" % \\\n os.path.join(path, \"library\"))\n return library_id", "def get_video_id(self):\n \n if self.video_id:\n return self.video_id\n \n if not self.original_url:\n return ''\n \n p = urlparse.urlparse(self.original_url)\n params = cgi.parse_qs(p.query)\n \n if p.path.endswith('/video'):\n # url type 
http://www.livestream.com/xprize/video?clipId=pla_1a25a2ba-9ca4-4c3b-b1b1-ebd7d79ef6d2\n if 'clipId' in params:\n return params['clipId'][0]\n if p.path.startswith('/embed'):\n # url type http://cdn.livestream.com/embed/xprize?layout=4&amp;clip=pla_1a25a2ba-9ca4-4c3b-b1b1-ebd7d79ef6d2&amp;width=560&amp;autoplay=false\n if 'clip' in params:\n return params['clip'][0]\n \n return ''", "def get_video_id(self, obj):\n return obj.id", "def get_mediatype_id(self, description):\n result = self.conn.mediatype.get(filter={'description': description})\n\n if result:\n mediatypeid = result[0]['mediatypeid']\n else:\n mediatypeid = None\n\n return mediatypeid", "def var_id(v):\n return int(sort_vid_split(v)[1])", "def getPID(self) -> \"Optional[str]\":\n the_pid: \"Optional[str]\"\n if self.id is not None:\n the_pid = str(self.id)\n parsedRepoURL = urllib.parse.urlparse(the_pid)\n\n # If it is not an URI / CURIE\n if parsedRepoURL.scheme == \"\":\n if (self.trs_endpoint is not None) and len(self.trs_endpoint) > 0:\n parsedTRSURL = urllib.parse.urlparse(self.trs_endpoint)\n trs_steps: \"Sequence[str]\" = parsedTRSURL.path.split(\"/\")\n pid_steps = [\"\", urllib.parse.quote(the_pid, safe=\"\")]\n\n if self.version_id is not None:\n pid_steps.append(\n urllib.parse.quote(str(self.version_id), safe=\"\")\n )\n\n the_pid = urllib.parse.urlunparse(\n urllib.parse.ParseResult(\n scheme=TRS_SCHEME_PREFIX,\n netloc=parsedTRSURL.netloc,\n path=\"/\".join(pid_steps),\n params=\"\",\n query=\"\",\n fragment=\"\",\n )\n )\n else:\n self.logger.debug(\"trs_endpoint was not provided\")\n the_pid = None\n else:\n the_pid = None\n\n return the_pid", "def get_id(\n name=None,\n tags=None,\n region=None,\n key=None,\n keyid=None,\n profile=None,\n in_states=None,\n filters=None,\n):\n instance_ids = find_instances(\n name=name,\n tags=tags,\n region=region,\n key=key,\n keyid=keyid,\n profile=profile,\n in_states=in_states,\n filters=filters,\n )\n if instance_ids:\n log.info(\"Instance ids: %s\", \" \".join(instance_ids))\n if len(instance_ids) == 1:\n return instance_ids[0]\n else:\n raise CommandExecutionError(\n \"Found more than one instance matching the criteria.\"\n )\n else:\n log.warning(\"Could not find instance.\")\n return None", "def try_to_convert (id):\n converted = id\n try:\n converted = int(id)\n except ValueError:\n pass\n return converted" ]
[ "0.60287786", "0.5454019", "0.5437956", "0.5360313", "0.5346196", "0.5333645", "0.5321893", "0.52784383", "0.5270552", "0.5240752", "0.5226761", "0.5213634", "0.5184168", "0.5155326", "0.5155326", "0.5155326", "0.5155326", "0.5155326", "0.5155326", "0.51372594", "0.5098536", "0.5097522", "0.5085566", "0.5076998", "0.5059803", "0.5056156", "0.5040745", "0.5027556", "0.5021359", "0.5015722", "0.5014442", "0.5014442", "0.5010005", "0.50011945", "0.49983272", "0.49706906", "0.49702853", "0.49627352", "0.4938334", "0.49352136", "0.49179354", "0.49156088", "0.4886544", "0.48862314", "0.48827773", "0.48744917", "0.48620445", "0.48582947", "0.4857559", "0.48556006", "0.48534238", "0.48461103", "0.48403594", "0.48378482", "0.48354232", "0.48297104", "0.4825063", "0.48241168", "0.48232174", "0.48198724", "0.48187742", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4815301", "0.4811836", "0.48105282", "0.48092827", "0.48085177", "0.48085177", "0.48066986", "0.4798965", "0.47982413", "0.479733", "0.47861746", "0.47758907", "0.47742218", "0.477308", "0.47728932", "0.47689378", "0.47680914", "0.47677204", "0.4764812", "0.47591215", "0.47571164", "0.4755563", "0.47555077" ]
0.694675
0
Scrape Title Name and Year (including e.g. 2019) from IMDB
def get_title_name_year(self) -> Tuple[str, str]: r = self.session.get(f"https://www.imdb.com/title/{self.imdb}") if r.status_code != 200: raise ValueError(f"An unexpected error occurred getting IMDB Title Page [{r.status_code}]") imdb_page = html.unescape(r.text) imdb_title = re.search( # testing ground: https://regex101.com/r/bEoEDn/1 r"<title>(?P<name>.+) \(((?P<type>TV (Movie|Series|Mini[- ]Series|Short|Episode) |Video |Short |)" r"(?P<year>(\d{4})(|– |–\d{4})))\) - IMDb</title>", imdb_page ) if not imdb_title: raise ValueError(f"Could not scrape Movie Title or Year for {self.imdb}...") return imdb_title.group("name").strip(), imdb_title.group("year").strip()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def scrape_movie_page(dom):\n # to save the information\n info = []\n\n # find the information block needed\n header = dom.find(\"div\", \"title_wrapper\")\n\n # find the title and strip the string\n name_dom = header.h1.get_text().encode(\"utf-8\")\n name = str(name_dom)[2:-16]\n info.append(name)\n\n # find the year and strip the year\n year_dom = header.h1.span.get_text().encode(\"utf-8\")\n year = str(year_dom)[3:-2]\n info.append(year)\n\n # find the duration and strip the string\n duration_dom = dom.find(\"time\", itemprop=\"duration\").get_text().encode(\"utf-8\")\n duration = str(duration_dom)[28:-23]\n info.append(duration)\n\n # find all the genres and strip the string\n genre_dom = dom.find(\"div\", itemprop=\"genre\").a.get_text().encode(\"utf-8\")\n genre = find_genres(genre_dom, dom)\n info.append(genre)\n\n # find all the directors and strip the string\n director_dom = dom.find(\"span\", itemprop=\"director\").get_text().encode(\"utf-8\")\n director = find_directors(director_dom, dom)\n info.append(director)\n\n # find all the writers and strip the string\n writer_dom = dom.find(\"span\", itemprop=\"creator\").a.get_text().encode(\"utf-8\")\n writer = find_writers(writer_dom, dom)\n info.append(writer)\n\n # find all the actors and strip the string\n actor_dom = dom.find(\"span\", itemprop=\"actors\").a.get_text().encode(\"utf-8\")\n actor = find_actors(actor_dom, dom)\n info.append(actor)\n\n # find the rating and strip the string\n rating_dom = dom.find(\"span\", itemprop=\"ratingValue\").get_text().encode(\"utf-8\")\n rating = str(rating_dom)[2:-1]\n info.append(rating)\n\n # find the number of ratings and strip the string\n number_ratings_dom = dom.find(\"span\", itemprop=\"ratingCount\").get_text().encode(\"utf-8\")\n number_ratings = str(number_ratings_dom)[2:-1]\n info.append(number_ratings)\n\n return info", "def extract_movies(dom):\n\n # extract data per movie\n movies = dom.find_all('div', class_ = 'lister-item mode-advanced')\n\n # list to store scraped data\n movielist = []\n\n for movie in movies:\n\n # append extracted data to this dict\n moviedict = {}\n\n # scrape titles and add to dict\n moviedict['title'] = movie.h3.a.text\n\n # scrape ratings and add to dict\n moviedict['rating'] = float(movie.strong.text)\n\n # scrape year of release and add to dict\n year = movie.h3.find('span', class_ = 'lister-item-year text-muted unbold')\n moviedict['year'] = re.findall('\\d+', year.text.strip('()'))[0]\n\n # scrape actors and add to dict\n actors = movie.find_all(href=re.compile(\"adv_li_st\"))\n actorlist = []\n for actor in actors:\n actorlist.append(actor.text)\n actorstring = ', '.join(actorlist)\n moviedict['actors'] = actorstring\n\n # scrape runtime and add to dict\n moviedict['runtime'] = movie.p.find('span', class_ = 'runtime').text.split(' ')[0]\n movielist.append(moviedict)\n\n\n # ADD YOUR CODE HERE TO EXTRACT THE ABOVE INFORMATION ABOUT THE\n # HIGHEST RATED MOVIES\n # NOTE: FOR THIS EXERCISE YOU ARE ALLOWED (BUT NOT REQUIRED) TO IGNORE\n # UNICODE CHARACTERS AND SIMPLY LEAVE THEM OUT OF THE OUTPUT.\n\n return movielist # REPLACE THIS LINE AS WELL IF APPROPRIATE", "def extract_movie_header(soup: BeautifulSoup) -> Tuple[str, str]:\n\n header = soup.find(\"h3\", class_=\"lister-item-header\")\n\n title = header.a.get_text()\n\n year = header.find(\"span\", class_=\"lister-item-year\").get_text()[-5:-1]\n year = int(year)\n\n return title, year", "def getMovieInfo(endpoint, title, year):\n\n params = {'t': title, 'y': year, 'plot':'short', 'r':'json', 
'tomatoes':'true'}\n response = requests.get(endpoint, params=params)\n\n try:\n response.raise_for_status()\n response = response.json()\n\n if 'Error' in response.keys():\n raise LookupError\n\n results = {}\n strkeys = ['Actors', 'Director', 'Genre', 'Plot', 'Rated', 'Released', 'imdbID', 'tomatoConsensus']\n intkeys = ['Runtime', 'Metascore', 'imdbVotes', 'tomatoMeter', 'tomatoReviews']\n fltkeys = ['imdbRating']\n\n for key in strkeys:\n results[key] = response[key] if response[key] != 'N/A' else None\n for key in intkeys:\n results[key] = int(re.sub(r'[^\\d]', '', response[key])) if response[key] != 'N/A' else None\n for key in fltkeys:\n results[key] = float(re.sub(r'[^\\d]', '', response[key])) if response[key] != 'N/A' else None\n return results\n\n except requests.exceptions.HTTPError:\n print(\"There was a problem with the HTTP request: {0}\".format(response.status_code))\n except requests.exceptions.Timeout:\n print(\"The HTTP request timed out\")\n except LookupError:\n pass\n return None", "def list_titles(genre):\n text = genre_html(genre)\n num_titles = text.count('title=')\n\n titles = []\n for i in range(num_titles):\n start = text.find('title=')\n end = text[start+7:].find('\">')\n title = text[start+7:start+end]\n titles.append(title)\n text = text[start+7:]\n\n return titles", "def get_year_from_movielist_title(title):\n match = re.match(r'.*\\s+\\((\\d+)\\)', title)\n year = int(match.groups()[0])\n return year", "def get_movie_info(page: str, verbose:bool = True):\n\n def add_scoreInfo(pattern, raw_text, keyName):\n \"\"\"inner helper function to help add score information\n :param pattern: pattern to match\n :param raw_text: html text\n :param keyName: key name to be append to the dict\n \"\"\"\n match_pat = re.search(pattern, raw_text)\n if match_pat is None:\n info[keyName] = None\n else:\n info[keyName] = match_pat.group(1)\n\n info = dict() \n \n # verbose option\n if verbose:\n print('scraping main page')\n print('scraping url: ' + page)\n \n # make soup\n soup = _make_soup(page)\n \n if soup == '':\n return None\n \n else:\n ### extraction ###\n # movie id\n movieId = soup.find('a', href=re.compile('movieId=[0-9]+'))\n if movieId is None:\n info['movie_link'] = None\n else:\n movieId = re.search('movieId=([0-9]+)$', movieId[\"href\"])\n info['movie_link'] = '/m/'+ movieId.group(1)\n \n movieInfo= soup.find('script', type=\"application/ld+json\")\n if movieInfo is None:\n print('No movie information for this movie.')\n else:\n # movie name\n movieName = re.search('\"name\":\"?(.+?)\"?,\"', movieInfo.get_text())\n if movieName is None:\n info['movie_name'] = None\n else:\n info['movie_name'] = movieName.group(1)\n \n # rating\n rating = re.search('\"contentRating\":\"?(.+?)\"?,\"',movieInfo.get_text())\n if rating is None:\n info['rating'] = None\n else:\n info['rating'] = rating.group(1)\n \n # genre \n genre = re.search('\"genre\":\\[\"(.+?)\"\\]', movieInfo.get_text())\n if genre is None:\n info['genre'] = None\n else:\n info['genre'] = genre.group(1).replace('\"','')\n \n # directors\n directors = re.search('\"director\":(.+?),\"author\"', movieInfo.get_text())\n if directors is None:\n info['directors'] = None\n else:\n info['directors'] = ','.join(re.findall('\"name\":\"(.+?)\",\"', directors.group(1)))\n \n # writers\n writers = re.search('\"director\":.+?\"author\":(.+?),\"genre\"', movieInfo.get_text())\n if writers is None:\n info['writers'] = None\n else:\n info['writers'] = ','.join(re.findall('\"name\":\"(.+?)\",\"', writers.group(1)))\n \n # movie 
synopsis\n movieSyno = soup.find('div', id=re.compile('movieSynopsis'))\n if movieSyno is None:\n info['movie_info'] = None\n else:\n info['movie_info'] = movieSyno.get_text().strip()\n \n # poster_image\n poster_img = soup.find('meta', property = re.compile('image$'))\n if poster_img is None:\n info['poster_image'] = None\n else:\n info['poster_image'] = poster_img[\"content\"]\n \n # cast\n casts = soup.find_all('div', class_=re.compile('^cast-item'))\n if casts is None:\n info['casts'] = None\n else:\n info['casts'] = ','.join([cast.find('span').get_text().strip() for cast in casts])\n \n # in_theaters_date\n in_theaters_date = soup.find('div', text=re.compile(\"In Theaters\"))\n if in_theaters_date is None:\n info['in_theaters_date'] = None\n else:\n info['in_theaters_date'] = in_theaters_date.find_next_sibling('div').find('time').get_text().strip()\n \n # on_streaming_date\n on_streaming_date = soup.find('div', text=re.compile(\"On Disc/Streaming:\"))\n if on_streaming_date is None:\n info['on_streaming_date'] = None\n else:\n info['on_streaming_date'] = on_streaming_date.find_next_sibling('div').find('time').get_text().strip()\n \n # runtime_in_minutes\n runtime_in_minutes = soup.find('div', text=re.compile(\"Runtime:\"))\n if runtime_in_minutes is None:\n info['runtime_in_minutes'] = None\n else:\n info['runtime_in_minutes'] = re.search('[0-9]+',runtime_in_minutes.find_next_sibling('div').find('time').get_text().strip()).group(0)\n # studio_name\n studio_name = soup.find('div', text=re.compile(\"Studio:\"))\n if studio_name is None:\n info['studio_name'] = None\n else:\n info['studio_name'] = studio_name.find_next_sibling('div', class_=\"meta-value\").get_text().strip()\n \n # Extra: box office\n box_office = soup.find('div', text=re.compile(\"Box Office:\"))\n if box_office is None:\n info['box_office'] = None\n else:\n info['box_office'] = box_office.find_next_sibling('div', class_=\"meta-value\").get_text().strip()\n \n scoreInfo = soup.find('script', type=\"text/javascript\")\n if scoreInfo is None:\n print('No score information for this movie.')\n else:\n pat_head1 = 'root.RottenTomatoes.context.scoreInfo.+?'\n pat_keywrd = '\"consensus\":'\n pat_tail1 = '\"?(.+?)\"?,\"'\n pat_tail2 = '\"?([0-9]+?)\"?,\"'\n pat_tail3 = '\"?([0-9\\.]+?)\"?,\"'\n # critics_consensus\n criticsCns_pat = pat_head1 + pat_keywrd + pat_tail1\n add_scoreInfo(criticsCns_pat, scoreInfo.get_text(), 'critics_consensus')\n \n # tomatometer_status\n pat_keywrd ='\"tomatometerState\":'\n tmtStatus_pat = pat_head1 + pat_keywrd + pat_tail1\n add_scoreInfo(tmtStatus_pat, scoreInfo.get_text(), 'tomatometer_status')\n\n # tomatometer_rating\n pat_keywrd = '\"score\":'\n tmtRating_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(tmtRating_pat, scoreInfo.get_text(), 'tomatometer_rating')\n\n # tomatometer_count\n pat_keywrd ='\"numberOfReviews\":'\n tmtCnt_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(tmtCnt_pat, scoreInfo.get_text(), 'tomatometer_count')\n \n # audience_status\n audStatus_pat = 'root.RottenTomatoes.context.popcornMeterState.+?\"(.+?)\";'\n add_scoreInfo(audStatus_pat, scoreInfo.get_text(), 'audience_status')\n\n # Extra: audience_want_to_see\n audWantToSee_pat = 'root.RottenTomatoes.context.wantToSeeData.+?\"wantToSeeCount\":' + pat_tail2\n add_scoreInfo(audWantToSee_pat, scoreInfo.get_text(), 'audience_want_to_see_count')\n \n # audience_rating\n pat_keywrd = '\"audienceAll\".+?\"score\":'\n audRating_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audRating_pat, 
scoreInfo.get_text(), 'audience_rating')\n\n # audience_count\n pat_keywrd = '\"audienceAll\".+?\"ratingCount\":'\n audCnt_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audCnt_pat, scoreInfo.get_text(), 'audience_count')\n\n # audience_top_critics_count\n pat_keywrd = '\"tomatometerTopCritics\".+?\"numberOfReviews\":'\n audTopCritics_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audTopCritics_pat, scoreInfo.get_text(), 'audience_top_critics_count')\n \n # audience_fresh_critics_count\n pat_keywrd = '\"freshCount\":'\n audFreshCritics_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audFreshCritics_pat, scoreInfo.get_text(), 'audience_fresh_critics_count')\n \n # audience_rotten_critics_count\n pat_keywrd = '\"rottenCount\":'\n audRottenCritics_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audRottenCritics_pat, scoreInfo.get_text(), 'audience_rotten_critics_count')\n\n # Extra: audience_fresh_top_critics_count\n pat_keywrd = '\"tomatometerTopCritics\".+?\"freshCount\":'\n audFreshCritics_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audFreshCritics_pat, scoreInfo.get_text(), 'audience_fresh_top_critics_count')\n\n # Extra: audience_rotten_top_critics_count\n pat_keywrd = '\"tomatometerTopCritics\".+?\"rottenCount\":'\n audRottenCritics_pat = pat_head1 + pat_keywrd + pat_tail2\n add_scoreInfo(audRottenCritics_pat, scoreInfo.get_text(), 'audience_rotten_rotten_critics_count')\n \n # Extra: tomatometer_avg_rating\n pat_keywrd = '\"avgScore\":'\n tmtAvgRating_pat = pat_head1 + pat_keywrd + pat_tail3\n add_scoreInfo(tmtAvgRating_pat, scoreInfo.get_text(), 'tomatometer_avg_rating')\n\n # Extra: audience_top_critics_avg_rating\n pat_keywrd = '\"tomatometerTopCritics\".+?\"avgScore\":'\n audTopCriticsAvgRating_pat = pat_head1 + pat_keywrd + pat_tail3\n add_scoreInfo(audTopCriticsAvgRating_pat, scoreInfo.get_text(), 'audience_top_critics_avg_rating')\n\n # Extra: Score Sentiment\n pat_keywrd = '\"scoreSentiment\":'\n scoreSentiment_pat = pat_head1 + pat_keywrd + pat_tail1\n add_scoreInfo(scoreSentiment_pat, scoreInfo.get_text(), 'score_sentiment')\n\n # Extra: audience_avg_rating\n pat_keywrd = '\"averageRating\":'\n audienceAvgRating_pat = pat_head1 + pat_keywrd + pat_tail3\n add_scoreInfo(audienceAvgRating_pat, scoreInfo.get_text(), 'audience_avg_rating')\n print('done scraping movie info')\n return info", "def extract_names(filename):\n raw_text = read_html(filename) \n \n #searching for the year\n year = re.search('(<h3 align=\"center\">Popularity in )(\\d\\d\\d\\d)',raw_text).group(2)\n \n #searching for the list of names\n list_of_names = re.findall('<td>(\\d+)</td><td>(\\w+)</td><td>(\\w+)</td>',raw_text)\n \n #pair each name with it's rank\n name_and_rank = [] \n for line in list_of_names:\n name_and_rank.append((line[1], line[0]))\n name_and_rank.append((line[2], line[0]))\n \n # sort the list alphabetically\n name_and_rank = sorted(name_and_rank, key = lambda x:x[0])\n name_and_rank = dict(name_and_rank)\n\n return year, name_and_rank[:20]", "def parse_title_and_year(self, refstr):\n\n refstr = self.re_cleanup_unstructured.sub(', ', refstr, 1)\n match = self.rec_field_unstructured.match(refstr)\n if match:\n year = match.group('year')\n title = match.group('title')\n return title,year\n return None,None", "def parse_movie_page(movie_url: str) -> Dict[str, str]:\n movie_page = get_soup_for_page(movie_url)\n\n # title and id\n movie_id = movie_url.split(\"/\")[-2]\n title = movie_page.find(\"div\", 
class_=\"title_wrapper\").find(\"h1\").get_text(\";\", strip=True).split(\";\")[0]\n\n # director and stars\n credit_summary_elements = movie_page.find_all(\"div\", class_=\"credit_summary_item\")\n director = credit_summary_elements[0].find(\"a\").text if len(credit_summary_elements) > 0 else \"\"\n if len(credit_summary_elements) > 2:\n stars_links = credit_summary_elements[2].find_all(\"a\")\n stars = [str(elem.text) for elem in stars_links[:-1]]\n else:\n stars = []\n movie_data = {\n \"id\": movie_id,\n \"title\": title,\n \"director\": director,\n \"stars\": stars,\n }\n print(movie_data)\n return movie_data", "def imdb_crawl_by_year(year, verbose):\n _crawl_by_year_helper(year, verbose, True, False)", "def movie(response):\n\n response = response.json()\n\n if response.get(\"Error\"):\n raise NotFoundError(response[\"Error\"])\n\n if response[\"Type\"] != \"movie\":\n raise NotFoundError(\"Type is {}, should be movie\".format(response[\"Type\"]))\n\n return [OrderedDict([(\"Title\", response[\"Title\"]),\n (\"ID\", response[\"imdbID\"]),\n (\"Rating\", response[\"imdbRating\"]),\n (\"Year\", response[\"Year\"].split(u\"\\u2013\")[0])])]", "def get_info(url):\r\n soup = make_request(url)\r\n\r\n #get press release title\r\n title_text = soup.find(\"h2\", \"con-title\").text.strip()\r\n title = title_text.partition('\\n')[0]\r\n\r\n #get press release content and date\r\n div = soup.find_all(\"div\") #find div tags\r\n for ele in div:\r\n for div2 in ele(\"div\",\"text-right\"):\r\n if \"發佈日期\" in div2.text:\r\n text = ele.text\r\n date = re.findall(\"\\d\\d\\d\\d-\\d\\d-\\d\\d\", div2.text)[0]\r\n break #prevents reiterating upwards to all div parents\r\n return date, title, text", "def get_movies(iurl):\n movies = []\n \n if iurl[-3:] == '?s=':\n search_text = GetSearchQuery('WatchOnlineMovies')\n search_text = urllib.quote_plus(search_text)\n iurl += search_text\n\n html = requests.get(iurl, headers=mozhdr).text\n mlink = SoupStrainer('div', {'class':re.compile('postbox')})\n items = BeautifulSoup(html, parseOnlyThese=mlink)\n plink = SoupStrainer('div', {'class':'wp-pagenavi'})\n Paginator = BeautifulSoup(html, parseOnlyThese=plink)\n\n for item in items:\n title1 = item.h2.text\n try:\n title2 = title1.replace(\"Full Movie\", \"\")\n except:\n title2 = title1.replace(\"Watch Online\", \"\")\n try:\n title3 = title2.replace(\"Watch Online Placeholdernt\", \"\")\n except:\n title3 = title2.replace(\".\", \"\")\n try:\n title4 = title3.replace(\".\", \"\")\n except:\n title4 = title3.replace(\"Watch Online Placeholder\",\"\")\n try:\n title5 = title4.replace(\"Watch Online\", \"\")\n except:\n title5 = title4.replace(\"Download\",\"\")\n try:\n title6 = title5.replace(\"Watch Onlin\", \"\")\n except:\n title6 = title5.replace(\"Placeholder\",\"\")\n try:\n title7 = title6.replace(\"HD Pri\", \"\")\n except:\n title7 = title6.replace(\"Placeholder\",\"\")\n try:\n title8 = title7.replace(\" Watch On\", \"\")\n except:\n title8 = title7.replace(\"Placeholder\",\"\")\n try:\n title9 = title8.replace(\" Watch\", \"\")\n except:\n title9 = title8.replace(\"Placeholder\",\"\")\n try:\n title10 = title9.replace(\"Free Down\", \"\")\n except:\n title10 = title9.replace(\"Placeholder\",\"\")\n try:\n title11 = title10.replace(\"Free D\", \"\")\n except:\n title11 = title10.replace(\"Placeholder\",\"\")\n try:\n title12 = title11.replace(\"Free\", \"\")\n except:\n title12 = title11.replace(\"Placeholder\",\"\")\n try:\n title13 = title12.replace(\" F\", \"\")\n except:\n title13 = 
title12.replace(\"Placeholder\",\"\")\n try:\n title14 = title13.replace(\" Fr\", \"\")\n except:\n title14 = title13.replace(\"Placeholder\",\"\")\n try:\n title15 = title14.replace(\" Fre\", \"\")\n except:\n title15 = title14.replace(\"Placeholder\",\"\")\n try:\n title16 = title15.replace(\" HD\", \"\")\n except:\n title16 = title15.replace(\"Placeholder\",\"\")\n try:\n title17 = title16.replace(\" H\", \"\")\n except:\n title17 = title16.replace(\"Placeholder\",\"\")\n try:\n title18 = title17.replace(\" HD P\", \"\")\n except:\n title18 = title17.replace(\"Placeholder\",\"\")\n try:\n title19 = title18.replace(\" re\", \"\")\n except:\n title19 = title18.replace(\"Placeholder\",\"\")\n try:\n title120 = title19.replace(\" r\", \"\")\n except:\n title120 = title19.replace(\"Placeholder\",\"\")\n # Coloring Years\n try:\n title21 = title120.replace(\"(2018)\", \"[COLOR yellow](2018)[/COLOR]\")\n except:\n title21 = title120.replace(\"Placeholder\",\"\")\n try:\n title22 = title21.replace(\"(2016)\", \"[COLOR lightsalmon](2016)[/COLOR]\")\n except:\n title22 = title21.replace(\"Placeholder\",\"\")\n try:\n title23 = title22.replace(\"(2015)\", \"[COLOR lime](2016)[/COLOR]\")\n except:\n title23 = title22.replace(\"Placeholder\",\"\")\n # Language\n try:\n title24 = title23.replace(\"Hindi\", \"[COLOR green]Hindi[/COLOR]\")\n except:\n title24 = title23.replace(\"Placeholder\",\"\")\n try:\n title25 = title24.replace(\"Dubbed\", \"[COLOR cyan]Dubbed[/COLOR]\")\n except:\n title25 = title24.replace(\"Placeholder\",\"\")\n\n # Continued\n try:\n title26 = title25.replace(\" nt o\", \"\")\n except:\n title26 = title25.replace(\"Placeholder\",\"\")\n try:\n title27 = title26.replace(\" nt F\", \"\")\n except:\n title27 = title26.replace(\"Placeholder\",\"\")\n try:\n title28 = title27.replace(\" nt\", \"\")\n except:\n title28 = title27.replace(\"Placeholder\",\"\")\n try:\n title = title28.replace(\" Pr\", \"\")\n except:\n title = title28.replace(\"Placeholder\",\"\")\n\n url = item.h2.find('a')['href']\n try:\n thumb = item.find('img')['src'].strip()\n except:\n thumb = _icon\n movies.append((title, thumb, url))\n \n if 'next' in str(Paginator):\n\n nextli = Paginator.find('a', {'class':re.compile('page larger')})\n\n purl = nextli.get('href')\n pages = Paginator.findAll('span', {'class':re.compile('pages')})\n lastpg = pages[len(pages)-1].text\n title = 'Next Page.. 
(Currently in %s)' % (lastpg)\n movies.append((title, _icon, purl))\n \n return movies", "def __get_movies(title):\n params = {\n 's': title,\n 'type': 'movie'\n }\n\n response = requests.get(API_URL + API_KEY, params=params).json()\n return response", "def crawl_movie_profile(movie_name, year=None):\n\n # Search\n query = _TITLE_QUERY.format(title=_convert_title(movie_name))\n search_res = bs(request.urlopen(query), \"html.parser\")\n tables = search_res.find_all(\"table\", {\"class\": \"findList\"})\n if len(tables) < 1:\n return {}\n res_table = tables[0]\n if year is None:\n movie_row = res_table.find_all(\"tr\")[0]\n else:\n for row in res_table.find_all(\"tr\"):\n if (str(year) in str(row)) or (str(year-1) in str(row)):\n movie_row = row\n movie_code = re.findall(_MOVIE_CODE_REGEX, str(movie_row))[0]\n\n # Movie Profile\n cur_profile_url = _PROFILE_URL.format(code=movie_code)\n prof_page = bs(request.urlopen(cur_profile_url), \"html.parser\")\n\n # Extracting properties\n props = {}\n props['name'] = movie_name\n props['rating'] = _get_rating(prof_page)\n props['rating_count'] = _get_rating_count(prof_page)\n props['genres'] = _get_geners(prof_page)\n props['user_review_count'], props['critic_review_count'] = \\\n _get_review_counts(prof_page)\n props['metascore'] = _get_metascore(prof_page)\n props['year'] = _get_year(prof_page)\n props['duration'] = _get_duration(prof_page)\n props.update(_get_box_office_props(prof_page))\n props.update(_get_rating_props(movie_code))\n props.update(_get_business_props(movie_code))\n props.update(_get_release_props(movie_code))\n props.update(_get_reviews_props(movie_code))\n return props", "def find_year(title):\n # find all patterns that match the year pattern\n matches = year_pattern.findall(title)\n # if any matches\n if matches:\n # record for convienence\n year = matches[-1]\n too_short = len(title) < 8\n # If the year is the title then return None\n if year == title:\n return None\n # If we have enough room for 1 block of 4 digits and its at the start\n elif too_short and title.startswith(year):\n return None\n else:\n return year", "def parse_top_movies(html: str) -> ResultSet:\n\n soup = BeautifulSoup(html, \"html.parser\")\n return soup.find_all(\"div\", class_=\"lister-item-content\")", "def genre_html(genre):\n genre = genre\n link = 'https://www.imsdb.com/genre/%s' % genre\n html = str(BeautifulSoup(requests.get(link).text, 'lxml'))\n\n start = html.find('<h1>Romance Movie Scripts</h1>')\n end = html[start:].find('</td>')\n return html[start:start+end]", "def imdb_id(title):\n pass", "def get_info_game(soup):\n info = []\n\n content = soup.select(\"div.fftit.s20.b\").pop()\n info.append(content.span.text)\n info.append(re.search(r'\\((.*?)\\)', content.text).group(1))\n\n for dt, dd in zip(soup.findAll(\"dt\"), soup.findAll(\"dd\")):\n if dt.text == \"Desarrollador:\":\n info.append(dd.text)\n elif dt.text == \"Editor:\":\n info.append(dd.text)\n elif dt.text == \"Género:\":\n info.append(dd.text)\n\n info.append(soup.find(\"span\", {\"itemprop\": \"releaseDate\"}).attrs['content'])\n\n info.extend([div.span.text for div in soup.select(\"div.dtc.wi36\")])\n\n return zip([\"name\", \"platform\", \"study\", \"publisher\", \"genre\", \"releaseDate\", \"3DJuegosScore\", \"userScore\"], info)", "def meta_extract(doc):\n title_search = re.compile(r'(title:\\s*)(?P<title>.*(\\n *\\w.*)*)(\\nauthor:)', re.IGNORECASE)\n author_search = re.compile(r'(author:)(?P<author>.*)', re.IGNORECASE)\n translator_search = 
re.compile(r'(translator:)(?P<translator>.*)', re.IGNORECASE)\n illustrator_search = re.compile(r'(illustrator:)(?P<illustrator>.*)', re.IGNORECASE)\n title = re.search(title_search, doc).group('title')\n author = re.search(author_search, doc)\n translator = re.search(translator_search, doc)\n illustrator = re.search(illustrator_search, doc)\n if author: \n author = author.group('author')\n if translator:\n translator = translator.group('translator')\n if illustrator:\n illustrator = illustrator.group('illustrator')\n print \"Title: {}\".format(title)\n print \"Author(s): {}\".format(author)\n print \"Translator(s): {}\".format(translator)\n print \"Illustrator(s): {}\\n\".format(illustrator)\n # return title, author, illustrator, translator", "def search_mApe_title (title,format):\n\n mape_main_url = 'https://www.mightyape.co.nz/'\n # Defining the url paths for search types\n mape_mv_category_url = 'movies-tv/movies/all?q='+parse.quote_plus(title)+\"+\"\n mape_mv_format_search_url = 'movieformat~'+format\n\n # This is the final url string\n\n searchUrl = mape_main_url+mape_mv_category_url+mape_mv_format_search_url\n #'https://www.mightyape.co.nz/movies-tv/movies/all?sort=2&q=movieformat~blu-ray'\n\n # Using a dictionary to store data, as contains list with objects\n mape_list = {}\n\n page = requests.get(searchUrl)\n tree = html.fromstring(page.content)\n\n data = tree.xpath(\n '//div[@class=\"product-list gallery-view\"]/div[@class=\"product\"]/div[@class=\"title\"]/a') # <--- WORKS\n\n data_alt = tree.xpath('//div[@class=\"product-list gallery-view\"]/div[@class=\"product\"]')\n\n print('Getting results from url:', searchUrl)\n print('Number of objects=', len(data_alt))\n count = 1\n\n for item in data_alt:\n simple_item = item.xpath('div[@class=\"title\"]/a')\n title = simple_item[0].text\n link = simple_item[0].get('href')\n format = item.xpath('div[@class=\"format\"]/text()')\n rating = item.xpath('div[@class=\"customer-rating\"]/span/span[@class=\"average\"]/text()')\n base_price = item.xpath('div[@class=\"price\"]/s/text()')\n hot_price = item.xpath('div[@class=\"price\"]/span[@class=\"price hot\"]/text()')\n normal_price = item.xpath('div[@class=\"price\"]/span[@class=\"price\"]/text()')\n if len(rating) > 0:\n # temp_mv = Movie_object(title,format[0],rating[0].strip(), mape_main_url + link,normal_price, base_price, hot_price)\n print(title, format[0], rating[0].strip(), mape_main_url + link, normal_price, base_price, hot_price)\n # mape_list[title] = temp_mv\n else:\n print(title, format[0], 'n/a', mape_main_url + link, normal_price, base_price, hot_price)\n # temp_mv = Movie_object(title, format[0], 'n/a', mape_main_url + link, normal_price, base_price, hot_price)\n # mape_list[title] = temp_mv\n\n count += 1\n\n return mape_list", "def tmdb_info(title):\n result = False\n search_result = tmdb.Movies(title, limit=True)\n for movie in search_result.iter_results():\n result = movie\n break\n return result", "def extract_movie_titles(dictionary):\n results = dictionary['Similar']['Results']\n lstmt = [d['Name'] for d in results]\n return lstmt", "def check_ratings(self):\n\n self.browser.get('https://www.imdb.com/')\n\n for title in self.titles:\n input_bar = self.browser.find_element_by_id('navbar-query')\n input_bar.clear()\n\n input_bar.send_keys(title)\n input_bar.send_keys(Keys.RETURN)\n\n time.sleep(3)\n\n # Click on the first suggestion\n css_selector = \"div.findSection:nth-child(3) > table:nth-child(2) > tbody:nth-child(1) > tr:nth-child(1) > td:nth-child(2) > 
a:nth-child(1)\"\n self.browser.find_element_by_css_selector(css_selector).click()\n time.sleep(3)\n\n # Pull details that will always be available\n score = str(self.browser.find_element_by_class_name('ratingValue').text)\n score = score.split('/10')[0].replace(',', '.')\n\n time.sleep(3)\n\n summary = str(self.browser.find_element_by_class_name('summary_text').text)\n subtext = str(self.browser.find_element_by_class_name('subtext').text)\n\n # Pull details that differ between movies and series\n try:\n duration = str(self.browser.find_element_by_class_name('bp_sub_heading').text) # Only for series\n if 'episodes' not in duration:\n duration = 'Some episodes'\n except Exception:\n # bp_sub_heading won't be found on a movie page\n duration = 'movie'\n\n if subtext[0].isdigit():\n # Split up the details from the subtext\n subtext_list = subtext.split(' | ')\n else:\n # Some movies' subtext starts with 'R' / 'PG-13'\n subtext_list = subtext.split(' | ')\n del subtext_list[0]\n\n # Duration\n if duration == 'movie':\n show_type = 'Movie'\n duration = subtext_list[0]\n try:\n year = datetime.datetime.strptime(subtext_list[2].split(' (')[0], '%d %B %Y').strftime('%Y')\n except ValueError:\n year = str(subtext_list[2].split(' (')[0][-4:])\n\n else: # series\n show_type = 'Serie'\n # Retrieve last season and its release date\n season_tab = str(self.browser.find_element_by_class_name('seasons-and-year-nav').text).strip()\n\n numbers = re.findall('[0-9]+', season_tab)\n latest_season = int(numbers[0])\n latest_year = int(max(numbers, key=lambda x: int(x)))\n\n duration += ' (%d Seasons in %d), %s per episode' % (latest_season, latest_year, subtext_list[0])\n\n year = re.findall('[0-9]+', subtext_list[2])[0]\n\n # Pull some more data out from the subtext\n genres = subtext_list[1].split(', ')\n\n # Pull details that are not always available\n creds_list = []\n creds = self.browser.find_elements_by_class_name('credit_summary_item')\n for c in creds:\n temp = str(c.text)\n if '|' in temp:\n temp = temp.split('|')[0]\n\n creds_list.append(temp)\n\n self.data_dict[title] = {\n 'score': score,\n 'summary': summary,\n 'duration': duration,\n 'credits': creds_list,\n 'genres': genres,\n 'released': year,\n 'type': show_type,\n }", "def get_movie_info(movie_url):\n # 指定電影資訊的 CSS 選擇器\n rating_css = \"strong span\"\n genre_css = \".subtext a\"\n poster_css = \".poster img\"\n cast_css = \".primary_photo+ td a\"\n \n movie_doc = pq(movie_url)\n # 擷取資訊\n rating_elem = movie_doc(rating_css)\n movie_rating = float(rating_elem.text())\n genre_elem = movie_doc(genre_css)\n movie_genre = [x.text.replace(\"\\n\", \"\").strip() for x in genre_elem]\n movie_genre.pop()\n movie_poster_elem = movie_doc(poster_css)\n movie_poster = movie_poster_elem.attr('src')\n movie_cast_elem = movie_doc(cast_css)\n movie_cast = [x.text.replace(\"\\n\", \"\").strip() for x in movie_cast_elem]\n \n # 回傳資訊\n movie_info = {\n \"rating\": movie_rating,\n \"genre\": movie_genre,\n \"poster\": movie_poster,\n \"cast\": movie_cast\n }\n return movie_info", "def get_title_artist(title_element): \n \n \n title_token = title_element.text.split(\" \")\n\n word = title_token.pop(0)\n artist = ''\n title = ''\n first = True\n while(title_token != [] and word != '-' and word[-1] != '-'):\n if first:\n first = False\n artist += (word)\n else:\n artist += ' '\n artist += word\n\n word = title_token.pop(0)\n \n if word[-1] == '-':\n word = word[:-1]\n artist += word\n \n if title_token == []:\n print(\"ERROR HERE: \", title_element.text)\n return None, 
None\n \n word = title_token.pop(0)\n first = True\n\n while(True):\n if first:\n first = False\n title += word\n else:\n title += ' '\n title += word\n if title_token != []:\n word = title_token.pop(0)\n if word == \"ALBUM\" or (word == \"EP\" and title_token[0] == \"REVIEW\"):\n break\n else:\n break\n return title, artist", "def movie_spider(self, movieTag):\n index = 0\n logging.info(\"Start crawling tag: %s\" % movieTag)\n while index < self.MAX_NUM:\n root = \"https://movie.douban.com/tag/%s?start=%d&type=T\" % (movieTag, index)\n result = {}\n try:\n html = requests.get(root, headers=random.choice(self.headers)).content\n tree = etree.HTML(html.decode('utf-8'))\n items = tree.xpath(\"//table/tr[@class='item']\")\n if len(items) == 0:\n break\n index += len(items)\n for item in items:\n itemURL = item.xpath(\"td/a[@class='nbg']/@href\")[0].strip()\n itemHTML = requests.get(itemURL, headers=random.choice(self.headers)).content\n itemTree = etree.HTML(itemHTML.decode('utf-8'))\n title = itemTree.xpath(\"//h1/span[@property='v:itemreviewed']/text()\")[0].strip()\n info = itemTree.xpath(\"//div[@class='subject clearfix']/div[@id='info']\")[0]\n director = info.xpath(\".//a[@rel='v:directedBy']/text()\")\n scriptor = info.xpath(\"span\")[1].xpath(\"span/a/text()\") # scriptor is not well formatted\n actors = info.xpath(\".//a[@rel='v:starring']/text()\")\n genre = info.xpath(\".//span[@property='v:genre']/text()\")\n initDate = info.xpath(\".//span[@property='v:initialReleaseDate']/text()\")\n runtime = info.xpath(\".//span[@property='v:runtime']/text()\")\n rating = itemTree.xpath(\"//strong[@property='v:average']/text()\")[0].strip()\n \n result['title'] = title\n result['director'] = '/'.join(director[:])\n result['scriptor'] = '/'.join(scriptor[:])\n result['actors'] = '/'.join(actors[:])\n result['genre'] = '/'.join(genre[:])\n result['initDate'] = '/'.join(initDate[:])\n result['runtime'] = '/'.join(runtime[:])\n result['rating'] = rating\n\n self._movie_list.append(result)\n result = {}\n\n except Exception as e:\n logging.exception(\"Error while crawling tag: %s\" % movieTag)", "def fetch_title(url):\n # validate url.\n if \"http\" not in url or len(url) <= 11:\n return \"\"\n r = requests.get(url)\n if r:\n soup = BeautifulSoup(r.text, 'html.parser')\n try:\n title = soup.select(\"title\")[0].string\n except:\n title=\"\"\n else:\n title=\"\"\n return title", "def get_titles(filename):\n\n with open(filename) as f:\n reader = csv.DictReader(f, fieldnames = ['title', 'year'])\n titles = list(reader)\n return titles", "def getMovieInfo(url):\n infobox = getInfoBox(url)\n if infobox:\n infoDict = {}\n title = getTitle(infobox)\n infoDict[\"Title\"] = title\n for label in getLabels(infobox):\n infoDict[label] = getContents(infobox, label=label)\n # Adding IMDB and Meta scores\n omdbObject = getOmdbInfo(title)\n if omdbObject:\n infoDict[\"ImdbScore\"] = omdbObject.get(\"imdbRating\", \"N/A\")\n infoDict[\"Metascore\"] = omdbObject.get(\"Metascore\", \"N/A\")\n else:\n infoDict[\"ImdbScore\"] = None\n infoDict[\"Metascore\"] = None\n\n return infoDict\n else:\n pass", "def title_html(title):\n title = title.replace(' ', '-')\n link = 'https://www.imsdb.com/scripts/%s.html' % title\n html = str(BeautifulSoup(requests.get(link).text, 'lxml'))\n\n start = html.find('<pre>')\n end = html[start:].find('</pre')\n return html[start:start+end]", "def scrape_story_metadata(self, story_id):\n url = '{0}/s/{1}'.format(self.base_url, story_id)\n result = requests.get(url)\n html = 
result.content\n #print html \n soup = BeautifulSoup(html, self.parser)\n\n # print soup\n try:\n pre_story_links = soup.find(id='pre_story_links').find_all('a')\n except AttributeError:\n pre_story_links = None\n if re.search(r\"var userid = (.*);\", str(soup)) is None:\n author_id = \"0\"\n else: \n author_id = int(re.search(r\"var userid = (.*);\", str(soup)).groups()[0]);\n #print re.search(r\"var title = (.*);\", str(soup))\n if re.search(r\"var title = (.*);\", str(soup)) is None:\n title = \"NO-TITLE\"\n else:\n title = re.search(r\"var title = (.*);\", str(soup)).groups()[0];\n title = unquote_plus(title)[1:-1]\n metadata_div = soup.find(id='profile_top')\n# times = metadata_div.find_all(attrs={'data-xutime':True})\n# metadata_text = metadata_div.find(class_='xgray xcontrast_txt').text\n# metadata_parts = metadata_text.split('-')\n# genres = self.get_genres(metadata_parts[2].strip())\n metadata = {\n 'id': story_id,\n# 'canon_type': pre_story_links[0].text,\n# 'canon': pre_story_links[1].text,\n 'author_id': author_id,\n 'title': title,\n# 'updated': int(times[0]['data-xutime']),\n# 'published': int(times[1]['data-xutime']),\n# 'lang': metadata_parts[1].strip(),\n# 'genres': genres\n }\n \"\"\"\n for parts in metadata_parts:\n parts = parts.strip()\n tag_and_val = parts.split(':')\n if len(tag_and_val) != 2:\n continue\n tag, val = tag_and_val\n tag = tag.strip().lower()\n if tag not in metadata:\n val = val.strip()\n try:\n val = int(val.replace(',', ''))\n metadata['num_'+tag] = val\n except:\n metadata[tag] = val\n if 'status' not in metadata:\n metadata['status'] = 'Incomplete'\n \"\"\"\n return metadata", "def parse_title(self, pre):\n # Extract datapoints\n title_text = str(pre)\n title = {}\n\n identity_data = self.identity_regex.search(title_text)\n title['linc'] = int(identity_data.group(1).strip().replace(' ', ''))\n title['short_legal'] = identity_data.group(2).strip().replace(';', ' ')\n title['title_number'] = identity_data.group(3).strip()\n\n try:\n title['ats_reference'] = self.ats_regex.search(title_text).group(1).replace(';',' ')\n except AttributeError:\n title['ats_reference'] = ''\n\n title['municipality'] = self.municipality_regex.search(title_text).group(1).replace('\\r','')\n\n try:\n references = self.reference_regex.search(title_text).group(1).split(\"\\n\")\n references = [i.strip() for i in references]\n references = list(filter(None, references))\n title['reference_number'] = references\n except AttributeError:\n title['reference_number'] = ['']\n\n payday_raw = self.payday_regex.search(title_text).group(3).strip('</pre>').strip()\n title['registration'] = payday_raw[:11]\n title['date'] = reversed(payday_raw[15:25].split('/'))\n title['date'] = '-'.join(title['date'])\n title['document_type'] = payday_raw[27:46].strip()\n\n title['value'] = self._try_int(payday_raw[46:62].strip())\n title['consideration'] = self._try_int(payday_raw[62:80].strip())\n\n if \"CONDOMINIUM\" in title_text:\n title['condo'] = True\n else:\n title['condo'] = False\n\n title['title_text'] = title_text.strip('<pre>').strip('</pre>').strip()\n\n return title", "def get_podcast_episodes(url):\n\n def parse_pubdate(date_string):\n \"\"\"\n Change pubdate string to datetime object. 
Tries a bunch of\n possible formats, but if none of them is a match, it will\n return a epoch = 0 datetime object\n\n :param date_string: A string representing a date\n :return: datetime object\n \"\"\"\n date_formats = (\n '%a, %d %b %Y %H:%M:%S +0000',\n '%a, %d %b %Y',\n '%a, %d %b %Y%H:%M:%S +0000',\n '%a, %d %b %Y %H:%M',\n '%a, %d %b %Y %H.%M'\n )\n df_generator = (format for format in date_formats)\n\n date = None\n while date is None:\n try:\n date = datetime.strptime(date_string, next(df_generator))\n except ValueError:\n pass\n except StopIteration:\n date = datetime.fromtimestamp(0)\n\n return date\n\n doc = get_document(url)\n\n return (\n {\n 'url': item.select('guid')[0].text,\n 'Premiered': parse_pubdate(\n item.select('pubdate')[0].text\n ).strftime(\"%d.%m.%Y\"),\n # 'Duration': duration_to_seconds(item.find('itunes:duration').text),\n 'title': item.title.text,\n 'Plot': item.description.text\n }\n for item in doc.find_all(\"item\")\n )", "def extract_movie(soup: BeautifulSoup) -> Movie:\n\n title, year = extract_movie_header(soup)\n runtime, genres, certificate = extract_movie_meta(soup)\n rating, metascore = extract_movie_rating_bar(soup)\n votes, gross = extract_movie_extra(soup)\n\n return Movie(\n title, genres, rating, year, runtime, votes, metascore, certificate, gross\n )", "def fetch(self, movie_id: str) -> AVInfo:\n movie_id = movie_id.upper()\n content = requests.get(f'https://javbus.com/{movie_id}')\n content.raise_for_status()\n tree = BeautifulSoup(content.content, features='html.parser')\n info = AVInfo(movie_id=movie_id)\n\n # fill information\n info.title = tree.select_one('div.container > h3').text\n info_table = tree.select_one('div.container > div.row.movie > div.info')\n info_keys_mapping = {\n '識別碼': 'movie_id',\n '發行日期': 'premiered',\n '導演': 'director',\n '製作商': 'studio',\n '發行商': 'publisher',\n '系列': 'series',\n '長度': 'movie_length',\n }\n last_row = ''\n for info_row in info_table.select('p'):\n if last_row:\n # last row is a header, parse the content accordingly\n if last_row == 'categories':\n tags = [e.text.strip() for e in info_row.select('span.genre')]\n info.tags = [s for s in tags if s]\n elif last_row == 'actors':\n actors = [e.text.strip() for e in info_row.select('span.genre')]\n info.actors = [s for s in actors if s]\n last_row = ''\n else:\n # last row is not a header, parse it\n row_items = info_row.text.strip().split(':')\n if len(row_items) == 2:\n # \"key: value\"\n raw_key, raw_value = info_row.text.split(':')\n raw_key = raw_key.strip()\n raw_value = raw_value.strip()\n if raw_key in info_keys_mapping and raw_value:\n setattr(info, info_keys_mapping[raw_key], raw_value)\n elif raw_key == '類別':\n last_row = 'categories'\n elif raw_key == '演員':\n last_row = 'actors'\n\n # fill fanart images\n fanart_image = tree.select_one('div.container > div.row.movie a.bigImage')\n fanart_image = AVInfoImage(\n file=fanart_image['href'],\n thumbnail=fanart_image.select_one('img')['src'],\n )\n if fanart_image.thumbnail == fanart_image.file:\n fanart_image.thumbnail = None\n info.fanart_images = [fanart_image]\n\n # fill screenshot images\n screenshot_images = tree.select('div.container > div#sample-waterfall > a.sample-box')\n if screenshot_images:\n info.screenshot_images = []\n for screenshot_image in screenshot_images:\n uri = screenshot_image['href']\n thumbnail_uri = screenshot_image.select_one('img')['src']\n if thumbnail_uri == uri:\n thumbnail_uri = None\n info.screenshot_images.append(AVInfoImage(file=uri, thumbnail=thumbnail_uri))\n\n 
info.info_born_time = time.time()\n return info", "def parse(response):\n # print(response.text.encode('utf-8'))\n soup = BeautifulSoup(response.text, 'lxml')\n title = soup.find('title')\n answer = title.string\n return answer", "def extract_movie_meta(soup: BeautifulSoup) -> Tuple[int, str, Optional[str]]:\n\n meta = soup.find(\"p\", class_=\"text-muted\")\n\n runtime_with_suffix = meta.find(\"span\", class_=\"runtime\").get_text()\n runtime = runtime_with_suffix[:-4]\n runtime = int(runtime)\n\n genres = meta.find(\"span\", class_=\"genre\").get_text().split(\", \")\n genres = [genre.strip() for genre in genres]\n\n certificate = None\n if certificate_element := meta.find(\"span\", class_=\"certificate\"):\n certificate = certificate_element.get_text()\n\n return runtime, genres, certificate", "def lookup(title):\n\n # Contact API\n try:\n api_key = os.environ.get(\"API_KEY\")\n response = requests.get(\n f\"http://www.omdbapi.com/?s={title}&apikey=ced7be9a\")\n response.raise_for_status()\n except requests.RequestException:\n return None\n\n # parse response\n try:\n movie = response.json()\n search = movie[\"Search\"]\n search_list = []\n for i in range(len(search)):\n search_prop = {\"title\": search[i][\"Title\"],\n \"year\": search[i][\"Year\"], \n \"poster\": search[i][\"Poster\"],\n \"id\": search[i][\"imdbID\"]}\n search_list.append(search_prop)\n\n return search_list\n\n except (KeyError, TypeError, ValueError):\n return None", "def _parse_title(self, response):\n title_text = response.css(\".title > span::text\").extract_first()\n return re.sub(r\"( - )?\\d{1,4}-\\d{1,2}-\\d{1,4}$\", \"\", title_text.strip()).strip()", "def _parse_title(self, item):\n title = item[\"Title\"]\n return title", "def get_rating(text):\n movie = text\n page = requests.get('http://www.imdb.com/find?ref_=nv_sr_fn&q=' + movie + '&s=tt')\n soup1 = BeautifulSoup(page.content, 'html.parser')\n movieid = soup1.select(\".findList tr a\")[0].get('href')\n movielink = \"http://www.imdb.com\" + movieid\n mlinkpage = requests.get(movielink)\n soup2 = BeautifulSoup(mlinkpage.content, 'html.parser')\n movierating = soup2.select(\".ratingValue span\")[0].text\n metascore = soup2.select(\".metacriticScore\")\n reviewlink = movielink + 'reviews'\n linkpage = requests.get(reviewlink)\n soup3 = BeautifulSoup(linkpage.content, 'html.parser')\n \n return soup3, movierating", "def extract_names(filename):\n f = open(filename,'rU') \n name_data = f.read()\n year_data= re.search(r'Popularity\\sin\\s(\\d\\d\\d\\d)', name_data)\n if not year_data :\n print ' no year found '\n sys.exit(1)\n name_year=year_data.group(1) \n #print 'year :'\n #print name_year\n tuples=re.findall(r'<td>(\\d+)</td><td>(\\w+)</td><td>(\\w+)</td>',name_data)\n #print 'tuples'\n #print tuples\n dict_name = {}\n for a,b,c in tuples :\n #print a + ' boy name: ' + b + ' , girl name : ' + c\n if b not in dict_name :\n dict_name[b] = a\n if c not in dict_name :\n dict_name[c] = a \n #print dict_name \n lst_names = sorted(dict_name.keys()) \n result_names_sorted = []\n result_names_sorted.append(name_year)\n for name in lst_names :\n #print name + \" : \" + dict_name[name]\n result_names_sorted.append(name + ' ' + dict_name[name])\n #print result_names_sorted \n\n return result_names_sorted", "def imdb_info(title):\n try:\n if title in _imdb_cache:\n return _imdb_cache[title]\n except KeyError:\n pass\n i = imdb.IMDb()\n search_result = i.search_movie(title, results=1)\n if not search_result:\n return None\n result = search_result[0]\n i.update(result)\n 
_imdb_cache[title] = result\n return result", "def scrape_top_250(soup):\n # to save the urls\n movie_urls = []\n\n # take the part where all the movies are\n content = soup.find(\"tbody\", \"lister-list\").find_all(\"tr\")\n\n # for every movie take the absolute url\n for title in content:\n url = \"http://www.imdb.com\" + title.find(\"td\", \"titleColumn\").a[\"href\"]\n movie_urls.append(url)\n\n return movie_urls", "def parse():\n G.go(SITE_URL)\n articles = []\n for article in G.doc.select(\"//li[@class='regularitem']\"):\n header = article.select('h4').text()\n text = article.select('div').text()\n url = article.select('h4/a/@href').text()\n dt_string = article.select('h5').text()\n # for date format \"1 Nov 2019 00:00:00\" or \"01 Nov 2019 00:00:00\"\n article_dt = re.search(r'\\d{1,2} [a-zA-Z]+ \\d{4} \\d{2}:\\d{2}:\\d{2}', dt_string)\n if article_dt is None:\n logging.exception('Datestring format is unknown: %s', dt_string)\n continue\n article_dt = article_dt.group(0)\n article_dt = datetime.datetime.strptime(article_dt, '%d %b %Y %H:%M:%S').strftime(\"%Y-%m-%d %H:%M:%S\")\n articles.append({'header': header, 'url': url, 'text': text, 'dt': article_dt})\n return articles", "def collectTitleFromSOF(url):\n\n\tpage = requests.get(url)\n\ttree = html.fromstring(page.content)\n\ttitle = tree.xpath('//title/text()')\n\n\treturn title", "def extract_title(soup):\r\n section = soup.find(\"div\", attrs={\"class\": \"col-sm-6 product_main\"})\r\n title = section.find(\"h1\")\r\n return title.text", "def get_by_title(title):\n query = Session.query(Movie.title)\n result = query.all()\n title_list = [title for title, in result]\n one_item = process.extractOne(title, title_list)\n if one_item:\n result_title, ratio = one_item\n else:\n return None\n if ratio > 60:\n return result_title\n else:\n return None", "def parse_infotable_movie(soup, pageurl):\n\t# find the infobox (right of screen)\n\tinfo_table = soup.findAll('table', {'class': 'infobox vevent'})\n\t# if info_table doesn't exist, cannot find year and gross\n\tif len(info_table) == 0:\n\t\treturn None, None\n\tgross = None\n\tyear = None\n\t# search each row of the info_table\n\tfor row in info_table[0].findAll('tr'):\n\t\theader = row.find('th')\n\t\t# check for header for year row\n\t\tif header != None and header.find('div') != None and header.find('div').get_text() == 'Release date':\n\t\t\tdate = row.find('td')\n\t\t\tif date.find('div') != None and date.find('div').find('ul') != None:\n\t\t\t\tdate = date.find('div').find('ul').find('li').find('span')\n\t\t\t# clean the date and extract year before returning\n\t\t\tyear = parse_year_movie(date.get_text(), pageurl)\n\t\t# check for header for gross row\n\t\tif header != None and header.get_text() == 'Box office':\n\t\t\tgross = row.find('td').get_text()\n\t\t\tif gross == None:\n\t\t\t\treturn None\n\t\t\t# clean the gross number to return as int\n\t\t\tgross = parse_gross_movie(gross, pageurl)\n\treturn year, gross", "def get_movies(week):\n movies = {}\n for movie in week.find_all('div', class_='venuefilmbloc'):\n movies[movie.a.strong.text] = \"\\n\".join(movie.span.text.split('; '))\n return movies", "def extract_names(filename):\n # +++your code here+++\n f = open(filename, 'r')\n fl = read_file(filename)\n\n l = []\n lFiltFinal = []\n\n year_match = re.search(r'Popularity\\sin\\s(\\d\\d\\d\\d)', f.read())\n year = year_match.group(1)\n\n for line in fl:\n #if '<h3 align=\"center\">Popularity in' in line:\n #year = line[-10:-6]\n if '<tr align=\"right\"><td>' in line:\n 
rank = line[line.find('<td>')+len('<td>'):line.find('</td>')]\n boys = line[line.index('</td><td>')+len('</td><td>'):line.index('</td><td>',line.index('</td><td>')+1)]\n girls = line[line.index('</td><td>',line.index('</td><td>')+1)+len('</td><td>'):-6]\n l.append([boys,rank])\n l.append([girls,rank])\n\n lFilt = list(unique_by_first_n(1, l))\n\n lFiltFinal.append(year)\n for key in lFilt:\n lFiltFinal.append( key[0] + ' ' + key[1])\n\n lFiltFinal.sort()\n return lFiltFinal", "def return_movie_list(url: str) -> (bs4.element.NavigableString, list, list):\n soup = return_beautiful_soup_object(url)\n cell_data_list = soup.find('body').find('div', {'class': 'col-left-center'}).find('table').find_all('td')\n span_rating_list = list([cell.find_all('span', {'class': 'tMeterScore'}) for cell in cell_data_list])\n rating_list = list([span[0].text.strip() for span in span_rating_list if span != []])\n movies = list(movie[0].text.strip() for movie in list([cell.find_all('a') for cell in cell_data_list])\n if movie != [])\n return soup.title.contents[0], movies, rating_list", "def __get_title_info(movie_id):\n params = {\n 'i': movie_id,\n }\n\n response = requests.get(API_URL + API_KEY, params=params).json()\n\n clean_response = get_clean_response(response)\n\n return clean_response", "def do_imdb_title_search(query):\n url = baseurl + '/search/title?' + query\n logging.warn(url)\n return geturl2(url)", "def extract_movies(dom):\n # select all links of the page and append their names to a list\n name_list = []\n for link in dom.find_all(\"a\"):\n name = link.get_text()\n name_list.append(name)\n\n # make list of fronting and trailing useless link names, using a flag\n temp_list = []\n flag = True\n\n for i in name_list:\n # turn of flag if usefull information is reached\n if i == \"Aadorp\":\n flag = False\n\n # set flag to remove trailing information\n if i == \"Zwolle (Gelderland)\":\n flag = True\n\n # append useless information\n if flag:\n temp_list.append(i)\n\n # remove all useless information\n for i in temp_list:\n name_list.remove(i)\n\n while \"bewerken\" in name_list:\n name_list.remove(\"bewerken\")\n\n # create new list for storing final refinded product\n name_list_refined = []\n\n # remove everything between brackets using regex\n for i in name_list:\n i = re.sub(\"[\\(].*?[\\)]\", \"\", i)\n name_list_refined.append(i)\n\n return(name_list_refined)", "def get_movies_by_title(self, title: str):\n raise NotImplementedError", "def extract_title(text):\n this_feed_link = \"\"\n try:\n text_soup=BeautifulSoup(text)\n except HTMLParser.HTMLParseError:\n print \"Failed to extract feed link due to parse error\"\n this_title = text_soup.find('title').contents[0]\n return this_title", "def parse_name_movie(soup, pageurl):\n\t# find the summary class header\n\tname_tag = soup.findAll('th', {'class': 'summary'})\n\t# if this header doesn't exist, cannot retrieve name\n\tif len(name_tag) == 0:\n\t\tlogging.warn('' + pageurl + 'does not have a valid name field, parsing terminated')\n\t\treturn None\n\t# return name as a string\n\treturn name_tag[0].get_text()", "def all_titles(our_data):\n return [album['album'] for album in our_data]", "def getTitle(movieInfo):\n if \"title\" in movieInfo:\n #We remove the punctuation\n title = \"\".join(c for c in movieInfo[\"title\"] if c not in punctuation)\n #We return the title as a list of words in the right format\n return [ _format(w) for w in title.split() ]\n else:\n raise AttributeError(\"%s instance has no attribute title\" % movieInfo)", "def 
extract_names(filename):\n f = open(filename, 'rU')\n file_text = f.read()\n f.close()\n \n year = re.search(r'Popularity in \\d\\d\\d\\d', file_text)\n if year:\n year = year.group()[-4:]\n else:\n print 'ERROR: year not found in ' + filename\n \n html_rank_names = re.findall(r'<tr align=\"right\"><td>\\d+</td><td>\\w+</td><td>\\w+</td>', file_text)\n name_ranks = {}\n i = 0\n while i < len(html_rank_names):\n line = html_rank_names[i]\n first_tag = line.find('<td>')\n first_end_tag = line.find('</td>')\n rank = line[first_tag + 4 : first_end_tag]\n \n second_tag = first_end_tag + 9\n second_end_tag = line.find('</td>', second_tag)\n name1 = line[second_tag : second_end_tag]\n \n third_tag = second_end_tag + 9\n third_end_tag = len(line) - 5\n name2 = line[third_tag : third_end_tag]\n \n # if the names already are in the dict, skip them because they have a larger number than what is already in the dict\n if name1 not in name_ranks: name_ranks[name1] = rank\n if name2 not in name_ranks: name_ranks[name2] = rank\n i = i + 1\n \n year_name_ranks = []\n year_name_ranks.append(year)\n for name, rank in name_ranks.iteritems():\n year_name_ranks.append(name + ' ' + rank)\n year_name_ranks.sort()\n return year_name_ranks", "def get_imdb_list():\n list_file = 'imdb.txt'\n name_column = 26\n f = open(list_file, 'r')\n film_list = []\n pos = 0\n\n for line in f:\n pos += 1\n words = line.split()\n name = line[name_column:-1]\n # could be problematic is there are brackets in the film name\n year = name[name.find('(') + 1:name.find(')')]\n name = name.replace('(' + year + ')', '')\n film = {\n 'pos': pos,\n 'score': Decimal(words[2]),\n 'name': name.strip(),\n 'year': year\n }\n film_list.append(film)\n f.close()\n return film_list", "def get_site_text(year):\n url = 'https://www.billboard.com/charts/year-end/' + year + '/hot-100-songs'\n print(url)\n r = requests.get(url)\n return r.text", "def crawl_by_title(movie_name, verbose, year=None, parent_pbar=None):\n def _print(msg):\n if verbose:\n if parent_pbar is not None:\n parent_pbar.set_description(msg)\n parent_pbar.refresh()\n sys.stdout.flush()\n tqdm()\n else:\n print(msg)\n\n os.makedirs(_IMDB_DIR_PATH, exist_ok=True)\n file_name = _parse_name_for_file_name(movie_name) + '.json'\n file_path = os.path.join(_IMDB_DIR_PATH, file_name)\n if os.path.isfile(file_path):\n _print('{} already processed'.format(movie_name))\n return _result.EXIST\n\n # _print(\"Extracting a profile for {} from IMDB...\".format(movie_name))\n try:\n props = crawl_movie_profile(movie_name, year)\n # _print(\"Profile extracted succesfully\")\n # _print(\"Saving profile for {} to disk...\".format(movie_name))\n with open(file_path, 'w+') as json_file:\n # json.dump(props, json_file, cls=_RottenJsonEncoder, indent=2)\n json.dump(props, json_file, indent=2)\n _print(\"Done saving a profile for {}.\".format(movie_name))\n return _result.SUCCESS\n except Exception as exc:\n _print(\"Extracting a profile for {} failed\".format(movie_name))\n # traceback.print_exc()\n return _result.FAILURE\n # print(\"Extracting a profile for {} failed with:\".format(movie_name))\n # raise exc", "def title_comm(soup: str, nb:int):\n title = []\n for span in soup.findAll('article', attrs={'itemprop': 'review'}):\n dat = str(recovTextBetweenTags(str(span.findAll('time', attrs={\n 'itemprop': 'datePublished'})), ',')).replace(\"['[\", '').replace(\"]']\", '')\n dat = (format_date(dat))\n if (dat) > (datetime.now() - timedelta(nb)):\n top = span.findAll('h2', attrs={'class': 'text_header'})\n top = 
translate(recovTextBetweenTags(str(top), 'non'))\n title.append(top[0][1:len(top[0])])\n\n return title", "def get_ep_metadata(year):\n with open(f\"{path_to_root}/data/001_html/001_html_{year}.txt\", \"r\") as file:\n soup = BeautifulSoup(file, \"lxml\")\n\n metadata = {}\n\n for episode in soup.findAll(\"article\", attrs={\"data-type\": \"episode\"}):\n ep_html = episode.find(\"a\", attrs={\"class\": \"goto goto-episode\"})\n _, num, title = ep_html[\"href\"].split(\"/\")\n\n date = episode.find(\"span\", attrs={\"class\": \"date-display-single\"}).text.strip()\n\n almost = episode.find(\n \"div\",\n attrs={\n \"class\": \"field field-name-body field-type-text-with-summary field-label-hidden\"\n },\n )\n if almost:\n summary = almost.find(\n \"div\", attrs={\"class\": \"field-item even\"}\n ).text.strip()\n else:\n summary = \"\"\n d = {\"ep_title\": title, \"air_date\": date, \"ep_summary\": summary}\n\n metadata[num] = d\n\n with open(\n f\"{path_to_root}/data/002_metadata/002_ep_metadata_{year}.json\", \"w\"\n ) as file:\n file.write(json.dumps(metadata))\n print(f'Wrote file \"002_ep_metadata_{year}.json\"')", "def additional_data_dict(titles: list) -> dict or str:\n try:\n additional_data = {}\n for title in titles:\n url = \"http://www.omdbapi.com/?i=tt3896198&apikey=6b513db6&t=\" + title\n headers = {\"Accept\": \"application/json\"}\n req = requests.get(url, headers=headers)\n api_content = json.loads(req.content.decode('utf-8'))\n # Because of no BoxOffice key in API for movie 'Ben Hur' (ID 68 in db):\n api_content.setdefault('BoxOffice', 'N/A')\n additional_data[title] = {}\n if api_content['imdbRating']:\n additional_data[title]['imdb_rating'] = float(api_content['imdbRating'])\n else:\n additional_data[title]['imdb_rating'] = -1\n if api_content['Runtime'] == 'N/A':\n additional_data[title]['runtime'] = -1\n else:\n additional_data[title]['runtime'] = int(re.sub(r'[^0-9]', '', api_content['Runtime']))\n if api_content['BoxOffice'] == 'N/A':\n additional_data[title]['box_office'] = -1\n else:\n additional_data[title]['box_office'] = int(re.sub(r'[^0-9]', '', api_content['BoxOffice']))\n nominations_oscars = re.search(r'Nominated for (.+?) Oscar', api_content['Awards'])\n if nominations_oscars:\n additional_data[title]['nominations_oscars'] = int(nominations_oscars.group(1))\n else:\n additional_data[title]['nominations_oscars'] = 0\n oscars = re.search(r'Won (.+?) Oscar', api_content['Awards'])\n if oscars:\n additional_data[title]['oscars'] = int(oscars.group(1))\n else:\n additional_data[title]['oscars'] = 0\n nominations_others = re.search(r'(\\d+) nomination', api_content['Awards'])\n if nominations_others:\n additional_data[title]['nominations_others'] = int(nominations_others.group(1))\n else:\n additional_data[title]['nominations_others'] = 0\n wins_others = re.search(r'(\\d+) win', api_content['Awards'])\n if wins_others:\n additional_data[title]['wins_others'] = int(wins_others.group(1))\n else:\n additional_data[title]['wins_others'] = 0\n return additional_data\n except KeyError:\n return \"No data about some movie(s). Check data source.\"\n except requests.exceptions.ConnectionError:\n return \"No access. 
Check internet connection or API is down.\"", "def extract_metadata(name):\n seps = name.count(\" - \")\n artist = title = None\n\n if seps == 1:\n\n pos = name.find(\" - \")\n artist = name[:pos].strip()\n title = name[pos + 3:].strip()\n\n else:\n title = name.strip()\n\n return dict(artist=artist, title=title)", "def search_by_title(title):\n\turl = tmdb_api(\"search/movie\")+\"&query=\"+urllib.quote_plus(title)\n\tresponse = json.load(urllib2.urlopen(url))\n\treturn JSONResponse(response)", "def get_video_details(job):\n # Set out title from the job\n # return if not identified\n title = job.title\n if title == \"not identified\":\n return\n\n # strip all non-numeric chars and use that for year\n # TODO: possible need for making sure year is a str\n year = re.sub(\"[^0-9]\", \"\", job.year)\n if year is None:\n year = \"\"\n\n # needs_new_year = False\n omdb_api_key = job.config.OMDB_API_KEY\n\n # TODO: possible need for making sure str\n # TODO: possible need for making sure str\n logging.debug(\"Title: \" + title + \" | Year: \" + year)\n\n # dvd_title_clean = cleanupstring(dvd_title)\n title = title.strip()\n title = re.sub('[_ ]', \"+\", title)\n\n # TODO: possible need for making sure is a str\n logging.debug(\"Calling webservice with title: \" + title + \" and year: \" + year)\n response = callwebservice(job, omdb_api_key, title, year)\n logging.debug(\"response: \" + response)\n\n # handle failures\n # this is a little kludgy, but it kind of works...\n if (response == \"fail\"):\n\n if year:\n # first try subtracting one year. This accounts for when\n # the dvd release date is the year following the movie release date\t\n logging.debug(\"Subtracting 1 year...\")\n response = callwebservice(job, omdb_api_key, title, str(int(year) - 1))\n logging.debug(\"response: \" + response)\n\n # try submitting without the year\n if response == \"fail\":\n # year needs to be changed\n logging.debug(\"Removing year...\")\n response = callwebservice(job, omdb_api_key, title, \"\")\n logging.debug(\"response: \" + response)\n if response == \"fail\":\n # see if there is a hyphen and split it\n # if title.find(\"-\") > -1:\n while response == \"fail\" and title.find(\"-\") > 0:\n # dvd_title_slice = title[:title.find(\"-\")]\n title = title.rsplit('-', 1)[0]\n # dvd_title_slice = cleanupstring(dvd_title_slice)\n logging.debug(\"Trying title: \" + title)\n response = callwebservice(job, omdb_api_key, title, year)\n logging.debug(\"response: \" + response)\n\n # if still fail, then try slicing off the last word in a loop\n while response == \"fail\" and title.count('+') > 0:\n title = title.rsplit('+', 1)[0]\n logging.debug(\"Trying title: \" + title)\n response = callwebservice(job, omdb_api_key, title, year)\n logging.debug(\"response: \" + response)\n # Added from pull 366 but we already try without the year.\n # Possible bad/increased rate of false positives\n if response == \"fail\":\n logging.debug(\"Removing year...\")\n response = callwebservice(job, omdb_api_key, title, \"\")", "def get_year_with_links():\n response = get_response(MAIN_PAGE)\n if response.ok:\n soup = BeautifulSoup(response.text, 'html.parser')\n years_li = soup.find_all(\n 'md-card-footer'\n )\n years_dict = {}\n # Not including the last <a> tag because that is not relevant.\n for years_html in years_li[:-1]:\n year = [num for num in years_html.text.split() if num.isdigit()][0]\n relative_link = years_html.select('a')[0].get('href')\n full_link = HOME_PAGE + relative_link\n years_dict[year] = full_link\n return 
years_dict\n else:\n print('Something Went Wrong')\n print(f'Status Code: {response.status_code}')\n sys.exit(1)", "def find_movie(imdb_file, title_regex):\n\n process = subprocess.run([\n \"grep\",\n \"-i\", # Case insensitive\n f'movie\\t[^\\t]*{title_regex}', # Only match movies\n imdb_file\n ], stdout=subprocess.PIPE)\n hits = [dict(zip(COLUMNS, hit.decode(\"utf-8\").split(\"\\t\")))\n for hit in process.stdout.split(b\"\\n\")[:-1]]\n # Try to filter out irrelevant hits, e.g. that don't yet exist or are porn\n legitimate_hits = [hit for hit in hits\n if hit[\"startYear\"] != \"\\\\N\" and\n hit[\"isAdult\"] == \"0\"]\n return legitimate_hits", "def getYears():\n url = \"http://www.boxofficemojo.com/weekend/\"\n src = urllib.request.urlopen(url).read()\n soup = BeautifulSoup(src, 'html.parser')\n year_header = soup.find_all(name = \"b\")[1]\n year_elems = year_header.find_all([\"a\", \"font\"])\n years = [int(year.get_text()) for year in year_elems]\n return years", "def html_reader(input_dir):\r\n #read data from the html file\r\n with open(input_dir,'r') as html_file:\r\n content = html_file.read()\r\n content = (content.split('\\n'))[4:-4]\r\n num = re.compile(\"(.*\\t\\d.*)|(\\d*\\d\\.\\d*)\")\r\n information = []\r\n for i in range(len(content)):\r\n if num.match(content[i])==None:\r\n information.append(content[i])\r\n information = information[:-1]\r\n #data parsing\r\n Date = re.compile('( ?CACM|June)')\r\n Meta = re.compile(\"(CA\\d\\d\\d\\d\\d\\d|June)\")\r\n #get date and meta index\r\n for i in range(len(information)):\r\n if Date.match(information[i])!=None:\r\n index_date = i\r\n if Meta.match(information[i])!=None:\r\n index_meta =i\r\n content = information[:index_date]\r\n others = information[index_date+2:index_meta]\r\n for i in range(len(content)):\r\n if content[i]==\"\":\r\n title = content[:i]\r\n abstract = content[i+1:]\r\n break\r\n #get author and other\r\n author = []\r\n other = []\r\n for i in range(len(others)):\r\n if others[i]==\"\":\r\n if re.match(\"[A-Z].*, ?[A-Z].*\\..*\",others[0]) != None:\r\n author = others[:i]\r\n other = others[i+1:]\r\n else:\r\n other = others\r\n break\r\n for i in range(len(author)):\r\n if re.match(\"[A-Z].*, ?[A-Z].*\\..*\",author[i]) != None:\r\n name = author[i].split(\",\")\r\n author[i] = (name[1]+name[0])\r\n author[i] = author[i].replace(\" \",\"\")\r\n author[i] = author[i].replace(\"\\t\",\"\")\r\n author[i] = author[i].lower()\r\n\r\n #parse date\r\n date = []\r\n date.append(re.search(\"19\\d\\d\", information[index_date]).group())\r\n date.append(re.search(\"(January|February|March|April|May|June|JUly|July|August|September|October|November|December)\",information[index_date]).group().lower())\r\n\r\n #parse meta data\r\n meta = []\r\n meta.append(re.search(\"CA\\d\\d\\d\\d\\d\\d\\w?\",information[index_meta]).group().lower())#0\r\n meta.append(re.search(\"[a-z0-9] [A-Z]{2}[A-Z]?\",information[index_meta]).group()[2:].lower())#1\r\n meta.append(re.search(\"(January|February|March|April|May|June|JUly|July|August|September|October|November|December)\",information[index_meta]).group().lower())#2\r\n meta.append(re.search(\"\\w \\d\\d?\",information[index_meta]).group()[2:])#3\r\n meta.append(re.search(\"\\d?\\d:\\d\\d\",information[index_meta]).group())#4\r\n meta.append(re.search(\"(AM|PM)\",information[index_meta]).group().lower())#5\r\n meta.append(re.search(\"19\\d\\d\",information[index_meta]).group())#6\r\n\r\n #build corpus\r\n corpus = set()\r\n lemmatizer = WordNetLemmatizer()\r\n for i in 
range(len(title)):\r\n title[i] = re.sub(\"\\(|\\)|-|\\d\\d?\\d?|:|/|\\.|`|\\?\",\" \",title[i])\r\n words = word_tokenize(title[i])\r\n for word in words:\r\n normal_word = word.lower()\r\n if normal_word not in stopwords.words(\"english\"):\r\n corpus.add(lemmatizer.lemmatize(normal_word))\r\n\r\n for i in range(len(abstract)):\r\n abstract[i] = re.sub(\"\\(|\\)|-|\\d\\d?\\d?|:|/|\\.|`|\\?|,\",\" \",abstract[i])\r\n words = word_tokenize(abstract[i])\r\n for word in words:\r\n normal_word = word.lower()\r\n if normal_word not in stopwords.words(\"english\"):\r\n corpus.add(lemmatizer.lemmatize(normal_word))\r\n\r\n for i in range(len(other)):\r\n other[i] = re.sub(\"\\(|\\)|-|\\d\\d?\\d?|:|/|\\.|`|\\?|,\",\" \",other[i])\r\n words = word_tokenize(other[i])\r\n for word in words:\r\n normal_word = word.lower()\r\n if normal_word not in stopwords.words(\"english\"):\r\n corpus.add(lemmatizer.lemmatize(normal_word))\r\n\r\n corpus = list(corpus)\r\n\r\n return paper(author= author, other= other, metadata= meta,date = date,title = title,abstract = abstract,id=int(input_dir[-9:-5]),corpus = corpus)", "def extract_movies(dom):\n\n movie_csv = []\n for movie in find_div(dom):\n title = find_title(movie)\n rating = find_rating(movie)\n year = find_year(movie)\n actors = find_actors(movie)\n runtime = find_runtime(movie)\n movie_list = append_movie(title, rating, year, actors, runtime)\n movie_csv.append(movie_list)\n return movie_csv # REPLACE THIS LINE AS WELL IF APPROPRIsATE", "def _parse_title(self, response):\n title_str = re.sub(\n r\"\\s+\", \" \", \" \".join(response.css(\".soi-container h2 *::text\").extract())\n ).strip()\n return re.sub(\n r\"(Illinois Commerce Commission|(?=Committee )Committee Meeting$)\",\n \"\",\n title_str,\n ).strip()", "def tv(response):\n\n response = response.json()\n\n if response.get(\"Error\"):\n raise NotFoundError(response[\"Error\"])\n\n if response[\"Type\"] != \"series\":\n raise NotFoundError(\"Type is {}, should be series\".format(response[\"Type\"]))\n\n return [OrderedDict([(\"Title\", response[\"Title\"]),\n (\"ID\", response[\"imdbID\"]),\n (\"Rating\", response[\"imdbRating\"]),\n (\"Year\", response[\"Year\"].split(u\"\\u2013\")[0])])]", "def fetch_title(self, movie_id):\n movie = tmdbsimple.Movies(movie_id)\n request = movie.info()\n\n return movie.title", "def get_episode_details(token, url, season):\n u = url + str(season)\n headers = {'Accept': 'application/json', 'Authorization': token}\n r = requests.get(u, headers=headers)\n json_data = json.loads(r.text).get('data')\n season_details = {}\n season_details['current_season'] = season\n if len(json_data) > 1:\n for episode in json_data:\n d = episode.get('firstAired')\n date = datetime.datetime.strptime(d, \"%Y-%m-%d\")\n today = datetime.datetime.today()\n if date.date() >= today.date():\n season_details['next_ep_no'] = episode.get('airedEpisodeNumber')\n season_details['next_air_date'] = episode.get('firstAired')\n season_details['ep_title'] = episode.get('episodeName')\n season_details['ep_overview'] = episode.get('overview')\n break\n else:\n season_details['next_ep_no'] = (json_data[len(json_data) - 1].get('airedEpisodeNumber'))\n season_details['next_air_date'] = (json_data[len(json_data) - 1].get('firstAired'))\n season_details['ep_title'] = (json_data[len(json_data) - 1].get('episodeName'))\n season_details['ep_overview'] = (json_data[len(json_data) - 1].get('overview'))\n else:\n season_details['next_ep_no'] = 1\n season_details['next_air_date'] = (json_data[0].get('firstAired'))\n 
season_details['ep_title'] = (json_data[0].get('episodeName'))\n season_details['ep_overview'] = (json_data[0].get('overview'))\n if season_details['next_air_date'] == \"\":\n season_details['next_air_date'] = 'TBD'\n if season_details['ep_title'] == \"\" or season_details['ep_title'] is None:\n season_details['ep_title'] = 'TBD'\n if season_details['ep_overview'] == \"\" or season_details['ep_overview'] is None:\n season_details['ep_overview'] = 'TBD'\n return season_details", "def get_quote_and_movie_name():\n html_content = urlopen(MOVIE_QUOTE_SOURCE).read().decode('utf-8')\n soup = BeautifulSoup(html_content, 'html.parser')\n results = soup.find_all(attrs={'class': 'col-xs-9 col-lg-10'})\n quote_regex = re.compile('<blockquote>(.*?)</blockquote>')\n movie_regex = re.compile('</strong>(.*?)</span>')\n movie_em_regex = re.compile('<em>(.*?)</em>')\n movie_regex_second = re.compile('</strong>(.*?)</a>')\n last_results = []\n\n for result in results:\n\n quote_line = str(result.find('blockquote')).replace('\\n', '')\n quote = quote_regex.findall(quote_line)[0].strip()\n movie_line = str(result.find_all(attrs={'class': 'source'})[0])\n try:\n movie_name = movie_regex.findall(movie_line)[0].strip()\n except:\n movie_name = movie_regex_second.findall(movie_line)[0].strip()\n if '<em>' in movie_name:\n movie_name = movie_em_regex.findall(movie_name)[0].strip()\n\n last_results.append((quote, movie_name))\n\n return random.choice(last_results)", "def biological_science_news():\n\n return general_scraper(['http://mesva.univaq.it/?q=avvisi/cl-clm/52672'])", "def get_video_title_releaser_release_time(self, url):\n video_id = ' '.join(re.findall('id.*html', url))\n browser = webdriver.Chrome()\n browser.get(url)\n title = browser.find_element_by_id('subtitle').text\n releaser = browser.find_element_by_id('module_basic_sub').text\n releaser = releaser.replace('+订阅','')\n releaser = releaser.replace(' ','')\n try:\n rt_midstep = browser.find_element_by_class_name('video-status').text\n rt_midstep = rt_midstep.replace('上传于','')\n rt_midstep = rt_midstep.replace(' ','')\n release_time = int(datetime.datetime.strptime(rt_midstep,'%Y-%m-%d').timestamp()*1e3)\n except:\n release_time = 0\n fetch_time = int(datetime.datetime.timestamp(datetime.datetime.now())*1e3)\n D0 = {'video_id': video_id,\n 'title': title,\n 'release_time': release_time,\n 'url': url,\n 'fetch_time': fetch_time}\n return D0", "def get_movie_details(self):\n\n if self.isValidURL(self.url) == False:\n return None\n url = self.formatURL(self.url)\n\n response = requests.get(url)\n soup = BeautifulSoup(response.content, \"html.parser\")\n\n film = self.getJson(soup)\n more = self.getAdditionalDetails(soup)\n self.setMovieDetails(film, soup)\n self.create_mov_dict()\n return self.film", "def parse_manga_title(filename):\n print_info('Attempting to parse manga title from {0}'.format(filename))\n for regex in MANGA_TITLE_REGEX:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_title = m.group('Series')\n return clean_episode_title(extracted_title)\n return ''", "def parse_movie(self, res):\n url = res.css(SELECTORS['MOVIE_URL'])\n obj = {\n 'id': int(url.re_first(r'[/]([0-9]{1,})[/]')),\n 'title': SelectHelper.get(res, SELECTORS['MOVIE_TITLE']),\n 'description': SelectHelper.get(res, SELECTORS['MOVIE_DESCRIPTION'])[12:-10],\n 'advisory': SelectHelper.get_array(res, SELECTORS['MOVIE_ADVISORY']),\n 'image': SelectHelper.get(res, SELECTORS['MOVIE_IMAGE']),\n 'url': BASE_URL + url.extract_first(),\n }\n return 
Movie(obj)", "def query_imdb(movie_title):\n base_url = \"http://omdbapi.com/?t=\" # Only submitting Title\n response = urllib.urlopen(base_url + movie_title)\n if response.getcode() == 200: # HTTP status is OK\n imdb_data = json.loads(response.read()) # Deserialize into dictionary\n return imdb_data\n else: # HTTP error\n return {\"Response\" : \"False\"}", "def __metro(soup):\n news = []\n container = soup.select('.m-title')\n\n for item in container:\n a = item.a\n title = a.string\n link = a['href']\n news.append(dict(title=title, link=link))\n if len(news) == 10:\n break\n return news", "def get_top_movies(genre: Genre) -> List[Movie]:\n\n html = download_top_movies(genre)\n soup = parse_top_movies(html)\n return [extract_movie(result) for result in soup]", "def get_tags(html):\n\ttitle = re.findall('\"title\":\"(.*?)\",', html)[0]\n\ttitle = codecs.getdecoder(\"unicode_escape\")(title)[0]\n\n\tartist = re.findall('\"username\":\"(.*?)\",', html)[0]\n\tartist = codecs.getdecoder(\"unicode_escape\")(artist)[0]\n\n\tgenre = re.findall('\"genre\":\"(.*?)\",', html)[0]\n\tgenre = codecs.getdecoder(\"unicode_escape\")(genre)[0]\n\n\treturn title, artist, genre", "def scrape_title(self, body):\n container = body.find('div', {'class': 'grid_9 push_3'})\n\n title_string = container.find('h1', {'id': 'title'}).get_text()\n\n title_string = self.zapper.zap_string(title_string)\n\n title_string = self.csv_quote_escape(title_string)\n\n return title_string", "def parsed_title(self):\n return parse_pr_title(self.title)", "def articleAuthorsByYear(g = None, year = None):\n if not g:\n return\n articles = g.find(\"Article\", property_key='year', property_value=str(year))\n ret = dict()\n for article in articles:\n ret[article['title']] = article['authors'].replace('\\n','').lower()\n return ret", "def getOmdbInfo(title):\n baseUrl = \"http://www.omdbapi.com/?\"\n # parsing the API credentials to the base url\n credentialsData = urllib.parse.urlencode(credentials)\n finalUrl = baseUrl + credentialsData\n parameters = {\"t\": title} # Parameters to add a query to the url\n try:\n r = requests.get(url=finalUrl, params=parameters)\n return r.json()\n except Exception as e:\n return None", "def extract_names(filename):\n # +++your code here+++\n # Opening the file\n f = open(filename, 'rU')\n # Reading all of the lines\n lines = f.readlines()\n # Empty list to hold the year, names, and ranks\n ranks_names = []\n for line in lines:\n # search for the year\n year = re.search(r'\\s(\\d\\d\\d\\d)</h3>', line)\n # if the year is found, append it to the list\n if year: \n ranks_names.append(year.group(1))\n # search for the rank, male name, and female name\n rank_male_female = re.search(r'(\\d+)</td><td>(\\w+)</td><td>(\\w+)</td>', line)\n # If they are found then append the male name plus its rank, as well as the \n # female name plus its rank\n if rank_male_female:\n ranks_names.append(rank_male_female.group(2) + ' ' + rank_male_female.group(1))\n ranks_names.append(rank_male_female.group(3) + ' ' + rank_male_female.group(1))\n # Sort the list alphabetically\n ranks_names.sort()\n # Return the list\n return ranks_names", "def get_movie_data(movie_title, year=None, *queries):\n\n movie_info = movie_search.omdb_movie_info(movie_title, year)\n video_id = movie_search.youtube_video_id(movie_title, year, *queries)\n\n # Adding the Youtube data to the dictionary\n data = movie_info\n\n data['youtube_id'] = video_id\n data['youtube_url'] = Youtube_URL.format(video_id)\n\n return data", "def scrape_mars():\n 
(news_title, news_p) = scrape_news()\n\n\n mars_data = {\n \"news_title\": news_title,\n \"news_p\": news_p,\n \"jpl_url\": scrape_jpl_images(),\n \"facts_tbl\": scrape_mars_facts(),\n \"weather\": scrape_weather(),\n \"hemi_pct\": scrape_hemispheres(),\n }\n\n\n return mars_data", "def search_title(self):\n\t\tnew_name = self.removez_all(self.init_str)\n\t\tresult = self.search_ext(new_name)\n\t\tresult = self.search_encoder(result)\n\t\tresult = self.search_quality(result)\n\t\tresult = self.search_codec(result)\n\t\tresult = self.search_lang(result)\n\t\tresult = self.search_version(result)\n\t\tresult = self.search_source(result)\n\t\tresult = self.search_audio(result)\n\t\tresult = self.search_year(result)\n\t\tresult = result.replace('...', '.')\n\t\tresult = result.replace('..', '.')\n\t\tself.title = self.remove_lasts_dots(result)" ]
[ "0.6832312", "0.6682347", "0.66435677", "0.6387017", "0.637413", "0.63639027", "0.633812", "0.6296725", "0.61997265", "0.6172732", "0.6140767", "0.61179507", "0.60745186", "0.60595584", "0.59533656", "0.59527224", "0.5951921", "0.5943582", "0.5937595", "0.59366393", "0.5929587", "0.59238297", "0.59185255", "0.59182614", "0.5907752", "0.5899259", "0.58820254", "0.58790547", "0.58673507", "0.5861203", "0.5849537", "0.5844229", "0.584284", "0.584243", "0.58381057", "0.5836804", "0.5823858", "0.5816709", "0.5787972", "0.5786892", "0.5773674", "0.57647616", "0.5758445", "0.57553446", "0.5753293", "0.5747418", "0.5746912", "0.5743576", "0.57387227", "0.57379305", "0.57243365", "0.5701305", "0.5694579", "0.5691463", "0.56896204", "0.5686151", "0.5678624", "0.56688493", "0.5663386", "0.5661011", "0.5659922", "0.563744", "0.5632466", "0.5627568", "0.56250083", "0.5621039", "0.56000346", "0.5588953", "0.55864656", "0.5585244", "0.55844957", "0.55805075", "0.55764306", "0.55752313", "0.5569313", "0.5561443", "0.55612576", "0.5560929", "0.55569637", "0.55516267", "0.5531535", "0.5527774", "0.5525701", "0.552191", "0.55166453", "0.5515801", "0.5515034", "0.5512429", "0.5512319", "0.55055225", "0.55048084", "0.5500321", "0.5499034", "0.5499016", "0.54957026", "0.5484222", "0.5482909", "0.5478558", "0.54735696", "0.5460663" ]
0.7373919
0
Calculate total episode count based on neighbouring same-extension files.
def get_tv_episodes(self) -> int:
    return len(glob.glob(os.path.join(
        os.path.dirname(self.file),
        f"*{os.path.splitext(self.file)[-1]}"
    )))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def n_episodes(self):\n raise NotImplementedError", "def return_episode_num(name):\n return int(name.split(\".\")[0].split(\"ep_\")[1]) # Use split to return only the episode number needed to sort the files in increasing order", "def _get_total_games(self) -> int:\n files = get_tfr_filenames(self.config)\n total_games = 0\n for file in files:\n total_games += int(str(file).split('-')[1].split('.')[0])\n return total_games", "def get_num_episodes(self) -> int:\n return len(self.episodes)", "def get_amount_episodes(episodes: str) -> int:\n a = 0\n for ep in episodes.split(', '):\n if '-' in ep:\n start,end = ep.split('-')\n a += int(end)-int(start)\n else:\n a += int(ep)\n return a", "def episodes_done_inc(self):\n with _MonitorEnv._lock:\n self._episodes_done += 1\n return self._episodes_done", "def get_num_instances_per_file(self, f_name):\n shape = utils_classif.get_shape(os.path.join(f_name.replace('.data', '.shape')))\n file_frames = float(shape[0])\n if self.mode_last_patch == 'discard':\n # the last patch that is always incomplete is discarded\n if self.patch_len == 25 and self.patch_hop == 13 and file_frames == 51:\n num_instances_per_file = 3\n else:\n num_instances_per_file = np.maximum(1, int(np.ceil((file_frames - self.patch_len - 1) / self.patch_hop)))\n\n elif self.mode_last_patch == 'fill':\n # the last patch that is always incomplete will be filled with zeros or signal, to avoid discarding signal\n # hence we count one more patch\n if self.patch_len == 25 and self.patch_hop == 13 and file_frames == 51:\n num_instances_per_file = 3\n else:\n num_instances_per_file = np.maximum(1, 1 + int(np.ceil((file_frames - self.patch_len - 1) / self.patch_hop)))\n\n return num_instances_per_file", "def get_num_instances_per_file(self, f_name):\n shape = utils_classif.get_shape(os.path.join(f_name.replace('.data', '.shape')))\n file_frames = float(shape[0])\n if self.mode_last_patch == 'discard':\n # the last patch that is always incomplete is discarded\n if self.patch_len == 25 and self.patch_hop == 13 and file_frames == 51:\n num_instances_per_file = 3\n else:\n num_instances_per_file = np.maximum(1, int(np.ceil((file_frames - self.patch_len - 1) / self.patch_hop)))\n\n elif self.mode_last_patch == 'fill':\n # the last patch that is always incomplete will be filled with zeros or signal, to avoid discarding signal\n # hence we count one more patch\n if self.patch_len == 25 and self.patch_hop == 13 and file_frames == 51:\n num_instances_per_file = 3\n else:\n num_instances_per_file = np.maximum(1, 1 + int(np.ceil((file_frames - self.patch_len - 1) / self.patch_hop)))\n\n return num_instances_per_file", "def fileCounter(directory):", "def gather_counts(directory):\n counts_un = defaultdict(int)\n counts_bi = defaultdict(int)\n counts_tri = defaultdict(int)\n prev_prev = \"<s>\"\n prev = \"<s>\"\n for filename in os.listdir(f\"./{directory}\"):\n if \".DS_Store\" in filename:\n continue\n with open(f\"./{directory}/{filename}\", \"r\") as f:\n for line in f:\n line = line.strip()\n if len(line) == 0:\n continue\n counts_un[line+\"\\n\"] += 1\n counts_bi[prev+\"\\n\"+line+\"\\n\"] += 1\n counts_tri[prev_prev+\"\\n\"+prev+\"\\n\"+line+\"\\n\"] += 1\n prev_prev = prev\n prev = line\n counts_un[\"</s>\\n\"] += 2\n counts_bi[\"</s>\\n</s>\\n\"] += 1\n counts_bi[prev+\"\\n\"+\"</s>\\n\"] += 1\n counts_tri[prev_prev+\"\\n\"+prev+\"\\n\" + \"</s>\\n\"] += 1\n counts_tri[prev+\"\\n</s>\\n</s>\\n\"] += 1\n return counts_un, counts_bi, counts_tri", "def n_subfile(self):\n 
self.assert_is_dir_and_exists()\n n = 0\n for _ in self.select_file(recursive=False):\n n += 1\n return n", "def parse_episode_part(filename):\n print_info('Extracting part num from {0}'.format(filename))\n baseline = ord('a')\n\n for regex in EPISODE_PART_REGEXS:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_part = m.group('Part').lower()\n print_info('Extracted Part: {0}'.format(extracted_part))\n\n # Convert into int\n part_num = ord(extracted_part) - baseline + 1\n return part_num\n\n return 0", "def fileCount(self):\n pass", "def _calculateIterations(self):\n #iterations = self.nb_images/self.batchsize\n imgs = self.protofile.nb_test()\n batch = self.protofile.batch_test()\n iterations = imgs/batch\n if imgs % batch != 0:\n iterations += 1\n return iterations", "def find_n(self):\n metadata_files = [\n file for file in self.cfg[\"input_files\"]\n if \"tas/metadata.yml\" in file\n ]\n self.cfg[\"N\"] = {}\n for meta_file in metadata_files:\n n_identifyer = meta_file.split(\"/tas/\")[0].split(\"/tas_\")[-1]\n metadata = group_metadata(get_cfg(meta_file).values(), \"dataset\")\n self.cfg[\"N\"][n_identifyer] = len(metadata.keys()) - 1", "def countsubcatchments(inputfilename=FileSettings.settingsdict['inputfilename']):\r\n global count\r\n with open(inputfilename, 'r') as swmmput:\r\n contents = swmmput.readlines()\r\n count = len(contents)\r\n return(count)", "def countDataSize(self,filename):\n \n try:\n d = h5py.File(filename,'r')\n except:\n print(filename)\n return \n\n N = 0\n scan_edges = d['level2/Statistics/scan_edges'][:]\n for (start,end) in scan_edges:\n N += (end-start)//self.offsetLen * self.offsetLen\n d.close()\n\n N = N*self.Nfeeds\n\n self.chunks += [[int(self.Nsamples), int(self.Nsamples+N)]]\n self.datasizes += [int(N/self.Nfeeds)]\n self.Nsamples += int(N)", "def get_number_of_measurement(self):\n used_fragments = set()\n counter = 0\n for fragment in self.observed_fragments:\n num_of_isotope = 0\n used_counter = 0\n for i in self.mdv[fragment]:\n num_of_isotope = num_of_isotope + 1\n if self.mdv[fragment][i]['use'] == 'use':\n\n counter = counter + 1\n used_counter = used_counter + 1\n if num_of_isotope == used_counter:\n used_fragments.add(fragment)\n return counter-len(used_fragments)", "def numberFiles(self):\n with open(self.inputfile) as fin:\n for n, _ in enumerate(fin, start=1): pass\n self.n = n\n return self.n", "def analyze_files(self):\n for file in os.listdir(self.directory):\n if file[-3:] == (\".py\"):\n fopen = open(os.path.join(self.directory, file), \"r\")\n try:\n if not (py_file := fopen):\n raise FileNotFoundError\n\n with py_file: # close file after opening\n class_count: int = 0\n fun_count: int = 0\n l_count: int = 0\n ch_count: int = 0\n for line in py_file: # calculate values for the file\n if line.strip().startswith(\"class \"):\n class_count = class_count+1\n elif line.strip().startswith(\"def \"):\n fun_count = fun_count+1\n\n l_count = l_count+1\n ch_count = ch_count+len(line)\n\n self.files_summary[str(os.path.join(self.directory, file))] = {\"class\": class_count, \"function\": fun_count, \"line\": l_count,\n \"char\": ch_count}\n except FileNotFoundError:\n print(f\"File {py_file} is not found or can not be opened\")\n fopen.close()", "def get_faces_nr(self):\r\n\r\n logger.debug('Getting number of faces in each frame')\r\n\r\n if len(self.tracked_faces) == 0:\r\n\r\n # Try to load YAML file\r\n\r\n if os.path.exists(self.track_path):\r\n\r\n print 'Loading YAML file with tracking results'\r\n 
logger.debug('Loading YAML file with tracking results')\r\n\r\n with open(self.track_path) as f:\r\n\r\n self.tracked_faces = yaml.load(f)\r\n\r\n print 'YAML file with tracking results loaded'\r\n logger.debug('YAML file with tracking results loaded')\r\n\r\n else:\r\n\r\n print 'Warning! No tracking results found!'\r\n logger.warning('No tracking results found!')\r\n\r\n return\r\n\r\n self.faces_nr = {}\r\n\r\n for segment_dict in self.tracked_faces:\r\n\r\n frame_list = segment_dict[c.FRAMES_KEY]\r\n\r\n for frame_dict in frame_list:\r\n\r\n frame_name = frame_dict[c.SAVED_FRAME_NAME_KEY]\r\n\r\n if frame_name in self.faces_nr:\r\n\r\n self.faces_nr[frame_name] += 1\r\n\r\n else:\r\n\r\n self.faces_nr[frame_name] = 1\r\n\r\n # Save YAML file\r\n\r\n utils.save_YAML_file(self.faces_nr_path, self.faces_nr)", "def count_exsiting_data(target_dir, check_all_number=True):\n num_color = len(glob.glob(osp.join(target_dir, '*', 'color', '*png')))\n\n if not check_all_number:\n num_color\n\n num_depth = len(glob.glob(osp.join(target_dir, '*', 'depth', '*npy')))\n num_camera_info = len(glob.glob(osp.join(\n target_dir, '*', 'camera_info', '*yaml')))\n\n assert num_color == num_depth == num_camera_info,\\\n '{} num_color:{} num_depth:{}, num_camera_info:{}'.format(\n target_dir, num_color, num_depth, num_camera_info)\n\n return num_color", "def getFileCount(self) -> int:\n ...", "def day_03_a() -> int:\n return len(get_visited_houses(read_instructions('aoc/aoc2015/input/03A.txt')))", "def compute_way(episode):\n episode_classes, _ = tf.unique(episode.train_labels)\n way = tf.size(episode_classes)\n return way", "def calculate_number_of_segments(self):\n return sum(len(eg.transcript_file.segments) for eg in self.exemplars)", "def count_len(self):\n total = 0\n for filename in self.filenames:\n f = open(os.path.join(self.directory, filename))\n line_count = 0\n for _ in f:\n line_count += 1\n if line_count < self.window_size:\n continue\n else:\n total += line_count - self.window_size + 1\n return total", "def numberFiles(self):\n return self.n", "def total_number():\r\n total_number = 0\r\n file_read = read_file()\r\n for key in file_read:\r\n total_number = total_number + len(file_read[key])\r\n return total_number", "def _find_epochs(self, history):\n \n epoch_count = len(history.history['val_loss'])\n\n return epoch_count", "def test(self, filename):\n hit = 0\n total = 0\n n = self.n\n for sent in open(filename):\n samp = sent.rstrip('\\n')\n# samp = '~' + samp + '~' \n for i in range(len(samp) - n):\n total = total + 1\n prev = samp[i:i + n - 1]\n pred = self.pred(prev)\n if pred == samp[i + n - 1]:\n hit = hit + 1\n \n return hit/total", "async def num_fomod_files_to_install(self):\n n = 0\n for f in self.fomod.files_to_install:\n if f.type == \"folder\":\n n += await self.count_folder_contents(f.source)\n else:\n n += 1\n\n return n", "def compute_num_tracks(x_offset: int, y_offset: int,\n x: int, y: int, track_info: Dict[int, int]):\n x_diff = x - x_offset\n y_diff = y - y_offset\n result = 0\n for length, num_track in track_info.items():\n if x_diff % length == 0 and y_diff % length == 0:\n # it's the tile\n result += num_track\n return result", "def count():", "def _repetitions(webpage_tree):\n\n metadata = {\n \"runs\": len(webpage_tree),\n \"max_resources_run\": 0,\n # a huge number\n \"min_resources_run\": time() * 99999,\n \"avg_resources_run\": 0,\n \"static_resources\": 0,\n \"dynamic_resources\": 0,\n \"files\": {},\n }\n data = {}\n\n if len(webpage_tree) > 0:\n for run in 
webpage_tree:\n files_in_run = len(webpage_tree[run])\n if metadata[\"min_resources_run\"] > files_in_run:\n metadata[\"min_resources_run\"] = files_in_run\n if metadata[\"max_resources_run\"] < files_in_run:\n metadata[\"max_resources_run\"] = files_in_run\n metadata[\"avg_resources_run\"] = metadata[\"avg_resources_run\"] + files_in_run\n for f in webpage_tree[run]:\n filename = f.split(os.path.sep)[-1]\n if filename not in data:\n metadata[\"files\"][filename] = {\n \"reps\": 1,\n }\n data[filename] = {\n \"reps\": 1,\n \"hash\": webpage_tree[run][f],\n }\n else:\n metadata[\"files\"][filename][\"reps\"] = (\n metadata[\"files\"][filename][\"reps\"] + 1\n )\n data[filename][\"reps\"] = data[filename][\"reps\"] + 1\n\n metadata[\"avg_resources_run\"] = int(\n metadata[\"avg_resources_run\"] / metadata[\"runs\"]\n )\n\n for f in data:\n if metadata[\"files\"][f][\"reps\"] >= (metadata[\"runs\"] * _REP_TRESHOLD):\n metadata[\"static_resources\"] = (\n metadata[\"static_resources\"] + metadata[\"files\"][f][\"reps\"]\n )\n else:\n metadata[\"dynamic_resources\"] = (\n metadata[\"dynamic_resources\"] + metadata[\"files\"][f][\"reps\"]\n )\n\n return metadata, data", "def _get_size(self):\n sizes = [series_episode.size for series_episode in SeriesEpisode.objects.filter(series=self)]\n return reduce(lambda x, y: x + y, sizes) if len(sizes) > 0 else 0", "def iterations_in_epoch(self):\n if self._cur_epoch_itr is not None:\n return self._cur_epoch_itr.count\n elif self._next_epoch_itr is not None:\n return self._next_epoch_itr.count\n return 0", "def Results(self):\r\n try:\r\n numOfFiles = 0\r\n file = str(filenames).split(',')\r\n for file in filenames:\r\n if os.path.exists(file):\r\n numOfFiles += 1\r\n print('%d' % numOfFiles + ' videos resized!')\r\n info = 'totaltime: ' + str(datetime.timedelta(seconds=totaltime))\r\n print(info)\r\n except NameError:\r\n info = ''\r\n print('no totaltime passed')\r\n return info", "def _get_duration(self):\n durations = [series_episode.duration for series_episode in SeriesEpisode.objects.filter(series=self)]\n return reduce(lambda x, y: x + y, durations) if len(durations) > 0 else 0", "def _analyze(self):\n for _, self.subdirs, files in os.walk(self.path):\n if self.p.sort:\n self.subdirs.sort()\n files.sort()\n for f in files:\n self._analyze_file(fileextlow(f), f)\n break # stop walk() from entering subdirectories\n\n self.p.nr_dirs += 1\n if self.lossless or self.compressed or self.videos:\n if self.lossless or self.compressed:\n if not self.images:\n if self.p.warn_covers:\n print(f\"{W}{self.path}{R}: no cover file\")\n self.p.nr_no_cover += 1\n elif not have_valid_cover_name(self.images):\n if self.p.warn_covers:\n print(f\"{W}{self.path}{R}: wrong cover names\")\n self.p.nr_wrong_cover_name += 1\n if self.lossless:\n if self.compressed:\n self.p.nr_mixed_lossless_compressed += 1\n else:\n self.p.nr_lossless_dirs += 1\n\n if self.cue:\n if not self.lossless:\n if self.p.warn_cue:\n print(f\"{W}{self.path}{R}: cue but no lossless files\")\n self.p.nr_lossy_cue += 1\n elif not self.compressed:\n if len(self.cue) == 1:\n self.p.nr_cue += 1\n else:\n if self.p.warn_cue:\n print(f\"{W}{self.path}{R}: {len(self.cue)} cue files\")\n self.p.nr_multiple_cue += 1\n\n self.p.nr_media_dirs += 1\n self.p.nr_lossless += len(self.lossless)\n self.p.nr_compressed += len(self.compressed)\n self.p.nr_video_files += len(self.videos)\n self.p.nr_ignored += self.ignored\n self.p.unknown.update(self.unknown)\n else:\n if self.images and not self.subdirs:\n 
self.p.nr_only_images += 1\n else:\n self.p.nr_no_media_dirs += 1", "def get_number_of_measurement(self):\n num_of_meas = 0\n for time in self.mdvtc.keys():\n num_of_meas = num_of_meas + self.mdvtc[time].get_number_of_measurement()\n #\n return num_of_meas", "def number_episodes_with_special_guest(self) -> int:\n return len([ep for ep in self.entries if SPECIAL_GUEST in ep.summary])", "def day_03_b() -> int:\n instructions = read_instructions('aoc/aoc2015/input/03A.txt')\n santa_instructions = ''.join(w for i, w in enumerate(instructions) if is_odd(i))\n robo_instructions = ''.join(w for i, w in enumerate(instructions) if is_even(i))\n\n santa_houses = get_visited_houses(santa_instructions)\n robo_houses = get_visited_houses(robo_instructions)\n\n return len(merge_dicts(santa_houses, robo_houses))", "def part2(fname: dict) -> int:\n return sum(len(set.intersection(*[set(pax) for pax in group])) for group in get_data(fname))", "def get_num_words_spoken_by_character_per_episode(content):\n d = defaultdict(Counter)\n reader_list = csv.DictReader(content.splitlines())\n for row in reader_list:\n words = row['Line'].strip().split()\n d[row['Character']][row['Episode']] += len(words)\n return d", "def countDataSize(self,filename):\n \n d = h5py.File(filename,'r')\n features = d['spectrometer/features'][:]\n select = self.selectData(features.astype(float), self.ifeature, d)\n N = len(features[select])\n d.close()\n\n N = (N//self.offsetLen) * self.offsetLen\n\n N = N*self.Nfeeds\n\n self.chunks += [[int(self.Nsamples), int(self.Nsamples+N)]]\n self.datasizes += [int(N/self.Nfeeds)]\n self.Nsamples += int(N)", "def per_seq_dot_files(self):\n # The output directory #\n directory = DirectoryPath(self.a.out_dir+'per_seq_ontology/')\n directory.create_if_not_exists()\n # Main loop #\n for seq in self.a.seq_to_counts:\n dot_path = directory + seq + '.dot'\n pdf_path = directory + seq + '.pdf'\n counts = self.a.seq_to_counts[seq]\n counts = {\"ENVO:%08d\"%k:v for k,v in counts.items()}\n total = sum(counts.values())\n counts = {k:v/total for k,v in counts.items()}\n envos = counts.keys()\n graph = self.a.ontology.get_subgraph(envos)\n graph = self.a.ontology.add_weights(graph, counts)\n graph = self.a.ontology.add_style(graph)\n self.a.ontology.write_to_dot(graph, dot_path)\n self.a.ontology.add_legend(dot_path)\n self.a.ontology.draw_to_pdf(dot_path, pdf_path)", "def count_total(self):\n total = 0\n rpk_total = 0.0\n with open(self.filename, 'rU') as my_htseq:\n for line in my_htseq:\n if '_' not in line:\n line = line.rstrip('\\n').split('\\t')\n ensg_id = line[0]\n gene_len = len(set(self.gtf.gene_coords[ensg_id])) / 1000.0\n count = int(line[1])\n total += count\n rpk_total += float(count/gene_len)\n return total, rpk_total", "def get_total_link_num(th_object, start, end, filename, path):\n ln = th_object.get_link_nums(start, end, filename)\n prev = \"\"\n with open(path, 'w') as f:\n f.write(\"Time,No_Link\\n\")\n for k, v in ln.items():\n tmp = str(k.time()).split('.')[0]\n if prev != tmp:\n f.write(tmp + \",\" + str(v) + \"\\n\")\n prev = tmp", "def total_hpwl(file_name):\r\n\r\n nodes = {}\r\n netsx = {}\r\n netsy = {}\r\n counter = 0\r\n hpwl = 0\r\n\r\n with open(file_name + \".nodes\") as f:\r\n for i, line in enumerate(f):\r\n\r\n line = line.strip()\r\n if line:\r\n if re.match(r'[a-z]{1}[0-9]+', line.split()[0]):\r\n if line.split()[0] not in nodes:\r\n nodes[line.split()[0]] = []\r\n nodes[line.split()[0]].append(line.split()[1])\r\n nodes[line.split()[0]].append(line.split()[2])\r\n\r\n 
with open(file_name + \".pl\") as f:\r\n for i, line in enumerate(f):\r\n\r\n line = line.strip()\r\n if line:\r\n if re.match(r'[a-z]{1}[0-9]+', line.split()[0]):\r\n nodes[line.split()[0]].append(line.split()[1])\r\n nodes[line.split()[0]].append(line.split()[2])\r\n\r\n with open(file_name + \".nets\") as f:\r\n for i, line in enumerate(f):\r\n\r\n line = line.strip()\r\n if line:\r\n if \"NetDegree\" in line:\r\n num_of_nodes = int(line.split()[2])\r\n net_name = \"n\" + str(counter)\r\n counter += 1\r\n netsx[net_name] = []\r\n netsy[net_name] = []\r\n elif re.match(r'[a-z]{1}[0-9]+', line.split()[0]):\r\n if net_name in netsx:\r\n if len(netsx[net_name]) == 0:\r\n netsx[net_name].append(int(nodes[line.split()[0]][2]))\r\n netsx[net_name].append(int(nodes[line.split()[0]][2]) + int(nodes[line.split()[0]][0]))\r\n\r\n netsy[net_name].append(int(nodes[line.split()[0]][3]))\r\n netsy[net_name].append(int(nodes[line.split()[0]][3]) + int(nodes[line.split()[0]][1]))\r\n else:\r\n if int(nodes[line.split()[0]][2]) < netsx[net_name][0]:\r\n netsx[net_name][0] = int(nodes[line.split()[0]][2])\r\n\r\n if int(nodes[line.split()[0]][2]) + int(nodes[line.split()[0]][0]) > netsx[net_name][1]:\r\n netsx[net_name][1] = int(nodes[line.split()[0]][2]) + int(nodes[line.split()[0]][0])\r\n\r\n if int(nodes[line.split()[0]][3]) < netsy[net_name][0]:\r\n netsy[net_name][0] = int(nodes[line.split()[0]][3])\r\n\r\n if int(nodes[line.split()[0]][3]) + int(nodes[line.split()[0]][1]) > netsy[net_name][1]:\r\n netsy[net_name][1] = int(nodes[line.split()[0]][3]) + int(nodes[line.split()[0]][1])\r\n\r\n for net in netsx:\r\n hpwl += float(netsx[net][1] - netsx[net][0] + netsy[net][1] - netsy[net][0])\r\n\r\n return (hpwl)", "def pass1(self, verbose):\n \n for root, dirs, files in os.walk(self.dir_to_check, topdown=False):\n t_size = 0\n for f in files:\n new_f = os.path.join(root,f) #complete path in case of homonyms\n size = os.path.getsize(new_f)\n t_size += size\n self.cache[new_f] = HumanReadableSize(size)\n t_size += sum ([self.cache[os.path.join(root,d)].val for d in dirs])\n self.cache[root] = HumanReadableSize(t_size)\n if verbose:\n print ('.................... 
Computing size of {}!'.format(root))\n \n #print (self.cache) #debugging", "def repetitions_analysis(tree, selector=None):\n\n metadata = {}\n data = {}\n for extension_dir in tree:\n if selector:\n if extension_dir not in selector:\n continue\n extension = extension_dir.split(os.path.sep)[-1]\n metadata[extension] = {}\n data[extension] = {}\n for webpage_dir in tree[extension_dir]:\n webpage = webpage_dir.split(os.path.sep)[-1]\n metadata[extension][webpage], data[extension][webpage] = _repetitions(\n tree[extension_dir][webpage_dir]\n )\n return metadata, data", "def episodeNumber(self):\n return self.index", "def ingsuffix(self):\n file = self.read1()\n count = 0\n for line in file:\n line = line.strip()\n string = re.sub(\"[^0-9a-zA-Z]\", \" \", line).split(\" \")\n for s_i in string:\n if s_i.endswith(\"ing\"):\n count = count + 1\n self.print(count)\n logging.debug(\"Starting with to\")\n return count", "def episode_step(self):\n self.nsteps += 1", "def correct_counts():\n articles = mongo.db[app.config['ARTICLES_COLLECTION']]\n monitors = mongo.db[app.config['MONITORS_COLLECTION']]\n unique = articles.distinct('feed_source', dict())\n for link in unique:\n count = articles.count({'feed_source': link})\n monitors.update({'metadata.rss_link': link}, {'$set': {'hits': count}})", "def _get_observation_count(self):\n observation_count = 0\n for sequence in self.seq_list:\n observation_count += sequence.shape[0] \n \n return observation_count", "def count_data_group_components( data_group: Path,\n data_extensions: list,\n report_extensions: list,\n script_extensions: list,\n ):\n\n element_count ={\n 'data':0,\n 'report':0,\n 'script':0\n }\n\n # For each Raw data file extract count the number of each data elements it has\n for fil in data_group.iterdir():\n if not fil.name.startswith('.'):\n if fil.suffix in data_extensions:\n element_count['data'] += 1\n if fil.suffix in report_extensions:\n element_count['report'] += 1\n if fil.suffix in script_extensions:\n element_count['script'] += 1\n\n return element_count", "def checkSum():\n val = 0\n for ext in EXTENSION_GLOBS:\n for f in glob.glob (ext):\n stats = os.stat(f)\n val += stats[stat.ST_SIZE] + stats[stat.ST_MTIME]\n return val", "def get_number_watched(self):\n movies_watched = 0\n for movie in self.movies:\n if movie.is_watched:\n movies_watched += 1\n return movies_watched", "def count_timepoints(sc, session, files):\n tuples = zip(range(len(files)), files)\n files_sc = sc.parallelize(tuples)\n\n def count_planes(kv):\n index, path2 = kv\n try:\n from ScanImageTiffReader import ScanImageTiffReader\n img = ScanImageTiffReader(path2).data()\n except Exception:\n import tifffile\n img = tifffile.imread(path2)\n return img.shape[0]\n\n data2 = files_sc.map(count_planes).collect()\n frame_numbers = np.array(data2)\n vol_numbers = frame_numbers / len(session.fieldMask)\n return vol_numbers.astype(int)", "def get_folder_total(path):\n files = os.listdir(path)\n pythonfiles = ['%s/%s' % (path, filename) for filename in files if filename[-3:] == '.py']\n total = { 'net': 0, 'total': 0, 'nonblank': 0, 'num_inputs':0 }\n for filename in pythonfiles:\n with open(filename, 'r') as thisfile:\n blob = thisfile.read()\n # print filename\n thisloc = loc(blob)\n for k, v in thisloc.items():\n total[k] += v\n return total", "def n_file(self):\n self.assert_is_dir_and_exists()\n n = 0\n for _ in self.select_file(recursive=True):\n n += 1\n return n", "def load_data(fout, dir_in, run):\n\n path = dir_in + '/' + run + '/kdst/'\n files_all = [path + f 
for f in os.listdir(path) \\\n if os.path.isfile( os.path.join(path, f) )]\n dst = load_dsts(files_all, \"DST\", \"Events\")\n time_run = dst.time.mean()\n\n # count number of number of unique entries\n unique_events = ~dst.event.duplicated()\n #unique_events = dst.event.nunique()\n nunique_events = dst.event.nunique()\n\n #print(nunique_events)\n\n num_of_S2s = np.size (unique_events)\n num_of_evts = np.count_nonzero(unique_events)\n\n print(num_of_evts)\n fout.write(f\"dst_entries {str(len(dst))}\\n\")\n fout.write(f\"time_run {time_run}\\n\")\n fout.write(f\"s2_tot {num_of_S2s}\\n\")\n fout.write(f\"evt_tot {num_of_evts}\\n\")\n\n # compute number of s1 and s2\n df = dst[~dst.time.duplicated()]\n tot_ev = df.event.nunique()\n s1_num = df.nS1.values\n s2_num = df.nS2.values\n fout.write(f\"num_of_ev_check {tot_ev}\\n\")\n\n s1_0 = np.count_nonzero(s1_num == 0)\n s1_1 = np.count_nonzero(s1_num == 1)\n s1_2 = np.count_nonzero(s1_num == 2)\n s1_3 = np.count_nonzero(s1_num == 3)\n s1_4 = np.count_nonzero(s1_num == 4)\n s1_5 = np.count_nonzero(s1_num == 5)\n s1_6 = np.count_nonzero(s1_num == 6)\n\n s2_0 = np.count_nonzero(s2_num == 0)\n s2_1 = np.count_nonzero(s2_num == 1)\n s2_2 = np.count_nonzero(s2_num == 2)\n s2_3 = np.count_nonzero(s2_num == 3)\n s2_4 = np.count_nonzero(s2_num == 4)\n s2_5 = np.count_nonzero(s2_num == 5)\n s2_6 = np.count_nonzero(s2_num == 6)\n s2_7 = np.count_nonzero(s2_num == 7)\n s2_8 = np.count_nonzero(s2_num == 8)\n\n fout.write(f'eff_0s1 {s1_0 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_0s1_u {error_eff(tot_ev, s1_0 /tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_1s1 {s1_1 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_1s1_u {error_eff(tot_ev, s1_1 /tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_2s1 {s1_2 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_2s1_u {error_eff(tot_ev, s1_2 /tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_3s1 {s1_3 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_3s1_u {error_eff(tot_ev, s1_3 /tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_4s1 {s1_4 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_4s1_u {error_eff(tot_ev, s1_4 /tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_5s1 {s1_5 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_5s1_u {error_eff(tot_ev, s1_5 /tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_6s1 {s1_6 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_6s1_u {error_eff(tot_ev, s1_6 /tot_ev)*100:.5f}\\n')\n\n# s2 eff\n fout.write(f'eff_0s2 {s2_0 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_0s2_u {error_eff(tot_ev, s2_0/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_1s2 {s2_1 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_1s2_u {error_eff(tot_ev, s2_1/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_2s2 {s2_2 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_2s2_u {error_eff(tot_ev, s2_2/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_3s2 {s2_3 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_3s2_u {error_eff(tot_ev, s2_3/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_4s2 {s2_4 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_4s2_u {error_eff(tot_ev, s2_4/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_5s2 {s2_5 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_5s2_u {error_eff(tot_ev, s2_5/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_6s2 {s2_6 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_6s2_u {error_eff(tot_ev, s2_6/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_7s2 {s2_7 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_7s2_u {error_eff(tot_ev, s2_7/tot_ev)*100:.5f}\\n')\n\n fout.write(f'eff_8s2 {s2_8 /tot_ev*100:.5f}\\n')\n fout.write(f'eff_8s2_u {error_eff(tot_ev, s2_8/tot_ev)*100:.5f}\\n')\n\n\n return dst", "def doCountTask(filename):\n f = open(filename)\n dataDict = 
json.load(f)\n weridCount = 0\n unweridCount = 0\n for key in dataDict:\n if dataDict[key][\"weird\"]:\n weridCount += 1\n else:\n unweridCount += 1\n return [unweridCount, weridCount]", "def __find_session_num():\n\n # figure out number of sessions that have already been recorded\n for (root, dirs, files) in os.walk(CONFIG['recording_path'], topdown=True):\n nextSession = 1\n\n for d in dirs:\n try:\n\n if int(d.split('Session')[1]) >= nextSession:\n nextSession = int(d.split('Session')[1]) + 1\n\n except ValueError as verr:\n print('Directory ' + str(d) + ' does not end in a number!')\n\n if nextSession > 99:\n return -1\n return nextSession", "def _adjacent_blob_size(self, pos, board, visited) -> int:\n col, row = pos[0], pos[1]\n total = 0\n total += self._undiscovered_blob_size((col - 1, row), board, visited)\n total += self._undiscovered_blob_size((col, row - 1), board, visited)\n total += self._undiscovered_blob_size((col + 1, row), board, visited)\n total += self._undiscovered_blob_size((col, row + 1), board, visited)\n return total", "def count(train_dir):\r\n path = train_dir\r\n count = 0\r\n for fn in os.listdir(path): #fn 表示的是文件名\r\n count = count + 1\r\n return count", "def count(self):\n count = 0\n # get list of intermediate directories\n dirs = []\n self.__get_list_of_interm_dirs(dirs)\n # count elements in sub-directories\n for name in dirs:\n for element in os.listdir('%s/%s' % (self.path, name)):\n if _ELEMENT_REGEXP.match(element):\n count += 1\n return count", "def resultCounter(detections):\n counter = 0\n for attribute, value in classIterator(detections):\n if 'crease' in attribute:\n counter += len(value)\n return counter", "def _get_fsevent_image_files(self):\r\n # Print the header columns to the output file\r\n Output.print_columns(self.l_all_fsevents)\r\n \r\n scan_path_spec = None\r\n scanner = source_scanner.SourceScanner()\r\n scan_context = source_scanner.SourceScannerContext()\r\n scan_context.OpenSourcePath(self.meta['source'])\r\n\r\n scanner.Scan(\r\n scan_context,\r\n scan_path_spec=scan_path_spec\r\n )\r\n\r\n for file_system_path_spec, file_system_scan_node in scan_context._file_system_scan_nodes.items():\r\n t_files = 0\r\n self.all_files_count = 0\r\n self.error_file_count = 0\r\n self.all_records_count = 0\r\n self.parsed_file_count = 0\r\n \r\n try:\r\n location = file_system_path_spec.parent.location\r\n except:\r\n location = file_system_path_spec.location\r\n \r\n print(\" Processing Volume {}.\\n\".format(location))\r\n\r\n fs_event_path_spec = path_spec_factory.Factory.NewPathSpec(\r\n file_system_path_spec.type_indicator,\r\n parent=file_system_path_spec.parent,\r\n location=\"/.fseventsd\"\r\n )\r\n\r\n file_entry = resolver.Resolver.OpenFileEntry(\r\n fs_event_path_spec\r\n )\r\n \r\n if file_entry != None:\r\n\r\n t_files = file_entry.number_of_sub_file_entries\r\n for sub_file_entry in file_entry.sub_file_entries:\r\n if sub_file_entry.name == 'fseventsd-uuid':\r\n t_files -= 1\r\n\r\n self.time_range_src_mod = []\r\n prev_mod_date = \"Unknown\"\r\n prev_last_wd = 0\r\n c_last_wd = 0\r\n counter = 0\r\n\r\n # Uses file mod dates to generate time ranges by default unless\r\n # files are carved or mod dates lost due to exporting\r\n self.use_file_mod_dates = True\r\n\r\n # Iterate through each file in supplied fsevents dir\r\n for sub_file_entry in file_entry.sub_file_entries:\r\n if sub_file_entry.name == 'fseventsd-uuid':\r\n continue\r\n # Variables\r\n counter += 1\r\n self.all_files_count += 1\r\n\r\n # Call the progress bar which 
shows parsing stats\r\n progress(counter, t_files)\r\n\r\n buf = \"\"\r\n\r\n # Name of source fsevent file\r\n self.src_filename = sub_file_entry.name\r\n self.src_fullpath = self.meta['source'] + \": \" + location + sub_file_entry.path_spec.location\r\n\r\n stat_object = sub_file_entry.GetStat()\r\n\r\n # UTC mod date of source fsevent file\r\n self.m_time = datetime.datetime.fromtimestamp(\r\n stat_object.mtime).strftime(\r\n '%Y-%m-%d %H:%M:%S') + \" [UTC]\"\r\n\r\n # Regex to match against source fsevent log filename\r\n regexp = re.compile(r'^.*[\\][0-9a-fA-F]{16}$')\r\n\r\n # Test to see if fsevent file name matches naming standard\r\n # if not, assume this is a carved gzip\r\n if len(self.src_filename) == 16 and regexp.search(self.src_filename) is not None:\r\n c_last_wd = int(self.src_filename, 16)\r\n self.time_range_src_mod = prev_last_wd, c_last_wd, prev_mod_date, self.m_time\r\n self.is_carved_gzip = False\r\n else:\r\n self.is_carved_gzip = True\r\n file_object = sub_file_entry.GetFileObject()\r\n\r\n compressedFile = io.StringIO.BytesIO()\r\n compressedFile.write(file_object.read())\r\n compressedFile.seek(0)\r\n # Attempt to decompress the fsevent archive\r\n try:\r\n with self.skip_gzip_check():\r\n self.files = gzip.GzipFile(fileobj=compressedFile, mode='rb')\r\n buf = self.files.read()\r\n\r\n except Exception as exp:\r\n self.logfile.write(\r\n \"%s\\tError: Error while decompressing FSEvents file.%s\\n\" % (\r\n self.src_filename,\r\n str(exp)\r\n )\r\n )\r\n self.error_file_count += 1\r\n continue\r\n\r\n # If decompress is success, check for DLS headers in the current file\r\n dls_chk = FSEventHandler.dls_header_search(self, buf, self.src_filename)\r\n\r\n # If check for DLS returns false, write information to logfile\r\n if dls_chk is False:\r\n self.logfile.write('%s\\tInfo: DLS Header Check Failed. Unable to find a '\r\n 'DLS header. Unable to parse File.\\n' % (self.src_filename))\r\n # Continue to the next file in the fsevents directory\r\n self.error_file_count += 1\r\n continue\r\n\r\n self.parsed_file_count += 1\r\n\r\n # Accounts for fsevent files that get flushed to disk\r\n # at the same time. 
Usually the result of a shutdown\r\n # or unmount\r\n if not self.is_carved_gzip and self.use_file_mod_dates:\r\n prev_mod_date = self.m_time\r\n prev_last_wd = int(self.src_filename, 16)\r\n\r\n # If DLSs were found, pass the decompressed file to be parsed\r\n FSEventHandler.parse(self, buf)\r\n \r\n else:\r\n print('Unable to process volume or no fsevent files found')\r\n continue\r\n\r\n print('\\n\\n All Files Attempted: {}\\n All Parsed Files: {}\\n Files '\r\n 'with Errors: {}\\n All Records Parsed: {}'.format(\r\n self.all_files_count,\r\n self.parsed_file_count,\r\n self.error_file_count,\r\n self.all_records_count))", "def main_func(sources):\n art_count = 0\n word_count = 0\n for source in sources:\n titles = get_articles(source)\n art_count += len(titles)\n word_count += count_word('trump', titles)\n\n return (word_count, art_count)", "def size(**kwargs):\n mpath = kwargs['path']\n if not os.path.exists(mpath):\n print(\"Invalid path\")\n sys.exit(-1)\n\n # Basic Counter variables\n foldercount = 0\n count = 0\n\n # List containing the collected information\n elist = []\n\n # Indices for the 2 dimensional list\n iext = 0\n icount = 1\n icsums = 2\n imins = 3\n imaxs = 4\n\n start_depth = len(mpath.split('/')) - 2\n depth = 0\n\n for root, dirs, files in os.walk(mpath, topdown=True):\n\n indircount = 0\n for name in files:\n pathfile = os.path.join(root, name)\n indircount += 1\n # Extension\n ext = (os.path.splitext(name)[1]).lower()[1:]\n if ext == '': ext = 'no ext'\n # Size\n size = os.stat(pathfile).st_size\n\n # Folder depth\n cdepth = len(os.path.abspath(pathfile).split('/')) - start_depth\n if depth < cdepth: depth = cdepth\n\n # Getting the index of the current file extension using python built-in functions\n try:\n index = list(zip(*elist))[iext].index(ext)\n except IndexError:\n # The list is empty\n index = -1\n except ValueError:\n # The list doesn't contain the extension\n index = -1\n\n if index >= 0:\n elist[index][icount] += 1\n elist[index][icsums] += size\n if size < elist[index][imins]: elist[index][imins] = size\n if size > elist[index][imaxs]: elist[index][imaxs] = size\n\n else: # Adding the new extension in the list\n elist.append([ext, 1, size, size, size])\n count += indircount\n\n # Updating the directory count\n for name in dirs:\n foldercount += 1\n\n # Mapping arguments with indices in the list\n dict = {\n 'ext': iext,\n 'count': icount,\n 'size': icsums\n }\n\n # Sorting the list\n elist.sort(key=lambda x: x[dict.get(kwargs['sort'])], reverse=not kwargs['asc'])\n\n print(\"%d files in %d folders max depth: %s\\n\" % (count, foldercount, depth))\n if kwargs['human']:\n print(f\"{'Ext.':<8}{'Count':<13}{'Total':<10}{'Min':<11}{'Max':<13}{'Avg':<9}\")\n for l in elist:\n print(f\"{l[iext]:<7} {l[icount]:<12,d} {sizeformat(l[icsums]):<9} {sizeformat(l[imins]):<10} \\\n{sizeformat(l[imaxs]):<12} {sizeformat(l[icsums] / l[icount]):<9}\")\n else:\n print(f\"{'Ext.':<8}{'Count':<13}{'Total':<13}{'Min':<13}{'Max':<13}{'Avg':<2}\")\n for l in elist:\n print(f\"{l[iext]:<7} {l[icount]:<12,d} {l[icsums]:<12} {l[imins]:<12} {l[imaxs]:<12} \\\n{int(round(l[icsums] / l[icount], 0)):<12}\")", "def apply_to_all_files(basedir,func=lambda x: x,ext='.h5'):\n cnt = 0\n # iterate over all files in all subdirectories\n for root, dirs, files in os.walk(basedir):\n files = glob.glob(os.path.join(root,'*'+ext))\n # count files\n cnt += len(files)\n # apply function to all files\n for f in files :\n func(f)\n \n# if cnt > 2000:\n# break\n \n return cnt", "def 
compute_score(self, n_episodes=5):\n\n # Score is computed via aggregate over multiple episodes\n score = 0\n\n for _ in range(n_episodes):\n score += play_episode(self.model, self.env)\n\n return score / n_episodes", "def number_of_files_per_node(files, number_of_nodes):\n\n files_per_node = float(len(files))/float(number_of_nodes)\n if files_per_node > 0.:\n return int(math.floor(files_per_node))\n else:\n return int(math.ceil(files_per_node))", "def filesInSeries_determine():\n def du(path):\n \"\"\"disk usage in human readable format (e.g. '2,1GB')\"\"\"\n return subprocess.check_output(['du','-sh', path]).split()[0].decode('utf-8')\n\n def duRaw(path):\n root = Path(path)\n return sum(f.stat().st_size for f in root.glob('**/*') if f.is_file())\n\n series_uid = self.processDicomField(dcm_info, \"SeriesInstanceUID\")\n str_seriesMapFile = os.path.join(self.series_mapDir, '%s.json' % series_uid)\n\n try:\n with open(str_seriesMapFile, 'r') as f:\n d_seriesInfo = json.load(f)\n str_path = d_seriesInfo[series_uid]\n fileCount = len([n for n in os.listdir(str_path) \\\n if os.path.isfile(os.path.join(str_path, n))])\n str_dirSize = du(str_path)\n dirSizeRaw = duRaw(str_path)\n d_ret = {\n 'status': True,\n 'fileCount': fileCount,\n 'str_dirSize': str_dirSize,\n 'dirSizeRaw': dirSizeRaw\n }\n except:\n d_ret = {\n 'status': False,\n 'fileCount': -1,\n 'str_dirSize': \"unknown\",\n 'dirSizeRaw': -1\n }\n\n return d_ret", "def get_nb_frames_for_video(video_parts):\n train_or_test, classname, filename_no_ext, _ = video_parts\n generated_files = glob.glob(os.path.join(\"/data/niteshku001/Ravdess/data\", train_or_test, classname,\n filename_no_ext + '*.jpg'))\n return len(generated_files)", "def game_counts(n_back=20):\n all_models = gfile.Glob(os.path.join(MODELS_DIR, '*.meta'))\n model_filenames = sorted([os.path.basename(m).split('.')[0]\n for m in all_models], reverse=True)\n for m in model_filenames[:n_back]:\n games = gfile.Glob(os.path.join(SELFPLAY_DIR, m, '*.zz'))\n print(m, len(games))", "def collect_files_with_extensions(self, extension: str) -> List[str]:\n occurrences = []\n for position in os.listdir(self.directory):\n if os.path.isdir(position):\n for file in os.listdir(position):\n if os.path.isfile(os.path.join(position, file)) and file.endswith(\n extension\n ):\n occurrences.append(os.path.join(self.directory, position, file))\n return occurrences", "def get_num_of_images(self):", "def e_related_docs(self, files, storedfile=None):\n results = {}\n for f in files:\n with open(f) as df:\n daily_info = json.load(df)\n for country in daily_info:\n results.setdefault(country, {})\n for day in daily_info[country]:\n results[country].setdefault(day, 0)\n results[country][day] += daily_info[\n country][day][\"count\"]\n if storedfile:\n with open(storedfile, \"w\") as sf:\n json.dump(results, sf)\n return results", "def find_emoji_partial_multiFiles(self,bound_tuple):\n count_2_save=self.count_to_save\n save_period=count_2_save\n start=bound_tuple[0]\n limit=bound_tuple[1]\n emoji_hdf5_Info_File_address = '{}/info_{}_to_{}.hdf'.format(self.result_dir, start+1, start+limit)\n emoji_hdf5_Mat_File_address = '{}/matrix_{}_to_{}.hdf'.format(self.result_dir, start+1, start+limit)\n trace_working_file = '{}/taceWorking_{}_to_{}.txt'.format(self.result_dir, start+1, start+limit)\n \n my_col=self.get_collection()\n part_DB=my_col.find().skip(start).limit(limit)\n \n emojiList=self.emoji_list\n adjMat = np.zeros((len(emojiList), len(emojiList)), dtype = int) # The matrix containing the edges\n 
emojiCount=np.zeros((len(emojiList)), dtype = int) # The number of emoji in the tweet dataset\n heap_mat = np.zeros((len(emojiList), len(emojiList)), dtype = int) # The matrix containing the edges\n last_emoji_netIndex=0\n df_emoji_info = pd.DataFrame()\n df_emoji_heap = pd.DataFrame()\n count_tweet=0\n count_tweet_emoji=0\n count_total_seen_emoji=0\n count_new_emoji=0\n ####------------------------------------------------------######\n ####------------------------------------------------------######\n #### This is the part that the emoji extractor works.\n #### It reads each tweet and matches teh emoji unicodes.\n #### If the emoji unicode is in the text, it will be appended to the \"mentionedTogether\" list.\n print 'Start to extract emojis.....'\n for mytweet in part_DB:\n mentionedTogether=[] ## It stores the emojis detected from the current tweet (i.e. mytweet).\n mentionedTogether_index_in_Net=[] ## It stores the index of emojis. The indeices are defined based on the emojiList.\n mentionedTogether_position_in_Text=[] ## It stores the posision of emoji in the text for future work.\n count_tweet+=1\n if 'text' in mytweet:\n #count_tweet+=1\n for emoji in emojiList:\n emoji_str=emoji.replace('\\n','')\n match_all=re.finditer(emoji_str.decode('unicode-escape'),mytweet['text'])\n for match in match_all:\n count_total_seen_emoji+=1\n mentionedTogether.append(emoji)\n mentionedTogether_index_in_Net.append(emojiList.index(emoji))\n mentionedTogether_position_in_Text.append(int(match.start()))\n emojiCount[emojiList.index(emoji)]+=1\n\n \n if len(mentionedTogether)>0:\n ## Yoiu can uncomment the followings to see the tweets detected:\n #print 'tweet #', count_tweet, ': ', mytweet['text']\n #print mentionedTogether\n #print '-----------------------------------------------------'\n ##\n count_tweet_emoji+=1\n emoji_dict=emojiFunction.create_Emoji_info_Dictionary(mytweet,mentionedTogether, mentionedTogether_index_in_Net, \n mentionedTogether_position_in_Text)## creating the dictionary of info\n df_emoji_info = df_emoji_info.append(emoji_dict, ignore_index=True)## updating dataframe for info by emoji_info dictionary\n emoji_heap_dict=emojiFunction.create_Emoji_heap_Dictionary(count_tweet, count_tweet_emoji, count_total_seen_emoji,\n count_new_emoji, mytweet['lang'])## creating the dictionary for heap\n df_emoji_heap=df_emoji_heap.append(emoji_heap_dict, ignore_index=True)## updating dataframe for heap by heap dictionary\n \n if (len(mentionedTogether)>1):####### 2 Mentioned - If they are mentioned together they should be in this list\n #print count_tweet,': ',mentionedTogether_index_in_Net, '(NET) is/are mentioned in: ', mytweet['text']\n #print (mentionedTogether_position_in_Text, ' TEXT is/are mentioned in: ', mytweet['text'])\n adjMat=emojiFunction.update_adj_matrix(adjMat, mentionedTogether_index_in_Net, mentionedTogether_position_in_Text)\n if self.concat_tweet and count_tweet_emoji>1:\n mentionedTogether_index_in_Net.insert(0,last_emoji_netIndex)\n heap_mat=emojiFunction.update_heap_mat(heap_mat, mentionedTogether_index_in_Net)\n if len(mentionedTogether)>0:\n last_emoji_netIndex=mentionedTogether_index_in_Net.pop()\n \n if count_tweet>count_2_save:\n count_2_save+=save_period\n print 'total number of tweets: ',count_tweet, ' saving files .............'\n #print (mentionedTogether_index_in_Net, '(NET) is/are mentioned in: ', mytweet['text'])\n df_emoji_count= pd.DataFrame(data=emojiCount, index=emojiList)\n \n df_emoji_adjMatrix=pd.DataFrame(data=adjMat, index=np.arange(len(emojiList)), 
columns=np.arange(len(emojiList)))\n df_emoji_heapMatrix=pd.DataFrame(data=heap_mat, index=np.arange(len(emojiList)), columns=np.arange(len(emojiList)))\n \n #df_emoji_adjMatrix=pd.DataFrame(data=adjMat, index=np.arange(len(emojiList)), columns=np.arange(len(emojiList))) ## create data frame for adjacency matrix\n #df_emoji_heapMatrix=pd.DataFrame(data=heap_mat, index=np.arange(len(emojiList)), columns=np.arange(len(emojiList))) ## create dataframe for the heap matrix\n print 'Saving df_info .........'\n self.write_on_hdf(emoji_hdf5_Info_File_address, hdf_struct=df_emoji_info, hdf_key='df_info', my_mode='a')\n print 'Saving df_heap ..........'\n self.write_on_hdf(emoji_hdf5_Info_File_address, hdf_struct=df_emoji_heap, hdf_key='df_heap', my_mode='a')\n del df_emoji_info\n df_emoji_info = pd.DataFrame()\n del df_emoji_heap\n df_emoji_heap = pd.DataFrame()\n \n print 'Saving df_count .........'\n self.write_on_hdf(emoji_hdf5_Mat_File_address, hdf_struct=df_emoji_count, hdf_key='df_count', my_mode='w')\n print 'Saving df_adjMat ..........'\n self.write_on_hdf(emoji_hdf5_Mat_File_address, hdf_struct=df_emoji_adjMatrix, hdf_key='df_adjMat', my_mode='a')\n print 'Saving df_heapMat ..........'\n self.write_on_hdf(emoji_hdf5_Mat_File_address, hdf_struct=df_emoji_heapMatrix, hdf_key='df_heapMat', my_mode='a') \n \n with open(trace_working_file, 'a') as the_file:\n temp='\\t'+str(count_tweet)+',\\t'+str(mytweet['created_at'])+',\\t'+str(mytweet['id'])\n the_file.write(temp)\n the_file.write('\\n')\n print 'After tweet #{}, the {}_to_{} part was saved'.format(count_tweet, start+1, start+limit)\n print 'Working on the rest........'\n if self.stop:\n break\n\n print 'Saving files of the part {}_to{} for the last time...............'.format(start+1, start+limit)\n df_emoji_count= pd.DataFrame(data=emojiCount, index=emojiList)\n df_emoji_adjMatrix=pd.DataFrame(data=adjMat, index=np.arange(len(emojiList)), columns=np.arange(len(emojiList)))\n df_emoji_heapMatrix=pd.DataFrame(data=heap_mat, index=np.arange(len(emojiList)), columns=np.arange(len(emojiList)))\n \n #df_emoji_info.to_hdf(emoji_hdf5_Mat_File_address, where='df_info, df_heap, df_count, df_adjMat, df_heapMat', mode='w')\n \n self.write_on_hdf(emoji_hdf5_Info_File_address, hdf_struct=df_emoji_info, hdf_key='df_info', my_mode='a')\n self.write_on_hdf(emoji_hdf5_Info_File_address, hdf_struct=df_emoji_heap, hdf_key='df_heap', my_mode='a')\n self.write_on_hdf(emoji_hdf5_Mat_File_address, hdf_struct=df_emoji_count, hdf_key='df_count', my_mode='w')\n self.write_on_hdf(emoji_hdf5_Mat_File_address, hdf_struct=df_emoji_adjMatrix, hdf_key='df_adjMat', my_mode='a')\n self.write_on_hdf(emoji_hdf5_Mat_File_address, hdf_struct=df_emoji_heapMatrix, hdf_key='df_heapMat', my_mode='a') \n\n with open(trace_working_file, 'a') as the_file:\n temp='\\t'+str(count_tweet)+',\\t'+str(mytweet['created_at'])+',\\t'+str(mytweet['id'])\n the_file.write(temp)\n the_file.write('\\n')\n print \"total emoji: \", count_total_seen_emoji\n # return {'df_emoji_info':df_emoji_info, 'df_emoji_heap':df_emoji_heap, 'df_emoji_count':df_emoji_count, 'df_emoji_adjMatrix':df_emoji_adjMatrix, 'df_emoji_heapMatrix':df_emoji_heapMatrix}", "def linesCountingAux(file_name, nProcesses):\r\n\r\n linesPerProcessesList = []\r\n\r\n with open(file_name, \"r\") as file:\r\n lineCounting = 0\r\n\r\n for line in file:\r\n lineCounting += 1 #discover the lines in the text file\r\n\r\n linesPerProcesses = lineCounting // nProcesses\r\n\r\n for number in range(nProcesses):\r\n 
linesPerProcessesList.append(linesPerProcesses)\r\n if sum(linesPerProcessesList) < lineCounting:\r\n for number in range (lineCounting - sum(linesPerProcessesList)):\r\n linesPerProcessesList[number] += 1\r\n\r\n return linesPerProcessesList", "def updateCounts(self):\n found = False\n fileName = \"counts\"\n if not os.access(fileName, os.F_OK):\n try:\n TFH = open(fileName, \"w\")\n TFH.close()\n except IOError as inst: # @UnusedVariable\n self.logIt(__name__ + \".updateCounts(): Unable to open \" + fileName + \" for write.\" + \" => \" + str(\n inst.errno) + \":\" + str(inst.strerror) + \"\\n\")\n raise\n\n self.logIt(__name__ + \".updateCounts(): fileName=\" + fileName + \"\\n\")\n try:\n FH = open(fileName, \"rb+\")\n # FH = posixfile.open(fileName, \"rb+\") # posixfile has been deprecated.\n # FH.lock('w|')\n data = None\n while 1:\n data = str(FH.readline())\n if data is None or data == \"\": break\n data = re.sub(\"\\n\", \"\", data)\n self.debug(__name__ + \".updateCounts(): data is \" + str(data) + \"\\n\")\n ms = str(self.msgNum) + \"=\"\n self.debug(__name__ + \".updateCounts(): ms is\" + str(ms) + \"\\n\")\n if re.search(ms, data):\n found = True\n self.debug(__name__ + \".updateCounts(): DEBUG0.5\\n\")\n break\n self.debug(__name__ + \".updateCounts(): DEBUG1\\n\")\n if data and found:\n self.debug(__name__ + \".updateCounts(): DEBUG2\\n\")\n eloc = FH.tell()\n self.debug(__name__ + \".updateCounts(): eloc=\" + str(eloc) + \"\\n\")\n sloc = eloc - len(data) - 1\n self.debug(__name__ + \".updateCounts(): sloc=\" + str(sloc) + \"\\n\")\n FH.seek(sloc, os.SEEK_SET)\n cloc = FH.tell()\n self.debug(__name__ + \".updateCounts(): cloc=\" + str(cloc) + \"\\n\")\n myList = list()\n myList = data.split('=')\n icount = int(myList[1]) + 1\n FH.write(str(self.msgNum) + \"=\" + str(icount) + \"\\n\")\n else:\n self.debug(__name__ + \".updateCounts(): DEBUG3\\n\")\n FH.write(str(self.msgNum) + \"=1\" + \"\\n\")\n FH.lock('u')\n FH.close()\n except IOError as inst: # @UnusedVariable\n pass\n # self.logIt( __name__ + \".updateCounts(): Unable to open \" + fileName + \" for write.\" + \" => \" + str( inst.errno ) + \":\" + str( inst.strerror ) + \"\\n\" )\n # Endtry", "def __wiki_counts(self):\n\n num_lines = 0\n num_valid_hyperlinks = 0\n disambiguation_ent_errors = 0\n\n print(\"Calculating Wikipedia mention/entity occurrences\")\n\n last_processed_id = -1\n exist_id_found = False\n\n wiki_anchor_files = os.listdir(\n os.path.join(self.base_url, self.wiki_version, \"basic_data/anchor_files/\")\n )\n for wiki_anchor in wiki_anchor_files:\n wiki_file = os.path.join(\n self.base_url,\n self.wiki_version,\n \"basic_data/anchor_files/\",\n wiki_anchor,\n )\n\n with open(wiki_file, \"r\", encoding=\"utf-8\") as f:\n for line in f:\n num_lines += 1\n\n if num_lines % 5000000 == 0:\n print(\n \"Processed {} lines, valid hyperlinks {}\".format(\n num_lines, num_valid_hyperlinks\n )\n )\n if '<doc id=\"' in line:\n id = int(line[line.find(\"id\") + 4 : line.find(\"url\") - 2])\n if id <= last_processed_id:\n exist_id_found = True\n continue\n else:\n exist_id_found = False\n last_processed_id = id\n else:\n if not exist_id_found:\n (\n list_hyp,\n disambiguation_ent_error,\n print_values,\n ) = self.__extract_text_and_hyp(line)\n\n disambiguation_ent_errors += disambiguation_ent_error\n\n for el in list_hyp:\n mention = el[\"mention\"]\n ent_wiki_id = el[\"ent_wikiid\"]\n\n num_valid_hyperlinks += 1\n if mention not in self.wiki_freq:\n self.wiki_freq[mention] = {}\n\n if (\n ent_wiki_id\n in 
self.wikipedia.wiki_id_name_map[\"ent_id_to_name\"]\n ):\n if mention not in self.mention_freq:\n self.mention_freq[mention] = 0\n self.mention_freq[mention] += 1\n\n ent_name = self.wikipedia.wiki_id_name_map[\n \"ent_id_to_name\"\n ][ent_wiki_id].replace(\" \", \"_\")\n if ent_name not in self.wiki_freq[mention]:\n self.wiki_freq[mention][ent_name] = 0\n self.wiki_freq[mention][ent_name] += 1\n\n print(\n \"Done computing Wikipedia counts. Num valid hyperlinks = {}\".format(\n num_valid_hyperlinks\n )\n )", "def analyze_files(self):\n num_file = 0\n results = dict()\n try:\n list_files = os.listdir(self.directory)\n except FileNotFoundError:\n raise FileNotFoundError(\"Can't find any file\")\n else:\n for file in list_files: #looping the files in the directly\n num_file += 1\n if file.endswith(\".py\"): # Looking for files that end with .py\n try:\n fp = open(os.path.join(self.directory, file), \"r\")\n except FileNotFoundError:\n raise FileNotFoundError(f\"Can't open file no {num_file}\")\n else:\n with fp:\n c_total = 0 #Total length of Characters for the entire file\n filename = file # Storing the file name\n t_line = 0 # Getting the total number of line\n t_def = 0 #Getting the total number of functions\n t_class = 0 #Getting the total number of classes\n \n for line in fp:\n t_line += 1 # Counting each line\n t_char = len(line) #Length of characters for each line\n n_line = line.strip() # gets rid of white spaces and new lines\n c_total += t_char # adding each total char in line to the pervious total char in line\n if n_line.startswith(\"def \"): \n t_def += 1 \n elif n_line.startswith(\"class \"):\n t_class += 1\n results[filename] = {'class': t_class, 'function': t_def, 'line': t_line, 'char': c_total }\n return results", "def get_nb_frames_for_video(video_parts):\n filename_no_ext, _ = video_parts\n generated_files = glob.glob(os.path.join(output_dir, filename_no_ext + '*.jpg'))\n return len(generated_files)", "def __len__(self) -> int:\n return len(self.episodes)", "def dailyanalysis(experiment):\n import os\n for fn in os.listdir('/network/aopp/hera/mad/bakerh/fms_tmp/' +\n experiment):\n if fn.find('exe.fms') == -1 and fn.find('mppnccombine.ifc') == -1:\n storedaily('/network/aopp/hera/mad/bakerh/fms_tmp/' + experiment +\n '/' + fn + '/combine/',\n '/network/aopp/hera/mad/bakerh/data/FMS/output/' +\n experiment + '/' + fn + '/history/')\n print('Completed ' + fn)", "def get_analyze_per_file(self):\n \"\"\"Exclude tags, exclude binary (img), count words without non literal characters and digits\"\"\"\n filename = f'{self.path}/{self.filename}'\n file = open(filename, 'r', encoding='utf-8')\n df_tmp = pd.DataFrame(columns=['word', 'cnt', 'word_low'])\n w_cnt = 0\n word_counter = {}\n data = file.read()\n head, sep, tail = data.partition('<binary')\n head = re.sub('\\\\s\\\\s*', ' ', (re.sub('\\\\W|\\\\d', ' ', re.sub('<.*?>', '', head))))\n word_list = head.split()\n for word in word_list:\n\n if word not in word_counter:\n word_counter[word] = 1\n else:\n word_counter[word] = word_counter[word] + 1\n w_cnt += 1\n\n for word, occurance in word_counter.items():\n df_tmp = df_tmp.append({'word': '{:15}'.format(word), 'cnt': '{:3}'.format(occurance),\n 'word_low': '{:15}'.format(word).lower()}, ignore_index=True)\n df_tmp = df_tmp.sort_values(by='word_low')\n df_tmp.loc[(df_tmp.word != df_tmp.word_low), 'word'] = df_tmp.cnt\n df_tmp.loc[(df_tmp.word == df_tmp.cnt), 'cnt'] = 0\n df_tmp.loc[(df_tmp.word == df_tmp.word_low), 'word'] = 0\n df_tmp['word'] = df_tmp.word.astype(int)\n 
df_tmp['cnt'] = df_tmp.cnt.astype(int)\n df_tmp = df_tmp.groupby(['word_low'])['cnt', 'word'].sum().reset_index()\n conn = sqlite3.connect('for_python_ht.db')\n try:\n try:\n sqlite_for_ht.CreateTableSingle.delete_table(f_3, self.filename)\n print(datetime.now(), '-', self.filename, 'Table deleted at the start point')\n except Exception:\n print(datetime.now(), '-', 'Something went wrong')\n traceback.print_exc()\n df_tmp.to_sql(name=self.filename, con=conn, index=False)\n print(datetime.now(), '-', self.filename, 'Table created and filled with data')\n except Exception:\n print(datetime.now(), '-', 'file with name {} already exists'.format(self.filename))\n traceback.print_exc()\n print(datetime.now(), '-', 'word analyse for', self.filename, 'done')\n sqlite_for_ht.HandleTemp.update_table(f_2, 'status', 'Done', self.filename)\n return None", "def __len__(self):\n return self._num_samples_per_file * len(self._files) // self._world_size", "def find_dimesion(filename):\n file = open(filename,\"r\")\n\n line = file.readline()\n file.close()\n return len(line.split())", "def get_num_words_spoken_by_character_per_episode(content):\n content = list(csv.reader(content.splitlines(), delimiter=','))\n characters = [name[2] for name in content]\n characters = list(dict.fromkeys(characters))\n del characters[0]\n res = defaultdict()\n for character in characters:\n episode = 1\n dic = {}\n count = 0\n for row in content: \n if row[2] == character:\n if str(episode) == row[1]:\n count += len(row[3].split())\n else:\n dic[str(episode)] = count\n episode = int(row[1])\n count = len(row[3].split())\n if '13' not in dic.keys():\n dic['13'] = count \n dic = Counter(dic)\n res[character] = dic\n return res", "def countgenes():\n directory = openfile('db_directory.txt')\n no_genes_file = directory+'GENES_IN_HPO.txt'\n GENES_IN_HPO = openfile(no_genes_file)\n #GENES_IN_HPO = openfile(numbergenes_file)\n return int(GENES_IN_HPO)", "def test_file_count(self, audio_store_and_expected_files):\n audio_store = audio_store_and_expected_files[0]\n expected_files = audio_store_and_expected_files[1]\n\n # Check number of files.\n assert audio_store.file_count == expected_files", "def parse_anime_episode(filename):\n print_info('Extracting episode from {0}'.format(filename))\n for regex in ANIME_EPISODE_NUM_REGEXS:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_ep = m.group('Episode')\n print_info('Extracted episode: {0}'.format(extracted_ep))\n\n ep_num = int(extracted_ep)\n if ep_num is not None and ep_num > 0:\n print_info('Episode might be: {0}'.format(ep_num))\n return 'E' + format_num(ep_num)\n\n return None", "def count_waters(self):\n n = 0\n for frag in self.iter_waters():\n n += 1\n return n", "def count(self):\n\n paths = 0\n for task in self.tasks:\n if not task or type(task) != dict:\n continue\n\n for k, v in task.items():\n\n if not k or not v:\n continue\n\n if k in ANSIBLE_MODULES_LIST and type(v) == dict:\n paths += sum(map(lambda x: x == 'path' or x == 'src' or x == 'dest', v))\n\n return paths", "def __find_dupe_size(self):\n sortedList = sorted(self.fileList, key=lambda file: file[0])\n lastSizeCaptured = 0\n file_count = 0\n total_count = len(sortedList)\n if total_count > 0:\n (curSize, curFilename, curIno) = sortedList[0]\n for size, filename, ino in sortedList[1:]:\n if (curSize == size):\n if (lastSizeCaptured != curSize):\n self.dupeSizeList.append((curSize, self.__md5_for_file(curFilename,10), curFilename, curIno))\n self.dupeSizeList.append((size, 
self.__md5_for_file(filename,10), filename, ino))\n lastSizeCaptured = curSize\n (curSize, curFilename, curIno) = (size, filename, ino)\n file_count += 1\n if (file_count % 100) == 0:\n print(\"Processed %s of %s files\" % (file_count, total_count))" ]
[ "0.58617145", "0.5788146", "0.5688501", "0.5621688", "0.551865", "0.5483126", "0.54120064", "0.54120064", "0.5410899", "0.5262325", "0.52515316", "0.5230109", "0.52180594", "0.52164894", "0.5211398", "0.52089113", "0.52078176", "0.5205271", "0.5205262", "0.5201948", "0.5186348", "0.51619774", "0.514821", "0.51342314", "0.5114938", "0.50808835", "0.5070604", "0.5068788", "0.5057877", "0.50557125", "0.5046763", "0.5037984", "0.5032343", "0.50106186", "0.49945655", "0.4993016", "0.4989075", "0.49659225", "0.49480578", "0.49392962", "0.49230647", "0.4917824", "0.49081498", "0.49036592", "0.4892665", "0.48836702", "0.4879712", "0.48757622", "0.48749664", "0.48686424", "0.48419037", "0.48418713", "0.4841", "0.48407125", "0.48395476", "0.48376763", "0.48330337", "0.48305032", "0.48143983", "0.4814171", "0.48102927", "0.4807968", "0.4806227", "0.48033518", "0.47983807", "0.47973806", "0.47875947", "0.4772974", "0.47722065", "0.4769911", "0.47694716", "0.47688308", "0.47662404", "0.47641316", "0.47604284", "0.47535068", "0.47505882", "0.4749529", "0.47437444", "0.47427472", "0.47343472", "0.4723451", "0.47225577", "0.4718654", "0.47183847", "0.47128996", "0.47084558", "0.47057724", "0.4704844", "0.47002622", "0.4697282", "0.4695316", "0.46923527", "0.46866718", "0.46855378", "0.46847916", "0.46817932", "0.4681627", "0.46804374", "0.46703818" ]
0.6543224
0
Retrieve the release name based on the file used during MediaInfo. If a season was specified but an episode number was not, it presumes the release is a Pack; in that case, it uses the parent folder's name as the release name.
def get_release_name(self) -> str: if self.season is not None and self.episode is None: return os.path.basename(os.path.dirname(self.file)) return os.path.splitext(os.path.basename(self.file))[0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getApplicationReleaseName(self) -> unicode:\n ...", "def title(self):\n if self.file_name is None:\n return None\n else:\n fname = os.path.split(self.file_name)[-1]\n fname, *ext = fname.rsplit('.', 1)\n procgen = ext and ext[0] in ('json', 'yaml')\n if procgen and self._seed and self._seed.spawn_key:\n # Append the spawn key as the episode number\n fname += '-e' + str(self._seed.spawn_key[-1])\n return fname", "def __get_parsed_video_file_path(season: int, episode: int) -> str:\n return rootpath.detect() + \"/\" + SAVE_FOLDER + \"s\" + str(season) + \"e\" + str(episode) + \".data\"", "def to_release_brach_name(self) -> str:\n return f\"release/{self.major}.{self.minor}\"", "def getReleaseVersion(self, workingTowerName, infixStream):\n towerInfix = iccs_apex.whatInfixIsStream(workingTowerName)\n prefixStream, postfixStream = string.split(workingTowerName, towerInfix)\n releaseVersion, postVersion = string.split(postfixStream, \"wrk\")\n releaseTowerName = infixStream + releaseVersion + \"rel\"\n \n return releaseTowerName", "def season_folder(cls, season):\r\n\r\n\t\t'''# Google Drive downloads replace these characters automatically\r\n\t\t# I'm implementing this in the code as well for convenience\r\n\t\tseason = season.replace(\"&\", \"_\")\r\n\t\tseason = season.replace(\"'\", \"_\")'''\r\n\r\n\t\t# Folder names are ANSI versions of the season name\r\n\t\t# This is important in names like \"Lé Unicorn\" which get\r\n\t\t# converted incorrectly as folder names\r\n\t\tseason = season.encode(encoding=\"utf-8\")\r\n\t\tseason = season.decode(encoding=\"cp1252\", errors=\"ignore\")\r\n\r\n\t\treturn season", "def get_package_name(self):\n return self.name + '-' + self.version + '-' + self.release", "def path(self):\n if self.filename:\n return os.path.join(self.season.path, self.filename)", "def parse_season(filename):\n print_info('Attempting to parse {0}'.format(filename))\n print_info('Extracting season from {0}'.format(filename))\n for regex in SEASON_REGEX:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_season = m.group('Season').lower()\n print_info('Extracted season: {0}'.format(extracted_season))\n\n season_num = int(extracted_season)\n if season_num is not None and season_num > 0:\n print_info('Season might be: {0}'.format(season_num))\n return 'S' + format_num(season_num)\n return 'S01'", "def _get_full_title(self):\n return \"%s - %s %d\" % (self.title, _('Season'), self.season)", "def media_series_title(self):\n if lgtv[\"pairingKey\"] == 0:\n return \"Pin not set\"\n if self._currentSourceNumber == \"0\":\n return (\"{0} - CH{1:d} - {2}\").format(self._currentSourceName, self._currentChannelNumber, self._currentChannelName)\n else:\n return \"\"", "def get_filename(self):\n return self.get_package_name() + '.' 
+ ARCH + \".rpm\"", "def makeReleaseFileName(cls, version: str) -> str:\n\n from peek_platform import PeekPlatformConfig\n\n return os.path.join(\n PeekPlatformConfig.config.platformSoftwarePath,\n 'peek-release-%s.tar.gz' % version)", "def get_package_name(self, default=-1):\n specs = glob.glob(f'{self.distgit_repo().distgit_dir}/*.spec')\n if len(specs) != 1:\n if default != -1:\n return default\n raise IOError('Unable to find .spec file in RPM distgit: ' + self.qualified_name)\n\n spec_path = specs[0]\n with open(spec_path, mode='r', encoding='utf-8') as f:\n for line in f.readlines():\n if line.lower().startswith('name:'):\n return line[5:].strip() # Exclude \"Name:\" and then remove whitespace\n\n if default != -1:\n return default\n\n raise IOError(f'Unable to find Name: field in rpm spec: {spec_path}')", "def media_title(self):\n if lgtv[\"pairingKey\"] == 0:\n return \"Pin not set\"\n if self._currentSourceNumber == \"0\":\n return self._currentProgram\n else:\n return self._currentSourceName", "def _parse_fname_season(cmd_args):\n f_name_base = 'sfav2_CONUS_{}_to_{}'\n season_start = '093012' # Start month, day, & hour for seasonal accum\n\n date_in = adjust_date(cmd_args)\n\n if (not check_ftype(cmd_args)):\n print('{} not valid for seasonal accumulation period. Downloading as NetCDF'.format(cmd_args.f_type))\n f_type = 'nc'\n else:\n f_type = cmd_args.f_type\n\n # If we are in the new year of the winter season (i.e., Jan 2020 of the\n # 2019-2020 winter season), adjust the start year defining the winter season\n if (date_in.month < 9):\n start_yr = date_in.year - 1\n else:\n start_yr = date_in.year\n\n date_start = '{}{}'.format(start_yr, season_start)\n date_end = datetime.strftime(date_in, '%Y%m%d%H')\n\n f_name = f_name_base.format(date_start, date_end)\n\n f_name = '{}.{}'.format(f_name, f_type)\n\n return f_name", "def derive_mod_name(self):\n\n # a) if we're lucky, this is a Fomod install w/ a modname attr\n # TODO: some non-Fomod mods still include an \"info.xml\" file\n if self.has_fomod:\n fname = self.fomod.modname.name\n # fix: the fomod name often includes a version number on the end (like \"Soul Gem Things v1.4.5\")\n vmatch = _version_format.search(fname)\n if vmatch:\n fname = fname[:vmatch.start()].strip()\n\n print(\"fomod found:\")\n print(\" orig:\", self.fomod.modname.name)\n print(\" name:\", fname)\n\n # return self.fomod.modname.name\n return fname\n\n # if not, we'll have to get clever\n\n # b) if the mod includes esp/bsa/etc. 
files, they're often\n # labeled with the mod's \"real\" name\n bname = os.path.basename\n split = os.path.splitext\n\n # check top 2 levels\n # accumulate names\n _names = []\n ext_re = re.compile(r\".*\\.(es[pm]|bsa)$\")\n for f in filter(lambda s: ext_re.search(s.lower()),\n self.archive_files):\n # if re.search(r\".*\\.(es[pm]|bsa)$\", f.lower()):\n _names.append(split(bname(f))[0])\n\n print(f\"names from esp/bsa ({len(_names)}):\")\n for n in _names:\n print(f\" {n}\")\n\n # c) see if we can figure it out from the archive name;\n # try to ignore the version numbers\n archive_name = self.arc_path.stem\n\n # archives downloaded from the nexus generally have\n # the mod name, then a hyphen followed by the modid, then\n # (optionally) another hyphen and version info\n m = _nexus_archive_name_format.match(archive_name)\n\n if m:\n name = m['name']\n\n # TODO: if we can get the modid, we should be able to look up the mod info on the nexus...though that would of course require writing an async web-request module...\n modid = m['modid']\n ver = m['version']\n\n if name:\n # ==> eventually, this should pull the name from the nexus\n\n # sometimes there's some extra stuff like (redundant)\n # version info on the end of the name\n exm = _extra_stuff.search(name)\n if exm:\n name = name[:exm.start()]\n\n if ver:\n ver = ver.replace(\"-\", \".\")\n\n print(\"Derived from archive name:\")\n print(\" name:\", name)\n print(\" modid:\", modid)\n print(\" version:\", ver)\n return name\n\n return \"\"", "def bsw_getCurrentAssetMainFileName():\n projectShortName = ProjectNamingInputs().projectShortName\n # get asset UID from the kns_getAssetDetails function (second last return is assetUID).\n assetUID = bsw_getAssetDetails()[-2]\n if os.environ['BSW_PROJECT_TYPE'] == 'series':\n return projectShortName + '_' + assetUID.split('_')[1] + '_' + assetUID.split('_')[2] + '_' + \\\n assetUID.split('_')[-1] + '_' + assetUID.split('_')[-2] + '.ma'\n else:\n return projectShortName + '_' + assetUID.split('_')[1] + '_' + assetUID.split('_')[2] + '_' + \\\n assetUID.split('_')[-1] + '.ma'", "def GetOutputFilename(self, directory=None):\n if self.forced_filename:\n logging.debug('Forced filename or pre-computed file name = %s', self.filename)\n return self.filename\n\n tags = dict()\n\n # Base tag\n tags['base'] = f\"{self['ARTIST']} - {self['DATE_RECORDED']} - {self['TITLE']}\"\n\n # Setup version subinfo\n tags['version'] = f\" ({self['VERSION']})\" if self[\"VERSION\"] else \"\"\n\n # Setup label / release subinfo\n channels = self.channels if self.channels != '2.0' else ''\n if self[\"ORIGINAL_MEDIUM\"] == \"CD\":\n labeltag = f\"{self['LABEL']} {self['ISSUE_DATE']} {channels}\"\n else:\n labeltag = f\"{self['LABEL']} {self['ISSUE_DATE']} {self['ORIGINAL_MEDIUM']} {channels}\"\n labeltag = labeltag.strip()\n tags['label'] = labeltag and f\" ({labeltag})\"\n\n # Setup disc tag\n if self[\"PART_NUMBER\"]:\n disctag = f\" (Disc {self['PART_NUMBER']}) {self['DISC_NAME']}\"\n else:\n disctag = f\" {self['DISC_NAME']}\"\n tags['disc'] = disctag.rstrip()\n\n # Merge into filename\n filename = f\"{tags['base']}{tags['version']}{tags['disc']}{tags['label']}{ext.WAV}\"\n # Replace invalid characters with either a dash or remove them\n filename = re.compile(\"[<>:/\\\\\\\\]\").sub(\"-\", filename)\n filename = re.compile(\"[|?*]\").sub(\"\", filename)\n # Replace invalid double quotes with valid single quotes\n filename = filename.replace('\"', \"'\")\n\n if directory:\n return os.path.join(directory, filename)\n return 
filename", "def _prettyfilename(self):\n return f'{self.grandparentTitle} - {self.seasonEpisode} - {self.title}'", "def get_file_name(self):\n\n return \"%s - %s\" % (self.get_tags()[\"artist\"], self.get_tags()[\"title\"])", "def media_season(self):\n media_status = self._media_status()[0]\n return media_status.season if media_status else None", "def episode_title_for_tvdb(self):\n \n # strip out the year from the episode title:\n return re.sub('(Part )(?P<part>\\d+)','(\\g<part>)',self.episode_title)", "def build_album_name(album):\n try:\n name, year = album.name, album.release_year\n except AttributeError:\n name, year = album.text(0), album.text(1)\n if year:\n name += f' ({year})'\n return name", "def _get_name_constellation_specific(self) -> str:\n\n try:\n if self.is_archived:\n footprint_path = files.get_archived_path(self.path, r\".*\\.shp\")\n else:\n footprint_path = next(self.path.glob(\"*.shp\"))\n except (FileNotFoundError, StopIteration):\n raise InvalidProductError(\n \"Footprint shapefile cannot be found in the product!\"\n )\n\n # Open identifier\n name = files.get_filename(footprint_path)\n\n return name", "def _get_track_name(self, filename):\n return os.path.basename(filename)", "def get_package_repo_name(package_info):\n\n # should check that there is EXACTLY one line\n repo_lines = \\\n [line for line in package_info if line.startswith(\"From repo \")]\n\n # \"From repo : <repo name>\"\n # Get the value and remove white space.\n if len(repo_lines) > 0:\n repo_name = repo_lines[0].split(':')[1].strip()\n else:\n repo_name = None\n\n return repo_name", "def get_release_date ():\n fname = os.path.join(\"doc\", \"changelog.txt\")\n release_date = \"unknown\"\n with open(fname) as fd:\n # the release date is on the first line\n line = fd.readline()\n mo = release_ro.search(line)\n if mo:\n release_date = mo.groups(1)\n return release_date", "def get_competition_season_type(season):\n default_type = games.models.CompetitionSeason.winter\n try:\n season_name = season.name\n if season_name.find(\"/\") == -1:\n return games.models.CompetitionSeason.summer\n return default_type\n except Exception as e:\n return default_type", "def seasonEpisode(self):\n return f's{str(self.seasonNumber).zfill(2)}e{str(self.episodeNumber).zfill(2)}'", "def get_name(self) -> str:\n def _seg2():\n if self.name:\n return self.name\n else:\n try:\n return self.player.title\n except AttributeError:\n return 'No title specified'\n try:\n if self.player.title == 'translate_tts':\n return 'Speech'\n else:\n return _seg2()\n except AttributeError:\n return _seg2()", "def get_revision(name):\n #return name[-6:-4]\n base,ext = os.path.splitext(name)\n return base[-2:]", "def getSlavename():", "def filename(self):\n # Just the name of the file\n filename = self.use_name\n if self.extension:\n filename = \"{0}.{1}\".format(self.use_name, self.extension)\n # Architecture sub-folder\n arch_folder_conf = spack.config.get(\"modules:%s:arch_folder\" % self.conf.name, True)\n if arch_folder_conf:\n # include an arch specific folder between root and filename\n arch_folder = str(self.spec.architecture)\n filename = os.path.join(arch_folder, filename)\n # Return the absolute path\n return os.path.join(self.dirname(), filename)", "def media_title(self) -> str | None:\n return self._output_name", "def return_episode_num(name):\n return int(name.split(\".\")[0].split(\"ep_\")[1]) # Use split to return only the episode number needed to sort the files in increasing order", "def get_scene(videoname_):\n s = 
videoname_.split(\"_S_\")[-1]\n s = s.split(\"_\")[0]\n return s[:4]", "def bsw_getAssetPathFromFileName(fileName):\n projectType = ProjectNamingInputs().projectType\n assetType = {'ch': 'Character', 'pr': 'Prop', 'bg': 'Set', 'vh': 'Vehicle', 'se': 'SetElement'}\n assetDept = {'mod': 'Model', 'tex': 'Texture', 'rig': 'Rig', 'lit': 'Light'}\n astType = fileName.split('_')[1]\n astName = fileName.split('_')[2]\n versionFile = False\n if len(fileName.split('_')) == 6:\n versionFile = True\n if projectType == 'series':\n episode = fileName.split('_')[3]\n # use 3 digit after splitting because main file dept has no \"_\" after dept.\n astDept = fileName.split('_')[4][:3]\n if versionFile:\n return bsw_getAssetDeptDirs(assetType[astType], astName, episode=episode)[assetDept[astDept] + 'Version']\n return bsw_getAssetDeptDirs(assetType[astType], astName, episode=episode)[assetDept[astDept]]\n else:\n astDept = fileName.split('_')[3][:3]\n if versionFile:\n return bsw_getAssetDeptDirs(assetType[astType], astName)[assetDept[astDept] + 'Version']\n return bsw_getAssetDeptDirs(assetType[astType], astName)[assetDept[astDept]]", "def parse_volume(filename):\n print_info('Extracting volume from {0}'.format(filename))\n for regex in MANGA_VOLUME_REGEX:\n m = re.search(regex, filename)\n\n if m is None:\n continue\n\n extracted_season = m.group('Volume').lower()\n print_info('Extracted volume: {0}'.format(extracted_season))\n\n season_num = int(extracted_season)\n if season_num is not None and season_num > 0:\n print_info('Season might be: {0}'.format(season_num))\n return 'Volume ' + format_num(season_num)\n return 'Volume 0'", "def getPlayerFilename(self):\n if (self.__playerName != \"???\"):\n return self.__filename\n else:\n return \"\"", "def get_version_string():\n major, minor, micro, patch, tag, relnum, is_release = VERSION\n\n version = '%s.%s' % (major, minor)\n\n if micro or patch:\n version += '.%s' % micro\n\n if patch:\n version += '.%s' % patch\n\n if tag != 'final':\n if tag == 'rc':\n version += ' RC'\n else:\n version += ' %s ' % tag\n\n version += '%s' % relnum\n\n if not is_release:\n version += ' (dev)'\n\n return version", "def displayname(self):\n if self.path.is_dir():\n if (is_uuid(self.path.parts[-1])):\n self.is_uuid_folder = True\n return self.path.name + '/'\n elif is_proj(self.path.parts[-1]):\n return f'{bcolors.BOLD}' + self.path.name + f'{bcolors.ENDC}'\n return self.path.name", "def day_name():\n file_path = os.path.dirname(__file__)\n day_path = os.path.normpath(os.path.join(file_path, '..'))\n return os.path.basename(day_path)", "def testReleaseUseDefaultNaming(self):\n expected = (IMAGE_SERVER_PREFIX + '/stable-channel/x86-alex/0.12.433.269',\n ['chromeos', '0.12.433.269', 'x86-alex', 'ssd',\n 'stable-channel', 'mp', '.bin'])\n actual = cb_name_lib.GetReleaseName(self.board,\n self.version_string,\n 0)\n self.assertEqual(expected, actual)", "def get_title(self):\n\n if self.title: return self.title\n path = self.get_path()\n if str(path) == \"\": \n Settings.err_print(\"missing file title\")\n return \"\"\n title, ext = os.path.splitext(path)\n self.ext = ext\n self.title = \"{}{}\".format(os.path.basename(title), ext)\n return self.title", "def album_information(release):\n tags = release.get('tag-list')\n result = '{artist-credit-phrase} - '.format(**release)\n if 'date' in release:\n result += '{date} '.format(**release)\n result += '{title}'.format(**release)\n if tags:\n result += ': ' + '; '.join(titlecase(tag['name']) for tag in release['tag-list'])\n return 
result", "def media_folder_name(self):\n raise NotImplementedError", "def title(self):\n return os.path.basename(self.__path) if self.isdir() else self.file_title", "def testReleaseUseAlternativeNaming(self):\n expected = (IMAGE_SERVER_PREFIX + '/stable-channel/x86-alex-rc/' +\n '0.12.433.269',\n ['chromeos', '0.12.433.269', 'x86-alex', 'ssd',\n 'stable-channel', 'mp', '.bin'])\n actual = cb_name_lib.GetReleaseName(self.board,\n self.version_string,\n 1)\n self.assertEqual(expected, actual)", "def bsw_getAssetDetails(rootGrpName=None):\n if not rootGrpName:\n if pm.objExists('Texture_Group'):\n rootGrpName = 'Texture_Group'\n elif pm.objExists('rig_grp'):\n rootGrpName = 'rig_grp'\n elif pm.objExists('geo'):\n rootGrpName = 'geo'\n else:\n rootGrpName = 'None'\n astDept = {'Texture_Group': 'Texture', 'rig_grp': 'Rig', 'geo': 'Model', 'None': 'None'}\n if rootGrpName == 'None':\n return 'Not Exist', 'Not Exist', 'Not Exist', 'Not Exist', 'NotExist'\n rootGrp = pm.PyNode(rootGrpName)\n # get episode if environment is series, else return \"Not Exist\" string.\n episode = 'NotExist'\n if os.environ['BSW_PROJECT_TYPE'] == 'series':\n episode = rootGrp.assetEpisode.get()\n return astDept[rootGrpName], rootGrp.assetType.get(), rootGrp.assetName.get(), rootGrp.assetUID.get(), episode", "def get_package_name(self):\n return self.name + '-' + self.version", "def season(self, seasonnum, order='aired'):\n if order=='aired':\n seasons = self.seasons\n elif order == 'dvd':\n seasons = self.dvd_seasons\n try:\n return seasons[seasonnum]\n except KeyError:\n raise SeasonNotFoundError(\n 'Season no %s does not exists' % seasonnum\n ), None, sys.exc_info()[2]", "def getXsdVersionName(self):\n vers = self.getVersion()\n if vers is None:\n return None\n\n # Determine the filename\n bname = os.path.basename(self.__pathin)\n dname = bname.split(\".\")[0]\n\n dc = DictConfig()\n prefix = dc.get_prefix(dname)\n if prefix:\n vout = \"%s-v%s.xsd\" % (prefix, vers)\n return vout\n\n return None", "def get_scene(videoname):\n s = videoname.split(\"_S_\")[-1]\n s = s.split(\"_\")[0]\n return s[:4]", "def get_version_info_str(self, key_name='ver_sw_release'):\n version = self.get_version_info(key_name)\n if not version is None and version[3] >= 64:\n type_str = ''\n if version[3] < 128: type_str = ' (alpha)'\n elif version[3] < 192: type_str = ' (beta)'\n elif version[3] < 255: type_str = ' (RC)'\n return 'v{}.{}.{}{}'.format(version[0], version[1], version[2], type_str)\n return None", "def episode_title_for_tvdb(self):\n \n # strip out the year from the episode title:\n return \"Episode %d\"%self.episode_number[1]", "def get_current_release():\n return _CURRENT_RELEASE", "def get_package_version():\n major, minor, micro, patch, tag, relnum = __version_info__\n\n version = '%s.%s' % (major, minor)\n\n if micro or patch:\n version += '.%s' % micro\n\n if patch:\n version += '.%s' % patch\n\n if tag != 'final':\n version += '%s%s' % (\n {\n 'alpha': 'a',\n 'beta': 'b',\n }.get(tag, tag),\n relnum)\n\n return version", "def display_name(self) -> str:\n if self.is_verified:\n return f\"Verified Package {self.csharp_version}\"\n elif self.is_main:\n return \"main (unstable)\"\n else:\n return self.release_tag.replace(\"_\", \" \").title()", "def get_for_release_version_path(self):\n return self.__cICommon.get_for_release_version_path()", "def get_release_info(version='v1.1-dev', date='2021-07-22'):\n # go to the repository directory\n dir_orig = os.getcwd()\n os.chdir(os.path.dirname(os.path.dirname(__file__)))\n\n # grab 
git info into string\n try:\n cmd = \"git describe --tags\"\n version = subprocess.check_output(cmd.split(), stderr=subprocess.DEVNULL)\n version = version.decode('utf-8').strip()\n\n # if there are new commits after the latest release\n if '-' in version:\n version, num_commit = version.split('-')[:2]\n version += '-{}'.format(num_commit)\n\n cmd = \"git log -1 --date=short --format=%cd\"\n date = subprocess.check_output(cmd.split(), stderr=subprocess.DEVNULL)\n date = date.decode('utf-8').strip()\n except:\n pass\n\n # go back to the original directory\n os.chdir(dir_orig)\n return version, date", "def get_filename(self):\n return self.get_package_name() + '-' + self.os + '-' + self.arch + GPPKG_EXTENSION", "def get_version_string():\n\n version_string = get_version()\n if not version_string:\n version_string = \"unknown\"\n\n return \"ImageSplit version \" + version_string", "def get_info(self, wanted_info: str = TITLE):\n try:\n info = str(self.mp3[wanted_info])\n return info[2:len(info) - 2] # Removes the ['']\n except KeyError:\n return \"N/A\"", "def rename_file(f: pathlib.Path) -> str:\n m = mutagen.File(f)\n if m is None: return\n new_name_parts = []\n if \"tracknumber\" in m:\n if \"discnumber\" in m:\n new_name_parts.append(pad_num_str(m[\"discnumber\"][0]) + \".\")\n new_name_parts.append(pad_num_str(m[\"tracknumber\"][0]) + \" - \")\n new_name_parts.append(m[\"title\"][0].replace(\"/\", \"_\"))\n if \"version\" in m:\n new_name_parts.append(\" - \" + \" - \".join(m[\"version\"]).replace(\"/\", \"_\"))\n return \"\".join(new_name_parts)", "def media_title(self):\n return self.coordinator.data.nowplaying[self.zone.SourceID].CurrSong.Title", "def path_name(self):\n return u'{0}-{1}'.format(self.plugin.name, self._major_version)", "def generate_file_name(entry):\n return str_for_file(u'{name}, {year}, {title}'.format(\n year=entry['year'],\n name=get_last_name(entry['author'][0]),\n title=entry['title']\n ))", "def _get_python_info_rename(path: str) -> str:\n if path.name.endswith(\".egg-info\"):\n f = \"PKG-INFO\"\n else:\n # Assume dist-info. 
Are there other options?\n f = \"METADATA\"\n pkgmetainfodata = path / f\n with pkgmetainfodata.open() as f:\n for line in f:\n match = re.match(r'^Name: ([A-Z-a-z].+)', line)\n if match:\n name = match.group(1)\n break\n if not line.strip():\n # First blank line; gone too far; give up\n return\n else:\n return\n return name + path.suffix", "def get_repo_shortname(self):\n\n basename = os.path.abspath(self.get_repo_path())\n m = self.REPO_NAME_RE.match(basename)\n if m:\n return m.group('name')\n else:\n return basename", "def _retrosheet_filename(game_id, data_root):\n # game id is TTTYYYYMMDDN.\n team = game_id[:3]\n year = game_id[3:7]\n file_pattern = year + team + \".EV*\"\n file_path = os.path.join(data_root, \"retrosheet\", year, file_pattern)\n file_matches = glob.glob(file_path)\n return file_matches[0] if len(file_matches) else None", "def get_volume_name(self, vid):\n return \"cv-{0}\".format(vid)", "def get_spec_version(spec_file_name):\n\n # Read the \n spec_file = open(spec_file_name)\n lines = spec_file.readlines()\n \n # TBD check if there's only one\n version_lines = [l.strip() for l in lines if l.startswith(\"Version: \")]\n version_line = version_lines[0]\n version_string = version_line.split()[1]\n\n (major, minor) = version_string.split('.')[0:2]\n\n # TBD check if there's only one\n release_lines = [l.strip() for l in lines if l.startswith(\"Release: \")]\n release_line = release_lines[0]\n release_string = release_line.split()[1]\n\n # the release string from the spec file may have %{dist} templates or other\n # strings in it. Just take the leading numeric fields (. separated)\n num_field_re = re.compile(\"(^[0-9.]+).*\")\n num_field_match = num_field_re.match(release_string)\n if num_field_match:\n release_string = num_field_match.groups()[0]\n else:\n # should this throw an error?\n release_string = \"\"\n \n return {'version': version_string, 'release': release_string}", "def __get_url_and_name(self, arch: str):\n page = requests.get(self.releases_url)\n page_text = page.text\n soup = BeautifulSoup(page_text, features=\"html.parser\")\n regex = re.compile('frida-server-[0-9]{1,2}.[0-9]{1,2}.[0-9]{1,2}-android-' + arch, re.IGNORECASE)\n frida_server_name = soup.find(text=regex)[0:-3]\n release_version = re.findall(\"[0-9]{1,2}.[0-9]{1,2}.[0-9]{1,2}\", frida_server_name)[0]\n return (self.releases_url + '/download/' + release_version + '/' + frida_server_name + \".xz\"), frida_server_name", "def get_filename(self, file_object):\n\n valid_chars = \"-_.() %s%s\" % (string.ascii_letters, string.digits)\n\n result = \"<show> <season>x<episode> <name>.mp4\"\n result = result.replace(\"<show>\", file_object.show.name)\n result = result.replace(\"<season>\", \"%.2d\" % \\\n int(file_object.season.number))\n result = result.replace(\"<episode>\", \"%s\" % \\\n str(file_object.number))\n result = result.replace(\"<name>\", file_object.name)\n return result", "def get_package_decoder_file_name(name):\n name = get_package_name(name)\n return 'decoder_for_{0}_package'.format(name)", "def get_season_url(\n base_url: str, year: Optional[int] = None, season: Optional[str] = None\n) -> str:\n if year is None or season is None:\n return f\"{base_url}/season\"\n return f\"{base_url}/season/{year}/{season.lower()}\"", "def infer_name(self):\n if CONFIG_KEY not in self:\n return\n if hasattr(self[CONFIG_KEY], \"name\"):\n if \" \" in self[CONFIG_KEY].name:\n raise InvalidConfigFileException(\n \"Specified Project name ({}) contains whitespace\".\n format(self[CONFIG_KEY].name))\n 
return self[CONFIG_KEY].name.replace(\" \", \"_\")\n if not self[CONFIG_FILE_KEY]:\n raise NotImplementedError(\"Project name inference isn't supported \"\n \"on a project that lacks a config file.\")\n config_folder = os.path.dirname(self[CONFIG_FILE_KEY])\n project_name = os.path.basename(config_folder)\n if project_name == METADATA_KEY:\n project_name = os.path.basename(os.path.dirname(config_folder))\n return project_name.replace(\" \", \"_\")", "def get_file_name(self):\n return self.path.name[6:]", "def get_current_scene_name():\n\n scene_name = cmds.file(query=True, sceneName=True, shortName=True)\n scene_name = osp.splitext(scene_name)[0]\n\n return scene_name", "def get_dest_name ( self ):\n return self.filename", "def _get_filename(self, sheet_name):\n sheet_name = self._update_sheet_name_with_unique_id(sheet_name)\n if self.version and self.use_version_postfix:\n return sheet_name + '_v' + str(self.version)\n else:\n return sheet_name", "def get_file_name(self):\n return str(self.get_file())", "def name(self):\n #type: ()->Text\n return (\n os.path.splitext(os.path.basename(self.fileName))[0])", "def get_version_filename(filename):\n return re.search(r'\\d+', filename).group(0)", "def package(self):\n if self.method == 'buildNotification':\n return self.params[1]['name']\n if self.method in ('createImage', 'image', 'livecd'):\n return self.params[0]\n if self.method == 'indirectionimage':\n return self.params[0]['name']\n # params[0] is the source URL for these tasks:\n if self.method not in ('build', 'buildArch', 'buildContainer',\n 'buildMaven', 'buildSRPMFromSCM', 'maven'):\n return None\n # (I wish there was a better way to do this.)\n source = self.params[0]\n o = urlparse(source)\n # build tasks can load an SRPM from a \"cli-build\" tmpdir:\n if source.endswith('.src.rpm'):\n srpm = os.path.basename(source)\n (name, version, release) = srpm.rsplit('-', 2)\n # Note we're throwing away version and release here. 
They could be\n # useful eventually, maybe in a \"Package\" class.\n return name\n # or an allowed SCM:\n elif o.scheme:\n package = os.path.basename(o.path)\n if package.endswith('.git'):\n package = package[:-4]\n if self.method == 'buildContainer':\n package += '-container'\n return package\n raise ValueError('could not parse source \"%s\"' % source)", "def media_album_name(self):\n return self.coordinator.data.nowplaying[self.zone.SourceID].CurrSong.Album", "def get_package_name():\n return try_get_project_property('packageName')", "def infer_webserver_package(release: str) -> str:\n if release == \"current_branch\":\n return \"dagster-webserver\"\n else:\n if not EARLIEST_TESTED_RELEASE:\n check.failed(\"Environment variable `$EARLIEST_TESTED_RELEASE` must be set.\")\n version = packaging.version.parse(release)\n return \"dagit\" if version < packaging.version.Version(\"1.3.14\") else \"dagster-webserver\"", "def rpm_full_name(self) -> str:\n if self.full_version:\n return \"{}-{}\".format(self.name, self.full_version)\n return cast(str, self.name)", "def get_egg_name():\n global eggname\n if not eggname:\n version = local('git describe --abbrev=4', capture=True)\n if version:\n version = '%s-%s' % (version, datetime.datetime.today().strftime('%Y%m%d'))\n eggname = APP_NAME + '-%s-py%s.egg' % (version.replace('-', '_'), python_version)\n return eggname", "def get_display_name(self, short=False):\n if self.filename is None:\n return '[New file]'\n elif short:\n return os.path.basename(self.filename)\n else:\n return self.filename", "def _get_video_filename(self):\n fnd = self._get_session_dir()\n self.video_number += 1\n fn = os.path.join(fnd, 'V%4.4d.avi' % self.video_number)\n return fn", "def _get_koji_task_result_package_name(path):\n filename = path.split('/')[-1]\n trimmed = []\n for part in filename.split('-'):\n # assumes that when the next part is not a digit\n # we're past the name and at the version\n if part[0].isdigit():\n return '-'.join(trimmed)\n trimmed.append(part)\n\n return '-'.join(trimmed)", "def modpricesetter_get_file_name(self):\r\n year, month, day = self._get_market_year_month_day_as_str()\r\n interval_number = self._get_interval_number_as_str()\r\n base_name = \"NEMPriceSetter_{year}{month}{day}{interval_number}00.xml\"\r\n name = base_name.format(\r\n year=year, month=month, day=day, interval_number=interval_number\r\n )\r\n path_name = Path(self.cache_folder) / name\r\n name_OCD = name.replace(\".xml\", \"_OCD.xml\")\r\n path_name_OCD = Path(self.cache_folder) / name_OCD\r\n name_zero = name.replace(\".xml\", \"00.xml\")\r\n path_name_zero = Path(self.cache_folder) / name_zero\r\n if os.path.exists(path_name):\r\n return name\r\n elif os.path.exists(path_name_OCD):\r\n return name_OCD\r\n elif os.path.exists(path_name_zero):\r\n return name_zero\r\n else:\r\n return name", "def shpname(self):\n _, tail = os.path.split(self.url)\n return self.folder + ('/' + tail[:-4]) * 2", "def _get_releaseinfo_str(version):\n opts = {}\n f = StringIO.StringIO()\n opts['version'] = version\n opts['date'] = get_git_log_info(\"%ci\")\n opts['comments'] = get_git_log_info(\"%b%+s%+N\")\n opts['commit'] = get_git_log_info(\"%H\")\n f.write(relfile_template % opts)\n return f.getvalue()", "def get_release_info(self):\r\n return self.detail_info.get_release_info(self.version)", "def bandname(self):\n if self._properties['bandname'] is None:\n self._properties['bandname'] = \"fuv\" if \"-fd-\" in self.filename else \"nuv\" if \"-nd-\" in self.filename \\\n else \"unknown\"\n 
return self._properties['bandname']", "def get_name_from_metadata():\n try:\n with open(\"metadata.yaml\", \"rb\") as fh:\n metadata = yaml.safe_load(fh)\n charm_name = metadata[\"name\"]\n except (yaml.error.YAMLError, OSError, KeyError):\n return\n return charm_name" ]
[ "0.59712", "0.59365255", "0.58621913", "0.57642496", "0.5731158", "0.5726277", "0.57143974", "0.5686276", "0.5618006", "0.557107", "0.5563853", "0.5559816", "0.5543877", "0.55142844", "0.55000263", "0.5495673", "0.5444121", "0.5430501", "0.5416234", "0.54096", "0.5329431", "0.5307731", "0.5286652", "0.52817094", "0.527324", "0.5264124", "0.5260512", "0.5250874", "0.5247147", "0.524125", "0.52407694", "0.524058", "0.5207816", "0.52033824", "0.5202244", "0.5201775", "0.5200528", "0.51980716", "0.51871705", "0.5184713", "0.51759326", "0.51738185", "0.516823", "0.5165896", "0.5155838", "0.5155822", "0.51532775", "0.5145931", "0.51394135", "0.51375204", "0.51330817", "0.51276475", "0.51153773", "0.5115132", "0.51132417", "0.5112812", "0.51124346", "0.51090384", "0.510261", "0.5101749", "0.5091906", "0.509123", "0.50874984", "0.5081777", "0.50779504", "0.50773156", "0.50754243", "0.5073527", "0.507323", "0.50729805", "0.5062183", "0.5060155", "0.50588596", "0.50572133", "0.5054781", "0.50506383", "0.5047603", "0.50387436", "0.5031656", "0.50293463", "0.50271416", "0.50182", "0.501643", "0.50150466", "0.50141674", "0.500408", "0.49963453", "0.4992178", "0.49893084", "0.49798244", "0.4976535", "0.49730402", "0.4972988", "0.4972483", "0.49711862", "0.49701425", "0.49683833", "0.49621782", "0.49617395", "0.49617326" ]
0.81937546
0
Get a wide banner image from fanart.tv. Currently restricts banners to English only.
def get_banner_image(self, tvdb_id: int) -> Optional[str]: if not tvdb_id: return None if not self.fanart_api_key: raise ValueError("Need Fanart.tv api key for TV titles!") r = self.session.get(f"http://webservice.fanart.tv/v3/tv/{tvdb_id}?api_key={self.fanart_api_key}") if r.status_code == 404: return None res = r.json() error = res.get("error message") if error: if error == "Not found": return None raise ValueError(f"An unexpected error occurred while calling Fanart.tv, {res}") banner = next(( x["url"] for x in (res.get("tvbanner") or []) if x["lang"] == sorted(self.audio, key=lambda x: x.streamorder)[0].language ), None) return banner
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render_banner(self, width=300, height=85):\n img_path = IMG_PATH + os.sep + CARD_BANNER\n banner_img = Image.open(img_path)\n banner_img = banner_img.resize((width, height))\n return banner_img", "def banner_wrapper(banner_url):\n # so simple\n return '{url}<img src=\"{url}\" alt=\"{alt}\">'.format(\n url=banner_url,\n alt='Banner'\n )", "def get_banner(self,context,request):\n ba = queryMultiAdapter((context,request), interfaces.IBanner)\n if not ba:\n return ''\n return ba()", "def getBanner(outputScan):\n try:\n return str(outputScan.split(\", Banner: \", 1)[1][:12])\n #banner = re.search(r\"[0-9A-F]{12}\",outputScan, re.MULTILINE).group()\n #return str(banner)\n except Exception as e:\n print '\\033[91m'+\"ERROR_BANNER\"\n return \"BANNER_ERROR\"", "def choose_banner(banners):\n # simple random\n n = random.randint(0, len(banners)-1)\n return banners[n]", "def __get_high_res_url(country) -> str:\n wiki_stem = \"https://en.wikipedia.org\"\n country_page = requests.get(f\"{wiki_stem}/wiki/{country}\")\n country_doc = HTML(country_page.content)\n [v_card] = country_doc.xpath('.//table[@class=\"infobox geography vcard\"]')\n [flag_elem] = v_card.xpath('.//a[@class=\"image\" and contains(@title, \"Flag\")]')\n flag_page_url = f\"{wiki_stem}{flag_elem.attrib['href']}\"\n flag_page = requests.get(flag_page_url)\n doc = HTML(flag_page.content)\n [flag_url_elem] = doc.xpath('.//div[@id=\"file\"]/a/img')\n return f\"https:{flag_url_elem.attrib['src']}\"", "def get_banner(conn) -> str:\n banner_data = conn.recv(1024)\n banner = banner_data.decode().strip()\n print('Banner: {}'.format(banner))\n return banner", "def banner(self):\n return self._banner", "def getImage(cardTitle, size=\"normal\"):\n page = requests.get(\"https://api.scryfall.com/cards/named?exact=\"+name)\n page_json = json.loads(page.content)\n image_link = page_json[\"image_uris\"][size]\n image_response = requests.get(image_link)\n img = Image.open(BytesIO(image_response.content))\n return img.resize((384, 535)).convert(\"1\")", "def bbs_show_banner(tn, short = True):\n lines = cmd.lban(tn, short_banner = short)\n for line in lines:\n print(filter_tags(line))", "def get_url_for_min_resolution(self, min_height, min_width, image):", "def download_banner(self, banner_path):\n serie = self._root.find('Series')\n banner = unicode(serie.find('banner').text)\n if banner != '' and not os.path.isfile(banner_path):\n urllib.urlretrieve(self.URL_BANNER + banner, banner_path)", "def page_title(id):\r\n\tswitcher = {\r\n\t\t\"404\": \"Error 404: Not Found - WWW2PNG\",\r\n\t\t\"api_activate\": \"API Key Activated - WWW2PNG\",\r\n\t\t\"api_help\": \"API Help - WWW2PNG\",\r\n\t\t\"api_request\": \"API Key Requested - WWW2PNG\",\r\n\t\t\"buried\": \"Manage Buried - WWW2PNG\",\r\n\t\t\"contact\": \"Contact Us - WWW2PNG\",\r\n\t\t\"default\": \"Free Webpage Screenshot Service API with Blockchain Anchoring - WWW2PNG\",\r\n\t\t\"error\": \"Error - WWW2PNG\",\r\n\t\t\"pp\": \"Privacy Policy - WWW2PNG\",\r\n\t\t\"tos\": \"Terms of Service - WWW2PNG\",\r\n\t}\r\n\treturn switcher.get(id, \"WWW2PNG\")", "def get_image(result):\n article_id = result['id']\n id_ = article_id[14:]\n href = article_id[:14]\n\n #FIXME: not working\n image_url = \"http://www.jpress.nli.org.il/Olive/APA/NLI_heb/get/GetImage.ashx?kind=block&href=%s&id=%s&ext=.png\" %(href, id_)\n \n return image_url", "def getThumb(url,tvdbId=None):\n\tret = None\n\tif (tvdbId is not None and Prefs['fanart'] is True):\n\t\tthumb = 
fanartScrapper.getRandImageOfTypes(tvdbId,['tvthumbs'])\n\t\tif thumb is None: thumb = url\n\t\turl=thumb\n\t\n\tif url==R(CRUNCHYROLL_ICON):\n\t\tret = url\n\telse:\n\t\tif url is not None:\n\t\t\ttry:\n\t\t\t\tdata = HTTP.Request(url, cacheTime=CACHE_1WEEK).content\n\t\t\t\tif url.endswith(\".jpg\"):\n\t\t\t\t\tret = DataObject(data, 'image/jpeg')\n\t\t\t\telif url.endswith(\".png\"):\n\t\t\t\t\tret = DataObject(data, 'image/png')\n\t\t\texcept Exception, arg:\n\t\t\t\tLog.Error(\"#####Thumbnail couldn't be retrieved:\")\n\t\t\t\tLog.Error(\"#####\" + repr(Exception) + repr(arg) + url)\n\t\t\t\tret = None\n\n\tif ret is None:\n\t\treturn R(CRUNCHYROLL_ICON)\n\telse:\n\t\treturn ret", "def getNewsIconURL(newsBrain):", "def banner(name):\n print \"#\"\n print \"# {0}\".format(name.encode('utf-8'))\n print \"#\"\n return name", "def Banner():\n main_banner = pyfiglet.figlet_format(\" UTM NAT\", font = \"slant\")\n sub_banner1 = pyfiglet.figlet_format(\"tool\", font = \"isometric1\")\n sub_banner2 = \" -Generate a CSV file of Sophos UTM NAT statements-\"\n sub_banner3 = \" via REST API using the power of Python\"\n\n print()\n print('=' * 62)\n print(main_banner)\n print(sub_banner1)\n print()\n print(sub_banner2)\n print(sub_banner3)\n print()\n print('=' * 62)\n print()", "def get_thumbnail(format):", "def p_banner():\n return random.choice([banner, banner_two, banner_three, banner_four, banner_five])", "def get_banner_layout(app):\n banner_layout = html.Div(className='row', id=\"banner\",\n children=[html.Div(\n html.Img(src=app.get_asset_url(\"252px-Rheem_logo.svg.png\"), style={\"width\": \"30%\",\n \"vertical-align\": \"middle\"}),\n className='two columns'),\n html.Div(html.H3(\"Odin Project: Heat Pump Water Heater Gen V Field Test\",\n className='header', id=\"title\", style={\"letter-spacing\": \"-1.6px\"}),\n className=\"ten columns\")],\n )\n return banner_layout", "def get_berlin_picture(conv: V2DialogflowConversation) \\\n -> V2DialogflowConversation:\n conv.google.tell('Hier ist ein Bild aus Berlin!')\n\n # Use the user_storage field to ensure that the user is shown a different\n # image each time they invoke this intent. 
User_storage works like a\n # dictionary and is serialized with json.dumps.\n if not conv.google.user.user_storage.get('fernsehturm_shown'):\n conv.google.show_basic_card(FERNSEHTURM_CARD)\n conv.google.user.user_storage['fernsehturm_shown'] = True\n else:\n conv.google.show_basic_card(BRANDENBURGER_TOR_CARD)\n conv.google.user.user_storage['fernsehturm_shown'] = False\n\n return conv", "async def olá(self):\r\n\t\tawait self.client.say('© Maddie 2017')\r\n\t\te = Embed()\r\n\t\te.set_image(url='https://cdn.discovery.pgsitecore.com/en-us/-/media/Olay_PathFinder/Images/a/OLAY%20TE%207IN1%20DEEP%20PENETRATING%20MOISTURE%20BODY%20WASH_Front.png?w=460&v=1-201705260605')\r\n\t\tawait self.client.say(embed=e)", "def get_country_image_name(country):\n\n country = country.replace(\" \", \"-\").replace(\".\", \"\").lower()\n return \"%s.png\" % (country)", "def getThumbUrl(url, tvdbId=None):\n\tif (tvdbId is not None and Prefs['fanart'] is True):\n\t\tthumb = fanartScrapper.getRandImageOfTypes(tvdbId,['tvthumbs'])\n\t\tif thumb is not None: return thumb\n\n\n\tif url==R(CRUNCHYROLL_ICON):\n\t\treturn url\n\t\n\treturn url", "def l10n_img(ctx, url):\n return static(l10n_img_file_name(ctx, url))", "def banner_url(self) -> typing.Optional[files.URL]:\n return self.make_banner_url()", "def show_banner():\n print(\"\"\"\n _ _ _ _ _____ _______\n| | | | / \\ | | |_ _\\ \\ / / ____|\n| |_| | / _ \\ | | | | \\ \\ / /| _|\n| _ |/ ___ \\| |___ | | \\ V / | |___\n|_| |_/_/ \\_\\_____|___| \\_/ |_____|\n\n\nA super fast asynchronous http and https prober, to check who is (h)alive.\nDeveloped by gnc\n \"\"\")", "def get_kegg_image(self):\n return 'http://rest.kegg.jp/get/%s/img' % self.kegg_id", "def present_banner():\n writer(BANNER, FORMAT[\"BANNER\"])\n writer(\" \" * 30 + f\"version {VERSION}\")", "def getSeasonThumb(tvdbId, season, rand=True):\n\treturn fanartScrapper.getSeasonThumb(tvdbId, season, rand)", "def my_banner(bannerString):\n print(len(bannerString) * \"!\")\n print(bannerString)\n print(len(bannerString) * \"!\")", "def get_album_art_url(html):\n\treturn re.findall('img src=\"(.*?)\" width=\"500\"', html)[0]", "def show_me():\n # Scumbag thumbnail code\n try:\n from PIL import Image\n except ImportError:\n pass\n else:\n filename = os.path.join(app.static_folder, 'img', 'badumtss.png')\n image = Image.open(filename)\n\n return render_template('show_me.html')", "def get_an_image(text):\n\n # Get the second or fourth word\n index = random.choice([1, 3])\n text = text.split()[index]\n print(text)\n sort = random.choice([\"relevance\", \"interestingness-desc\"])\n print(sort)\n\n from flickr_search_downloadr import flickr_search_downloadr\n\n filename = flickr_search_downloadr(text,\n tags=None,\n user_id=\"internetarchivebookimages\",\n sort=sort,\n quantity=1,\n number=None,\n size=\"m\",\n title=None,\n noclobber=True,\n outdir=\"E:/stufftodelete\")\n img = Image.open(filename[0])\n return img", "def small_image(self) -> Optional[str]:\n return pulumi.get(self, \"small_image\")", "def getImageSize(language=None):", "def shorten_with_is_gd(url):\n u = urllib2.urlopen(\"http://is.gd/api.php?longurl=\"+url)\n return u.read()", "def logo(self):\n from app import textify\n try:\n asset = self.app.module_map.uploader.get(self.barcamp.logo)\n except AssetNotFound:\n asset = None\n if not asset:\n return u\"\"\n v = asset.variants['logo_full']\n url = self.app.url_for(\"asset\", asset_id = v._id, _full = True)\n alt = 'Logo '+self.barcamp.name# + \" - \" + textify(self.barcamp.seo_description)\n alt = 
alt.replace('\"', '&quot;')\n alt = alt.replace(\"'\", '&quot;')\n return \"\"\"<a title=\"%s\" href=\"%s\"><img alt=\"%s\" class=\"img-responsive\" src=\"%s\" width=\"%s\" height=\"%s\"></a>\"\"\" %(\n self.barcamp.name,\n self.handler.url_for(\"barcamps.index\", slug = self.barcamp.slug, _full = True),\n alt,\n url,\n v.metadata['width'],\n v.metadata['height'])", "def get_hd_image_url(image_url):\n query_params = ['?w=2000', '?w=1800', '?w=1480', '?w=1380']\n for param in query_params:\n hd_image_url = image_url + param\n response = requests.get(hd_image_url)\n if response.status_code == 200:\n return hd_image_url\n return image_url", "async def bingstrict(self, *, text):\n settings = loadauth()\n operation = 'strictimagesearch'\n if settings['apikey'] == '' or settings['apikey'] == 'blank':\n return await self.bot.say(\"Missing or incorrect API key. Please \" +\n \"contact the owner to add an API key.\")\n apikey = settings['apikey']\n text, limit = self.limitget(text)\n result = self.getfrombing(apikey, text, limit, operation)\n bottext = self.obtainresult(result, operation)\n return await self.bot.say(bottext)", "def get_social_media_text(self, site):\n\n text = \"\"\n if not \"Official Site\" in site.title():\n text = site.replace(\"Official \", \"\")\n else:\n text = site\n\n if len(text) > 20:\n text = site[:14] + \"...\"\n else:\n text = text\n\n return text", "def medium_image(self) -> Optional[str]:\n return pulumi.get(self, \"medium_image\")", "async def gen_banner(self, member):\n base = deepcopy(self.images[randint(0, len(self.images) - 1)])\n\n # Draw the username\n idraw = ImageDraw.Draw(base)\n idraw.text(self.banner_cfg[\"TextPos\"], member.name, fill=tuple(self.banner_cfg[\"Text_Color\"]), font=self.font)\n \n\n # Get user avatar\n avatar_url = member.avatar_url\n if(avatar_url==None):\n avatar_url = member.default_avatar_url\n # Wow, we can really just load it asynchronously from the API now? That's dope\n avatar = await avatar_url.read()\n # We need to save it as a file in memory to get the size so we can load it as an image.\n with io.BytesIO() as fb:\n fb.write(avatar)\n fb.seek(0, 0)\n avatar = Image.open(fb)\n avatar = avatar.resize(self.banner_cfg[\"AvatarSize\"])\n if (self.banner_cfg[\"Rounded\"][\"is_rounded\"]):\n avatar = self.round_corners(avatar, self.banner_cfg[\"Rounded\"][\"px\"])\n # Now that we have our avatar, we can slap it into our banner.\n final = Image.new(\"RGBA\", base.size)\n final.paste(avatar, self.banner_cfg[\"AvatarPos\"])\n if(self.banner_cfg[\"AvatarLayer\"]==\"front\"):\n final = Image.alpha_composite(base, final)\n if(self.banner_cfg[\"AvatarLayer\"]==\"back\"):\n final = Image.alpha_composite(final, base)\n \n # Lastly, let's package it as a file to be uploaded.\n with io.BytesIO() as fb:\n final.save(fb, format=\"png\")\n fb.seek(0, 0)\n \n return discord.File(fb, filename=\"Welcome.png\")", "async def bing(self, *, text):\n settings = loadauth()\n operation = 'moderateimagesearch'\n if settings['apikey'] == '' or settings['apikey'] == 'blank':\n return await self.bot.say(\"Missing or incorrect API key. 
Please \" +\n \"contact the owner to add an API key.\")\n apikey = settings['apikey']\n text, limit = self.limitget(text)\n result = self.getfrombing(apikey, text, limit, operation)\n bottext = self.obtainresult(result, operation)\n return await self.bot.say(bottext)", "def make_banner_url(self, *, ext: typing.Optional[str] = None, size: int = 4096) -> typing.Optional[files.URL]:\n if self.banner_hash is None:\n return None\n\n if ext is None:\n if self.banner_hash.startswith(\"a_\"):\n ext = \"gif\"\n else:\n ext = \"png\"\n\n return routes.CDN_USER_BANNER.compile_to_file(\n urls.CDN_URL,\n user_id=self.id,\n hash=self.banner_hash,\n size=size,\n file_format=ext,\n )", "async def bingadult(self, ctx, *, text):\n settings = loadauth()\n channel = ctx.message.channel\n server = ctx.message.server\n operation = 'adultimagesearch'\n check = self.checkadult(server, channel, settings)\n if check is False:\n return await self.bot.say(\"Usage of %bingadult is disabled in \" +\n \"this server and/or channel.\")\n if settings['apikey'] == '' or settings['apikey'] == 'blank':\n return await self.bot.say(\"Missing or incorrect API key. Please \" +\n \"contact the owner to add an API key.\")\n apikey = settings['apikey']\n text, limit = self.limitget(text)\n result = self.getfrombing(apikey, text, limit, operation)\n bottext = self.obtainresult(result, operation)\n return await self.bot.say(bottext)", "def _dog():\n fh = open(\"/home/italojs/dev/python/api-flask-noalvo-demo/app/domain/live_stream/static/funny-dogs.jpg\", \"rb\")\n frame = fh.read()\n fh.close()\n return frame", "def get_thumbnail_url():", "def banner_ascii():\n print(\"\")\n print(f\"\\n{RED} Steganography Tool{RESET}\")\n print(f\"{RED} Made By {RESET}\")\n print(f\"{RED} Ehthe Samul Islam Laskar USN:1DS16CS712 {RESET}\")\n print(f\"{RED} B Padma USN:1DS19CS420{RESET}\")\n print(f\"{RED} Nikhil D Kanyal USN:1DS17CS731{RESET}\")\n print(f\"{YELLOW}Type 'help' to see commands{RESET}\")", "def get_image(self, country: str) -> PngImageFile:\n url = self.__url_dict[country]\n if self.__high_res:\n url = self.__get_high_res_url(country)\n\n file_path = f\"flag_cache/{self.__res_str}/{country}.png\"\n try:\n return Image.open(file_path)\n except IOError:\n print(f\"> Getting Flag of {country}: {url}\")\n return self.get_image_from_url(url, file_path)", "def analyze_static_image(path: str):\n image = Image.open(path)\n width, height = image.size\n image.close()\n\n media_info = {\n 'width': width,\n 'height': height,\n 'resolution': round(width * height / 1_000_000, 2),\n 'size': os.path.getsize(path),\n 'type': 'image',\n 'signature': '', # TODO\n 'signature_type': '', # TODO\n }\n\n return media_info", "def return_dispense_media():\n media = {\"50_ug/ml_Kanamycin\": \"lb_miller_50ug_ml_kan\",\n \"100_ug/ml_Ampicillin\": \"lb_miller_100ug_ml_amp\",\n \"100_ug/mL_Spectinomycin\": \"lb_miller_100ug_ml_specto\",\n \"30_ug/ml_Kanamycin\": \"lb_miller_30ug_ml_kan\",\n \"15_ug/ml_Tetracycline\": \"lb_miller_15ug_ml_tet\",\n \"50_ug/ml_Kanamycin_25_ug/ml_Chloramphenicol\":\n \"lb_miller_50ug_ml_kan_25ug_ml_cm\",\n \"25_ug/ml_Chloramphenicol\": \"lb_miller_25ug_ml_cm\",\n \"LB_miller\": \"lb_miller_noAB\",\n \"TB_100_ug/ml_Ampicillin\": \"tb_100ug_ml_amp\",\n \"TB_50_ug/ml_Kanamycin\": \"tb_50ug_ml_kan\"}\n return (media)", "def get_article():\n bbc_request = requests.get('https://www.bbc.com/ukrainian')\n soup = BeautifulSoup(bbc_request.text, \"html.parser\")\n raw_article = soup.find_all('div', {'class': \"bbc-1vo75s2-TextGridItem 
e19k1v2h0\"})[0].find_all(text=True, recursive=True)\n\n title = raw_article[0]\n description = raw_article[1]\n publish_time = raw_article[2]\n href = soup.find_all('div', {'class': 'bbc-1vo75s2-TextGridItem e19k1v2h0'})[0].find('a', {'class': 'bbc-11m194t-Link evnt13t0'})['href']\n link = f' https://www.bbc.com/{href}'\n article = f\"\"\"⚠️ <b>Тема</b>: {title}\n \\n📌 <b>Короткий опис</b>: {description}\n \\n🕒 <b>Опубліковано</b>: {publish_time}\n \\n➡️ <b>Повний текст</b>: {link}\"\"\"\n return article", "def set_image(self):\r\n return loader.GFX['title_box']", "def get_leagues_country(wd):\n try:\n country = wd.find_element_by_tag_name(\"img\").get_attribute(\"alt\")\n return country\n except:\n return \"N/A Country\"", "def get_image_url():", "def get_image(self, img):\n if img == \"rss\":\n img = \"feed-icon-16x16.png\"\n loc = os.path.abspath(os.path.dirname(__file__))\n img = os.path.join(loc, img)\n if not os.path.exists(img):\n raise FileNotFoundError( # pragma: no cover\n f\"Unable to find {img!r}.\")\n return img\n else:\n raise FileNotFoundError( # pragma: no cover\n f\"Unable to get image name: {img!r}.\")", "def get_url_image(artist, track, size):\n s = size.lower()\n if s not in ['small', 'medium', 'large', 'extralarge']:\n return None\n track_infos = get_infos(artist, track)\n for image in track_infos['track']['album']['image']:\n if image['size'] == s:\n return image['#text']\n return None", "def create_banner_list():\n template_vars = {\n 'title' : 'Banners - ' + sitesettings.SITE_NAME,\n 'siteurl' : sitesettings.SITE_URL,\n 'sitename' : sitesettings.SITE_NAME,\n 'meta_desc' : 'List of step-up banners in Final Fantasy Brave Exvius (FFBE)',\n 'last_four_banners' : nav.get_last_four_banners('all'),\n 'all_banner_info' : get_all_banner_info(),\n }\n\n bn_path = os.path.join(sitesettings.LOCAL_FILE_PATH, 'banner')\n\n if not os.path.exists(bn_path):\n os.makedirs(bn_path)\n\n template_file = 'bannerlist.html'\n html_file_loc = os.path.join(bn_path, 'index.html')\n generatehtml.generate_html(\n html_file_loc, template_file, template_vars, os.path.join(os.getcwd(), 'templates'))", "def imageGetAlt(soup):\n img = soup.find('img', id='main-image', class_='imageLeft0 altImage')\n img = str(img)\n imgURL = re.findall('https?://.+jpg', img)\n response = requests.get(imgURL[0])\n photo = Image.open(BytesIO(response.content))\n img = imgURL[0]\n\n return img", "def wiki_image(pagetext):\n images = [i for i in pagetext.images if i not in EXCLUDED_IMAGES]\n if len(images) > 0:\n return images[0]\n else:\n return ''", "def convert_to_high_res(url):\n return add_string_to_image_url(url, 'high-res')", "def _extract_image_short_id(scan_result: dict[str, Any]) -> str:\n\n if \"id\" not in scan_result:\n return \"sha256:unknown\"\n\n image_id: str = scan_result[\"id\"]\n\n if image_id.startswith(\"sha256:\"):\n return image_id[:17]\n return image_id[:10]", "def get_brand_name(container) -> Optional[str]:\r\n brand_container = container.find_all(\"a\", {\"class\": \"item-brand\"})\r\n # product_brand: List[] = brand_container[0].img[\"title\"]\r\n if len(brand_container) == 0:\r\n return None\r\n return brand_container[0].img[\"title\"]", "def image_to_text(filename):\n\n client = AipOcr(APP_ID, API_KEY, SECRET_KEY)\n def get_file_content(filePath):\n with open(filePath, 'rb') as fp:\n return fp.read()\n\n image = get_file_content(filename)\n #res=client.basicGeneralUrl(url);\n res = client.general(image)\n\n text = ''\n\n for item in res[\"words_result\"]:\n text += \"%s\\n\" % 
item[\"words\"]\n\n return text", "def background_image(self, **kwargs):\n try:\n asset = self.app.module_map.uploader.get(self.barcamp.background_image)\n except AssetNotFound:\n asset = None\n if not asset:\n return u\"\"\n v = asset.variants['full']\n url = self.app.url_for(\"asset\", asset_id = v._id)\n amap = html_params(**kwargs)\n return \"\"\"<img src=\"%s\" width=\"%s\" height=\"%s\" %s>\"\"\" %(\n url,\n v.metadata['width'],\n v.metadata['height'],\n amap)", "def banner():\n\n def random_color():\n valid_colors = (\"red\", \"green\", \"yellow\", \"blue\", \"magenta\", \"cyan\")\n return random.choice(valid_colors)\n\n autoRecon = rf\"\"\"\n _____________ ____ ________________\n /___/___ \\ / / | /___/__ \\ Mr.P-Millz _____\n O.G./ / _ \\______/__/ |______|__|_____ * \\_________________/__/ |___\n __/__/ /_\\ \\ | | \\ __\\/ _ \\| | __/ __ \\_/ ___\\/ _ \\| |\n | | ___ \\| | /| | ( |_| ) | | \\ ___/\\ \\__( |_| ) | |\n |___|____/\\__\\____|____/_|__|\\_\\____/|__|____|_ /\\___ |\\___ \\____/|___| /\n gtihub.com/Knowledge-Wisdom-Understanding \\___\\/ \\__\\/ \\__\\_/ v{V} \\___\\/\n\n\"\"\"\n\n def print_art(msg, color):\n colored_art = colored(msg, color=color)\n print(colored_art)\n\n color = random_color()\n print_art(autoRecon, color)", "def print_banner(text):\n print(Figlet(font='smslant').renderText(text))", "def get_best_image_url(self, page: MediaWikiPage, max_width: int) -> str:\n LOG.debug(\"Fetching best image for %s\", page.title)\n image = None\n\n # try direct API call\n api_url = (\n \"https://en.wikipedia.org/w/api.php?action=query&\"\n \"formatversion=2&prop=pageimages&format=json&\"\n f\"pithumbsize={max_width}&titles={page.title}\"\n )\n response = requests.get(api_url)\n if response.status_code < 300:\n page_image_data = response.json().get(\n 'query', {}).get('pages', [False])[0]\n if page_image_data:\n image = page_image_data.get('thumbnail', {}).get('source')\n\n # Else fallback to pymediawiki page scraping\n if image is None:\n thumbnails = page.logos\n if len(thumbnails) > 0:\n thumbnail = thumbnails[0]\n # Get hi-res image if available.\n # This translates image urls between the following two formats\n # https://upload.wikimedia.org/wikipedia/commons/thumb/d/d4/Sialyl_lewis_a.svg/200px-Sialyl_lewis_a.svg.png\n # https://upload.wikimedia.org/wikipedia/commons/d/d4/Sialyl_lewis_a.svg\n full_image = '/'.join(thumbnail.replace('/thumb/',\n '/').split('/')[:-1])\n image = full_image if full_image in page.images else thumbnail\n elif len(page.images) > 0:\n try:\n image = next(img for img in page.images if img.split(\n '/')[-1] not in EXCLUDED_IMAGES)\n except StopIteration:\n LOG.warning('Could not find an image for %s', page.title)\n\n LOG.debug('Image selected: %s', image)\n return image or DEFAULT_IMAGE", "def print_banner(dog=True):\n banner = \"\"\n if dog:\n banner += \" ____,'`-,\\n\"\n banner += \" _,--' ,/::.;\\n\"\n banner += \" ,-' ,/::,' `---.___ ___,_\\n\"\n banner += \" | ,:';:/ ;'\\\"';\\\"`--./ ,-^.;--.\\n\"\n banner += \" |: ,:';,' ' `. ;` `-.\\n\"\n banner += \" \\\\:.,:::/;/ -:. ` | ` `-.\\n\"\n banner += \" \\\\:::,'//__.; ,; , , :.`-. :. | ; :.\\n\"\n banner += \" \\\\,',';/O)^. :' ; : '__` ` :::`. .:' )\\n\"\n banner += \" |,' |\\\\__,: ; ; '/O)`. :::`; ' ,'\\n\"\n banner += \" |`--'' \\\\__,' , ::::( ,'\\n\"\n banner += \" ` , `--' ,: :::,'\\\\ ,-'\\n\"\n banner += \" | ,; , ,::' ,::: |,'\\n\"\n banner += \" |,: .( ,:::| `\\n\"\n banner += \" ::'_ _ :: ,::/:|\\n\"\n banner += \" ,',' `-' \\\\ `. 
,:::/,:|\\n\"\n banner += \" | : _ _ | ' ,::,' :::\\n\"\n banner += \" | \\\\ O`'O ,', , :,' ;::\\n\"\n banner += \" \\\\ `-'`--',:' ,' , ,,' ::\\n\"\n banner += \" ``:.:.__ ',-',' ::'\\n\"\n banner += \" -hrr- `--.__, ,::. ::'\\n\"\n banner += \" |: ::::. ::'\\n\"\n banner += \" |: :::::: ,::'\\n\"\n banner += \"########################################################\\n\"\n banner += \"# ruffer-overflow v0.2 #\\n\"\n banner += \"# don't \\\"bark\\\" up the wrong tree. #\\n\"\n banner += \"#======================================================#\\n\"\n banner += \"# weak-sauce tool for buffer-overflow #\\n\"\n banner += \"# please don't crime with it. #\\n\"\n banner += \"########################################################\\n\"\n print(banner)", "def get_possible_thumbnail(self):\n meta = self.get_meta_data()\n print meta\n if \"og:image\" in meta:\n return meta[\"og:image\"]\n elif \"twitter:image:src\" in meta:\n return meta[\"twitter:image:src\"]\n else:\n images = self.get_image_data()\n temp_url = \"\"\n temp_width = 0\n for img in images:\n if img[\"image_width\"] > temp_width:\n temp_url = img[\"image_url\"]\n temp_width = img[\"image_width\"]\n\n return temp_url", "def band_url(scene, band):\n\n img = scene + '_B' + str(band) + '.TIF'\n url_components = scene.split('_')\n sensor, level, path, row = url_components[0], url_components[5], url_components[2][:3], url_components[2][3:]\n \n return GOOGLE_STORAGE + sensor + '/' + level + '/' + path + '/' + row + '/' + scene + '/' + img", "def full_photo():\n top = Toplevel()\n top.title(\"Full APOD Photo\")\n top.iconbitmap('10.APOD Viewer/rocket.ico')\n\n #Load the full image to the top image\n img_label = Label(top, image=full_img)\n img_label.pack()", "def getRandomLickTitle():\n wikipedia.set_rate_limiting(True)\n try:\n titles = wikipedia.random(10)\n except wikipedia.exceptions.HTTPTimeoutError as e:\n print(f\"Wikipedia timout exception: {e}\")\n time.sleep(TIMEOUT_BACKOFF)\n main()\n except wikipedia.exceptions.WikipediaException as e:\n print(f\"Wikipedia exception: {e}\")\n sys.exit(1)\n except Exception as e:\n print(f\"Exception while fetching wiki titles: {e}\")\n sys.exit(1)\n\n for title in titles:\n if words.isLick(title):\n return title\n return None", "def make_content(museum, image_url, image_name, image_artist, filename):\n message = \"From the \" + museum\n # if image_name is not None:\n # message += \" with title \" + image_name\n if image_artist is not None:\n message += \" by \" + image_artist\n\n r = requests.get(image_url)\n if r.status_code == 200:\n with open(filename, mode=\"wb\") as image:\n for chunk in r:\n image.write(chunk)\n else:\n return None\n return (message)", "async def bImage(self, ctx, query, num=1):\r\n\r\n webpage = \"http://www.bing.com/images/search?q=\" + query.replace(\" \", \"+\") + \"&view=detailv2&adlt=off&selectedIndex=0\"\r\n\r\n html_content = urllib.request.urlopen(webpage)\r\n str_html = html_content.read().decode(\"utf-8\")\r\n match = re.findall(r'src=\"http://?([^\\'\" >]+)', str_html)\r\n if match:\r\n try:\r\n await ctx.send(\"http://\" + match[num-1])\r\n except (Exception):\r\n await ctx.send(\"```No \" + str(num) + \"th Result```\")\r\n else:\r\n await ctx.send(\"```No Image Found```\")", "def image_webp():\n data = resource(\"images/wolf_1.webp\")\n return Response(data, headers={\"Content-Type\": \"image/webp\"})", "def get_art(data):\n # type: (dict) -> dict\n image = data.get(\"resources\", [{}])[0].get(\"image\")\n if not image or image == LOC_STATIC_IMAGE: # 
default\n return ku.art(LOC_DEFAULT_IMAGE_URI)\n if image.startswith(\"//\"): # no protocol\n return ku.art(\"http:{}\".format(image))\n return ku.art(image)", "def audience_simple(country):\r\n if country == 'US':\r\n return 'USA'\r\n elif country == 'IN':\r\n return 'India'\r\n else:\r\n return 'Other'", "def getimage(self):", "def small_image(self):\n pass", "def render_country_flag(self, width=40, height=27):\n try:\n flag_sign = self.user_info.country\n except:\n flag_sign = \"unk\"\n flag_path = f\"{IMG_FLAG_PATH}/{flag_sign}.png\"\n flag_img = Image.open(flag_path)\n flag_img = flag_img.resize((width, height))\n return flag_img", "async def dog(self):\r\n dog_api = \"https://dog.ceo/api/breeds/image/random\"\r\n json_data = requests.get(dog_api).json()\r\n dogimage = json_data['message']\r\n await self.bot.say(dogimage)", "def isccp4banner(self, text):\n # Try standard CCP4 banner\n result = self.isccp4banner_standard(text)\n if not result:\n # Try Phaser-style CCP4 banner\n result = self.isccp4banner_phaser(text)\n if not result:\n # Try old-style CCP4 banner\n result = self.isccp4banner_old(text)\n return result", "def return_large(web_detection_result):\n if web_detection_result.full_matching_images:\n return web_detection_result.full_matching_images[0].url\n elif web_detection_result.partial_matching_images:\n return web_detection_result.partial_matching_images[0].url\n else:\n return None", "def get_image(self, size):\n smallicon = self._item.get(\"icon_url\")\n\n if not smallicon:\n return \"\"\n\n fullurl = self._cdn_url + smallicon\n dims = size\n\n if size == self.ITEM_IMAGE_SMALL: dims = \"96fx96f\"\n elif size == self.ITEM_IMAGE_LARGE: dims = \"512fx512f\"\n\n return fullurl + '/' + dims", "async def cat(self):\r\n async with aiohttp.request('get', 'http://thecatapi.com/api/images/get?format=src') as resp:\r\n await self.bot.say(resp.url)", "def read_text(text=\"刘菁我爱你\"):\n\tpygame.init()\n\tfont = pygame.font.Font('/System/Library/Fonts/Supplemental/Songti.ttc', 26)\n\trtext = font.render(text, True, (0, 0, 0), (255, 255, 255))\n\n\tif os.path.exists(CHAR_IMG):\n\t\tos.remove(CHAR_IMG)\n\tpygame.image.save(rtext, CHAR_IMG)\n\t\n\timg = cv2.imread(CHAR_IMG)\n\timg = cv2.cvtColor(img,cv2.COLOR_RGB2GRAY)/255\n\n\treturn img", "def get_image_link():\n image_links = set()\n supplemented_keyword = urllib.parse.quote(\n supplemented_keywords[random.randint(0,\n len(supplemented_keywords) - 1)],\n safe='')\n main_keyword = urllib.parse.quote(\n main_keywords[random.randint(0,\n len(main_keywords) - 1)], safe='')\n\n # print('the theme of cats: ' + supplemented_keyword)\n\n search_query = (main_keyword + ' ' + supplemented_keyword).replace(\n ' ', '%20')\n url = 'https://www.google.com/search?q=' + \\\n search_query + '&source=lnms&tbm=isch'\n image_links = image_links.union(parse_page(url))\n\n image_link = list(image_links)[random.randint(0, len(image_links) - 1)]\n # print('link:' + image_link)\n\n while 'https://' not in image_link or r'\\\\u' in image_link or '.jpg' not in image_link:\n image_link = list(image_links)[random.randint(0, len(image_links) - 1)]\n # print('link:' + image_link)\n\n return image_link", "def extractParticular(link):\n webpage = openWebsite(link).read()\n nameIndexStart = webpage.index('<title>') + 7\n nameIndexStop = webpage[nameIndexStart:].index('</title>') + nameIndexStart - 1\n name = webpage[nameIndexStart : nameIndexStop].split('-')[0]\n name = \" \".join(name.split())\n name = re.sub('/', '', name)\n\n avatarName = RESTAURANTPATH + 
'{}.png'.format(\"\".join(name.split()).lower())\n captureImage(link, avatarName)\n\n return name, avatarName", "def image_preview(self):\r\n h = '<img src=\"%s\" alt=\"Campaign badge\"/>' % self.image.url\r\n return mark_safe(h)", "def get_blurb(album_url: str) -> str:\n response = requests.get(album_url)\n soup = BeautifulSoup(response.text, \"html.parser\")\n meta = soup.find(\"meta\", content=True)\n blurb = html.unescape(meta.get(\"content\"))\n return blurb", "def get_label(client, label):\n image_name = get_image_name()\n image = client.images.get(image_name)\n try:\n return image.labels[label]\n except KeyError:\n raise Exception(f\"Image should have a label '{label}'\")", "def get_random_ad(self, ad_zone, ad_category=None):\r\n qs = self.get_query_set().filter(start_showing__lte=now(),\r\n stop_showing__gte=now(),\r\n zone__slug=ad_zone,\r\n sites=Site.objects.get_current().pk\r\n ).select_related('textad',\r\n 'bannerad')\r\n if ad_category:\r\n qs = qs.filter(category__slug=ad_category)\r\n try:\r\n ad = qs.order_by('?')[0]\r\n except IndexError:\r\n return None\r\n return ad", "def print_banner(description):\n banner = len(description)\n if banner > 200:\n banner = 200\n\n # First banner\n print(\"\\n\")\n for _ in range(banner):\n print(\"*\", end=\"\")\n\n # Add description\n print(\"\\n%s\" % description)\n\n # Final banner\n for _ in range(banner):\n print(\"*\", end=\"\")\n print(\"\\n\")", "def get_random_article(sourceURL):\n sourceURL = sourceURL.encode().decode()\n soup = bs.BeautifulSoup(urllib.request.urlopen(sourceURL),'lxml')\n return (('https://en.wikipedia.org/wiki/' + (str(soup.title.string))[0:-12]).replace(' ', '_'))", "def quality(value: str) -> str:\n if \"HDTV\" in value:\n return \"HDTV\"\n else:\n return \"SD\"", "def get_image():\n\n url = 'http://skyview.gsfc.nasa.gov/cgi-bin/images'\n params = dict(Position='%s,%s' % (source['ra'], source['dec']),\n Survey=source['survey'].val,\n Return='GIF')\n response = requests.get(url, params=params, stream=True)\n with open(files['image.gif'].rel, 'wb') as out_file:\n shutil.copyfileobj(response.raw, out_file)", "def get_image(article):\n image_url = None\n media = article.get('media', None)\n if media is not None:\n for m in media:\n media_type = m['media_type'].get('name', None) \n if media_type == 'image':\n image_url = m['url']\n break\n \n return image_url" ]
[ "0.63562655", "0.5907053", "0.58398026", "0.56723255", "0.55253977", "0.55232245", "0.55212253", "0.5516398", "0.532808", "0.5248593", "0.52476937", "0.52444804", "0.5240686", "0.51916087", "0.51215434", "0.5034326", "0.5020774", "0.5014858", "0.49858487", "0.4975781", "0.4969387", "0.49642497", "0.4920303", "0.48985156", "0.48932984", "0.48927313", "0.48841828", "0.4860148", "0.4858901", "0.48506686", "0.48444024", "0.47976086", "0.47895578", "0.47669291", "0.4762384", "0.47617218", "0.47575945", "0.47552404", "0.47548664", "0.47526976", "0.47437778", "0.47278827", "0.47246608", "0.47196552", "0.47007462", "0.4678757", "0.46607965", "0.4656632", "0.46484753", "0.46387354", "0.4628671", "0.46214494", "0.46157283", "0.46132424", "0.46123368", "0.46112245", "0.46043566", "0.4594605", "0.4586861", "0.45844734", "0.45821637", "0.45806965", "0.45688552", "0.4552662", "0.45516112", "0.45490578", "0.45486382", "0.4545503", "0.45325172", "0.45286527", "0.45110768", "0.45054942", "0.44885054", "0.44884393", "0.44813308", "0.44640306", "0.44596902", "0.44596845", "0.4444592", "0.44440463", "0.44438717", "0.44371805", "0.44369182", "0.4435092", "0.44299436", "0.4427421", "0.44260168", "0.44233412", "0.4419447", "0.4417107", "0.44170454", "0.44131395", "0.44113317", "0.44105744", "0.44099113", "0.4408794", "0.44056284", "0.44046047", "0.44035193", "0.44016245" ]
0.6869307
0
Return a list of a brief subtitle overview per subtitle.

e.g.
English, Forced, SubRip (SRT)
English, SubRip (SRT)
English, SDH, SubRip (SRT)
Spanish, Latin American (SDH), SubRip (SRT)

The bit of text between the Language and the Subtitle format is the Track Title. It can be of any format, but it is recommended to be used as shown above.

It will be returned as a list of strings with the ` ` already prepended to each entry.
def get_subtitle_print(subs: List[Track]) -> List[str]:
    data = []
    if not subs:
        data.append("--")
    for sub in subs:
        line_items = []

        # following sub.title tree checks and supports three different language and title scenarios
        # The second scenario is the recommended option to choose if you are open to choosing any
        # The third scenario should be used if you have nothing unique to state about the track
        # | Language     | Track Title                   | Output                                        |
        # | ------------ | ----------------------------- | --------------------------------------------- |
        # | es / Spanish | Spanish (Latin American, SDH) | - Spanish (Latin American, SDH), SubRip (SRT) |
        # | es / Spanish | Latin American (SDH)          | - Spanish, Latin American (SDH), SubRip (SRT) |
        # | es / Spanish | None                          | - Spanish, SubRip (SRT)                       |
        language = pycountry.languages.get(alpha_2=sub.language).name
        if sub.title:
            if language.lower() in sub.title.lower():
                line_items.append(sub.title)
            else:
                line_items.append(f"{language}, {sub.title}")
        else:
            line_items.append(language)
        line_items.append(sub.format.replace("UTF-8", "SubRip (SRT)"))

        line = "- " + ", ".join(line_items)
        data += [
            (" " + x if i > 0 else x)
            for i, x in enumerate(textwrap.wrap(line, 64))
        ]
    return data
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_title(self) -> list:\n scanning = False # start of a title is found, this may be the second of later part of that.\n ret = [] # to return\n temp = [] # deal with mutiple line titles.\n for page in self.pdf.pages:\n text = page.extract_text()\n # it's possible that a blank page exists which will let text be None.\n if text is None:\n continue\n lines = text.split('\\n')\n\n for line in lines:\n if self.__is_part_of_title(line):\n # middle part of a title\n if scanning:\n temp.append(line)\n # find a new title\n else:\n scanning = True\n temp = [line]\n else:\n # just find an entire title\n if scanning:\n scanning = False\n ret.append(\"\".join(temp))\n # remove wrong titles ( maybe trigger words occur at other part of the document )\n for title in ret:\n if self.title_keyword not in title:\n ret.remove(title)\n return ret", "def get_subtitles(self, title):\n return library.subtitles.get_subtitle_url(title)", "def get_title(self):\n return [i['title'] for i in self]", "def getatitle(allcontent, corpus):\n for i in range(0, len(allcontent)):\n words = re.split(r'\\s+', allcontent[i])\n if words[0] == \"Title\":\n for j in range(2, len(words)):\n if len(processword(words[j])) > 0:\n corpus.append(processword(words[j]))", "def getTitle(movieInfo):\n if \"title\" in movieInfo:\n #We remove the punctuation\n title = \"\".join(c for c in movieInfo[\"title\"] if c not in punctuation)\n #We return the title as a list of words in the right format\n return [ _format(w) for w in title.split() ]\n else:\n raise AttributeError(\"%s instance has no attribute title\" % movieInfo)", "def mush_title(title):\n words = title.split(\" \")\n mushed_title = \"\"\n for word in words:\n mushed_title += word\n return [mushed_title]", "def book_title(title):\n # this will capitalize the first letter of every word\n title = title.title()\n pre_title = []\n pre_title = title.split(\" \")\n new_title = \"\"\n for word in pre_title:\n # If the word is the first word of the title it has to be capitalize\n if word != pre_title[0]:\n # If the word is in the small word list make it lower case\n if word.lower() in small_words:\n word = word.lower()\n new_title = new_title + word + ' '\n# Remove the lagging space \n return new_title.strip()", "def title_words(self):\n\n if self._title_words == []:\n for s in self.title():\n for w in s.split():\n self._title_words.append(w)\n\n return self._title_words", "def list_titles(genre):\n text = genre_html(genre)\n num_titles = text.count('title=')\n\n titles = []\n for i in range(num_titles):\n start = text.find('title=')\n end = text[start+7:].find('\">')\n title = text[start+7:start+end]\n titles.append(title)\n text = text[start+7:]\n\n return titles", "def subtitle(self, txt):\n num = len(txt)\n ticks = \"-\" * num\n print(txt)\n print(ticks)", "def getlistofpossibletitles(fileitem,shows):\n title = []\n title.append(fileitem)\n lookfor = fileitem.replace(\".\",\" \")\n title.append(lookfor)\n lookfor = fileitem.replace('-',\" \")\n title.append(lookfor)\n return title", "def extract_subtitle_track(path_to_mkv):\n handler = SubtitleHandler()\n with open(path_to_mkv, \"rb\") as fp:\n mkvparse.mkvparse(fp, handler)\n\n return handler.subs", "def getFullCourseTitle(self, brain):\n full_title = ''\n\n id = brain.getCourseId\n if id:\n full_title = '%s - ' %id\n full_title += brain.Title\n term = brain.getTerm\n if term:\n full_title += ', %s' %term\n\n return full_title", "def Titles(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('titles', default)\n 
return [HEP.TitleObject(i) for i in tmp]", "def job_subtitle(self, job):\n return str(job)[:max(8, self._project_min_len_unique_id())]", "def check_title(title_list):\n for w_index in range(len(title_list)):\n title_list[w_index] = title_list[w_index].replace('_', ' ')\n return [word for word in title_list if word.istitle()]", "def TitleInfo(currentFile):\n Title=[]\n with open(currentFile) as fileIn:\n print(':\\033[1;31mI\\033[1;m')\n print('PDB File:\\033[1;31m %s\\033[1;m'%currentFile)\n line = fileIn.readline()\n while line:\n if line.startswith('TITLE'):\n Title.append(line)\n\n line = fileIn.readline()\n if len(Title) == 1:\n Str = \"\".join(Title)\n x = Str.replace('TITLE', '')\n Str1 = x.lstrip()\n print('Title: %s'%Str1)\n if len(Title) > 1:\n #Str = \"\".join(l)\n t =(Title[0])\n z = (Title[1])\n t1 = t.replace('TITLE', '')\n z1 = z.replace('TITLE', '')\n z2 = z1.replace('2', '')\n t2 = t1.strip()\n z3 = z2.strip()\n print('Title:%s'%t2+z3)\n #return Title", "def all_title() -> list:\n return [i[\"title\"] for i in Blogs_Manager.TablePost.all_query()]", "def title_string(self):\n return ' '.join(self.title).replace(' - ', '')", "def get_subtitles(self, index: int):\n\n match = self.re_subs[index - 1]\n start = convert_subs_time(match[1])\n end = convert_subs_time(match[2])\n subtitles = match[3]\n subtitles = clean_text(subtitles)\n\n return (subtitles, start, end)", "def title(self):\n return asarray(title(self))", "def title(self):\n return ' '.join(self._title)", "def get_sub_title(self, article: BeautifulSoup):\n return self.get_text(article, self.parsing_template.sub_title)", "def book_title(book_text):\n search = re.search(\"Title:(.*)\", book_text)\n title = search.group(1).replace(\"\\r\", \" \").strip()\n return title", "def fix_subtitles(title):\n\n new_title = LINE_BREAK_PATTERN.sub(': ', title, 1)\n return LINE_BREAK_PATTERN.sub('; ', new_title)", "def all_titles(our_data):\n return [album['album'] for album in our_data]", "def print_title(title, subtitle=None):\n print(title)\n if subtitle:\n print(subtitle)\n bar_len = max(len(title), len(subtitle))\n else:\n bar_len = len(title)\n bar = '=' * bar_len\n print(bar)", "def show_list(site, titles): # type: (str, List[str]) -> None\n print(u\"The latest tutorials from {}\".format(site))\n for article_id, title in enumerate(titles):\n print(u\"{:>3} {}\".format(article_id, title))", "def get_book_titles(self, lib_db):\n titles = []\n conn = sqlite3.connect(lib_db)\n c = conn.cursor()\n for row in c.execute(\"SELECT ZTITLE FROM ZBKLIBRARYASSET WHERE ZTITLE <> '' AND ZTITLE <> 'none'\"):\n titles.append(row[0])\n conn.close()\n return titles", "def getSubtitles(self):\n\n self.createSoupObject()\n self.getcustomerID()\n self.getToken()\n self.getTitle()\n\n if self.debug:\n print(self.title)\n\n self.getVideoType()\n if self.debug:\n print(self.videoType)\n\n if self.videoType == \"movie\":\n\n self.getAsinID1() # Method-1\n if self.debug:\n print(self.parametersDict['asin'])\n\n returnValue = self.standardFunctionCalls()\n if returnValue != 1:\n self.videoType = \"tv\"\n\n if self.videoType != \"movie\":\n\n self.getAsinID2()\n if self.debug:\n print(self.asinList)\n\n self.parametersDict['asin'] = self.asinList\n currentTitle = self.title\n\n try:\n returnValue = self.standardFunctionCalls()\n except:\n pass\n self.title = currentTitle\n\n return returnValue", "def getSubtitleTable(date) -> str:\n return \"\"\"| Start of the day | Weeks until NIMCET |\n| ---------------- | -----------------: |\n| {time} | {weeks} weeks 
|\"\"\".format(time=formattedTimeNow(), weeks=round((datetime(2021, 5, 21) - date).days/7, 1))", "def getlistofpossibletitles(fileitem,fname):\n title = []\n oddtitles = open(\"oddtitles.txt\", 'r')\n content = oddtitles.read()\n oddtitles.close()\n\n content = content.split(\"\\n\")\n for line in content:\n elements = line.split(',')\n if fileitem in elements[0]:\n #print(elements[1])\n title.append(elements[1].title())\n\n \n title.append(fileitem)\n title.append(fileitem.title())\n lookfor = fileitem.replace(\".\",\" \")\n title.append(lookfor)\n title.append(lookfor.title())\n lookfor = fileitem.replace('-',\" \")\n title.append(lookfor)\n title.append(lookfor.title())\n with open(fname, \"r\") as dataf:\n for line in dataf:\n if lookfor.upper() in line.upper():\n line = line.replace(\"\\n\",\"\")\n title.append(line)\n title.append(line.title())\n return title", "def getFilmTitles(checkFolder):\n\n files = [str(x) for x in Path(checkFolder).rglob(\"*\")]\n libFilmFiles = list(map(os.path.basename,files)) # Remove the path\n libFilmTitles = [os.path.splitext(x)[0] for x in libFilmFiles]\n return libFilmTitles", "def get_short_name(self):\n split = self.name.split(' - ')\n # author, year, and first couple of words of paper title\n return \"{} ({}), {}\".format(split[0], split[1], \" \".join(split[2].split(' ')[:3]))", "def get_title():", "def make_title(words):", "def getTitle(pan: str) -> str:\n src = open(pan).read()\n lines = src.split(\"\\n\")\n if len(lines)==0: return \"\"\n t = mark.render(lines[0].strip(\" #\"))\n if t.startswith(\"<p>\"): t = t[3:]\n if t.endswith(\"</p>\"): t = t[:-4]\n return t", "def nametitles(cls) -> t.List[NameTitle]:\n return [label for label in cls.values() if isinstance(label, tuple)]", "def _subsection_titles(self, section_index):\r\n # Retrieve the subsection title for the section\r\n # Add one to the list index to get the CSS index, which starts at one\r\n subsection_css = 'nav>div.chapter:nth-of-type({0})>ul>li>a>p:nth-of-type(1)'.format(section_index)\r\n\r\n # If the element is visible, we can get its text directly\r\n # Otherwise, we need to get the HTML\r\n # It *would* make sense to always get the HTML, but unfortunately\r\n # the open tab has some child <span> tags that we don't want.\r\n return self.q(\r\n css=subsection_css).map(\r\n lambda el: el.text.strip().split('\\n')[0] if el.is_displayed() else el.get_attribute('innerHTML').strip()\r\n ).results", "def get_captions(video_id):\n try:\n transcript = YouTubeTranscriptApi.get_transcript(video_id)\n \n #parse only string and remove the time of captions\n text = []\n for txt in transcript:\n text.append(txt['text'])\n return text\n except:\n text = \"No Captions\"\n return text", "def get_subtitle(annotation, sub_duration, video_clip, seen_annotations):\n if len(annotation[\"text\"]) == 0:\n return None\n\n annotation_txt = calculate_needed_subtitle_height(annotation, seen_annotations, video_clip)\n\n txt_clip = TextClip(annotation_txt, color=\"white\", fontsize=70, font='Sans Serif')\n txt_clip = txt_clip.set_position((\"center\", get_subtitle_offset(annotation, seen_annotations, video_clip)))\n txt_clip = txt_clip.set_start(float(annotation[\"time\"]) / 1000.0)\n txt_clip = txt_clip.set_duration(sub_duration)\n\n return txt_clip", "def titleize(title):\n titleized = []\n for idx, word in enumerate(title.split()):\n if idx == 0 or word not in ['a', 'of', 'in', 'the', 'v']:\n word = word.capitalize()\n\n titleized.append(word)\n\n return ' '.join(titleized)", "def 
_get_overview(playlist_change):\n msg = f'{_get_date_in_words(playlist_change[\"date\"])} ('\n in_parentheses = []\n if len(playlist_change['added_playlists']) != 0:\n in_parentheses.append(f'{len(playlist_change[\"added_playlists\"])} Playlists Added')\n if len(playlist_change['removed_playlists']) != 0:\n in_parentheses.append(f'{len(playlist_change[\"removed_playlists\"])} Playlists Removed')\n if len(playlist_change['modified_playlists']) != 0:\n num_added_songs = 0\n num_removed_songs = 0\n for modified_playlist in playlist_change['modified_playlists']:\n num_added_songs += len(modified_playlist['added_songs'])\n num_removed_songs += len(modified_playlist['removed_songs'])\n if num_added_songs > 0:\n in_parentheses.append(f'{num_added_songs} Songs Added')\n if num_removed_songs > 0:\n in_parentheses.append(f'{num_removed_songs} Songs Removed')\n overview = \", \".join(in_parentheses)\n return msg + overview + ')'", "def test_parse_fasta_title_10():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title after', 'single_ends', '')\n assert seq_name == 'title'\n assert seq_end == ''", "def getSubtitleURL(self):\n\n # If it is a movie, we use this methodology -\n try:\n IndexingParameters = [\"subtitleUrls\", 0, \"url\"]\n TitleParamters = [\n \"catalogMetadata\", \"catalog\", \"title\", \"episodeNumber\"]\n subRequestObject = requests.get(self.subtitleURLContainer)\n\n parsedJsonObject = json.loads(str(subRequestObject.text))\n SubsURL = parsedJsonObject[IndexingParameters[0]][\n IndexingParameters[1]][IndexingParameters[2]]\n if self.title == \"Amazonsubtitles\":\n try:\n self.title = parsedJsonObject[TitleParamters[0]][TitleParamters[1]][TitleParamters[2]] + \"_\" + str(\n parsedJsonObject[TitleParamters[0]][TitleParamters[1]][TitleParamters[3]])\n except:\n pass\n\n return SubsURL\n\n except:\n pass\n pass", "def subtitle_header(slide, title: str):\n top = Inches(0.61)\n left = Inches(0.42)\n height = Inches(0.5)\n width = Inches(0.5)\n txt_box = slide.shapes.add_textbox(left, top, width, height)\n text_frame = txt_box.text_frame\n\n paragraph = text_frame.paragraphs[0]\n paragraph.text = title\n paragraph.font.bold = False\n paragraph.font.size = Pt(22)\n paragraph.font.name = 'Times New Roman'\n\n return slide", "def getOverview(movieInfo):\n \n if \"overview\" in movieInfo:\n overview = \"\" if movieInfo[\"overview\"] is None else movieInfo[\"overview\"]\n return _format(\"\".join(c for c in overview if c not in punctuation))\n else: \n raise AttributeError(\"The parameter has no attribute 'overview'\")", "def pull_titles(cls, soup):\n titles = []\n parents = soup.findAll('div', 'jobTitle')\n for parent in parents:\n try:\n titles.append(parent.find('span').contents[0].strip())\n except AttributeError:\n titles.append(parent.find('span'))\n return titles", "def parse_title(title, various):\n if various and \" - \" in title:\n title = title.split(\" - \", 1)[1]\n return RE_FEAT.sub(\"\", title).rstrip()", "def getSortTitle(dictList):\n\ttitle = dictList['title'].lower().strip()\n\tfirstword = title.split(\" \",1)[0]\n\tif firstword in ['a', 'an', 'the']:\n\t\ttitle = title.split(firstword, 1)[-1]\n\treturn title.strip()", "def stat_subtitle(self, release_name, movie_dir):\n ret = {}\n for root_dir, child_folders, file_names in os.walk(movie_dir):\n for file_name in file_names:\n if not file_name.startswith(release_name):\n continue\n\n subtitle_info = get_subtitle_info(file_name)\n if not subtitle_info:\n continue\n\n if 'lang' not in subtitle_info:\n continue\n\n lang = 
subtitle_info['lang']\n if lang not in ret:\n ret[lang] = []\n\n ret[lang].append(file_name)\n\n # Sort result\n for lang in ret:\n ret[lang] = sorted(ret[lang])\n\n return ret", "def printable(title, subtitle=None, resp=None):\n title = getfirst(title)\n subtitle = getfirst(subtitle)\n resp = getfirst(resp)\n if subtitle:\n title += \" : \" + subtitle\n if resp:\n title += \" / \" + resp\n return title", "def _get_title_and_explanation(self):\n title = \"\"\n more_lines = []\n if self.__doc__:\n # Find the first non-empty line in the docstring. If there is\n for line in self.__doc__.split(\"\\n\")[:-1]: # strip off last line, always blank\n line = line.strip()\n if line:\n if not title:\n # We don't have the title set, yet, so we know this is the first line.\n if line.endswith(\".\"):\n # Don't want a period at the end of a title to make it look\n # better.\n title = line[:-1]\n else:\n title = line\n continue\n if not line and not more_lines:\n # We don't need empty lines at the start of the explanation\n continue\n # Add up the lines of the explanation text\n if line.startswith(\"*\"):\n line = f\"&nbsp; &nbsp; {line}\"\n\n more_lines.append(line or \"<br>&nbsp;<br>\") # Empty lines become line break\n return ((title or \"A resource\"), \" \".join(more_lines))", "def Show_Titles( self ):\r\n self.system.Change_Seq( \"Title\" )", "def format_title(self, title):\n new_title = ''.join(word.lower().strip('!\"#$%&\\'()*+,-./:;<=>?@[\\\\]^_`{|}~ ') for word in title)\n return new_title", "def TitleTranslations(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('title_translations', default)\n return [HEP.TitleTranslationObject(i) for i in tmp]", "def subtitle(string):\n print(\"{}\\n{}\\n\".format(bold(string), underline(string, \"-\")))", "def __str__(self):\n len_title=75\n if len(self.description)>len_title:\n titlestring=self.description[:len_title] + '...'\n else:\n titlestring=self.description\n return titlestring", "def get_title_artist(title_element): \n \n \n title_token = title_element.text.split(\" \")\n\n word = title_token.pop(0)\n artist = ''\n title = ''\n first = True\n while(title_token != [] and word != '-' and word[-1] != '-'):\n if first:\n first = False\n artist += (word)\n else:\n artist += ' '\n artist += word\n\n word = title_token.pop(0)\n \n if word[-1] == '-':\n word = word[:-1]\n artist += word\n \n if title_token == []:\n print(\"ERROR HERE: \", title_element.text)\n return None, None\n \n word = title_token.pop(0)\n first = True\n\n while(True):\n if first:\n first = False\n title += word\n else:\n title += ' '\n title += word\n if title_token != []:\n word = title_token.pop(0)\n if word == \"ALBUM\" or (word == \"EP\" and title_token[0] == \"REVIEW\"):\n break\n else:\n break\n return title, artist", "def get_abbreviated_description(self):\n word_array = str(self.description).split()[:25]\n abbreviated_description = \" \".join(word_array)\n return abbreviated_description", "def video_title(self):\n # type: () -> string_types\n return self._video_title", "def display_tournament(self, title: str, subtitle: str = \"\\n\", datas: list = None):\n self.clean()\n print(f\"{title}\")\n print(f\"{subtitle}\\n\")\n for data in datas:\n print(f\"\\t{data}\")\n print(\"\\n\" * 2)\n self.stand_by_msg(\"\")", "def title_comm(soup: str, nb:int):\n title = []\n for span in soup.findAll('article', attrs={'itemprop': 'review'}):\n dat = str(recovTextBetweenTags(str(span.findAll('time', attrs={\n 'itemprop': 'datePublished'})), ',')).replace(\"['[\", 
'').replace(\"]']\", '')\n dat = (format_date(dat))\n if (dat) > (datetime.now() - timedelta(nb)):\n top = span.findAll('h2', attrs={'class': 'text_header'})\n top = translate(recovTextBetweenTags(str(top), 'non'))\n title.append(top[0][1:len(top[0])])\n\n return title", "def label_rule_for_title(text: str):\n return re.findall(LABEL_SPECIFICATION[\"RE_TITLE\"], text)", "def list_sections(self, sections):\n # issue_data['sections'][0]['titles'][0][0=idioma, 1=titulo]\n # no entanto, deveria ser\n # issue_data['sections'][0]['titles'][0][idioma] = titulo\n titles = []\n for section in sections:\n for lang, sectitle in section['titles']:\n titles.append(sectitle)\n return ' | '.join(titles)", "def _make_title(self):\n ret = self.properties['reason'].capitalize()\n ret += ' has been reported near ' + self.properties['address'].split(',')[0]\n time = datetime.strptime(self.properties['when'], '%Y-%m-%dT%H:%M:%S')\n times = [time.strftime(i).lstrip('0') for i in ('%m', '%d', '%I:%M%p')]\n ret += ' on {}/{} at {}'.format(times[0], times[1], times[2])\n return ret", "def test_full_title():\n test_string = \"title \\u2013 subtitle \\u2014 Ansible Documentation\"\n assert (\"title\", \"subtitle\") == parse_title(test_string)", "def test_get_title(double_title, single_title, empty_title):\n assert get_title(double_title) == \"Parton distributions with LHC data\"\n assert get_title(single_title) == \"The Large Hadron Collider\"\n assert get_title(empty_title) == \"\"\n\n no_title_key = {\n \"not_titles\": []\n }\n assert get_title(no_title_key) == \"\"", "def title(text, level=0):\n return '\\n' + text + '\\n' + '=-~_#%^' [level] * len(text) + '\\n\\n'", "def get_texts(book: TextIO) -> list:\n content = book.read()\n chars_limit = 970\n texts = [content[i:i + chars_limit] for i in range(0, len(content), chars_limit)]\n return [\"...\" + t + \"...\" if t != texts[0] else t + \"...\" for t in texts]", "def parsed_title(self):\n return parse_pr_title(self.title)", "def short_title(self):\n if hasattr(self, \"title\"):\n return self.title\n else:\n return \"\"", "def _split_title(self, title):\n if not title:\n return '', ''\n if re.search(r'\\(.*\\)', title):\n return re.match(r'(\\w+)\\((.*)\\)', title).groups()\n else:\n return title, ''", "def get_subtitle(self):\n return 'Remove items'", "def subject_list():\n items = []\n\n soup = abcradionational.get_soup(URL + \"/podcasts/subjects\")\n \n subject_heading = abcradionational.get_podcast_heading(soup)\n \n for subject in subject_heading:\n items.append({\n 'label': subject['title'],\n 'path': plugin.url_for('subject_item', url=subject['url']),\n })\n\n return items", "def title(self):\n return self.data.find(\n 'span', class_='briefResultsTitle'\n ).find(\n 'a'\n ).get_text()", "def _get_full_title(self):\n return \"%s - %s %d\" % (self.title, _('Season'), self.season)", "def test_parse_fasta_title_09():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title 2 after', 'mixed_ends', '')\n assert seq_name == 'title'\n assert seq_end == '2'", "def title_all(e_title: str) -> str:\n title_customizations = [\n BibEntryCustomization.title_strip,\n BibEntryCustomization.title_capitalization,\n ]\n for f in title_customizations:\n e_title = f(e_title)\n return e_title", "def getaslist(self):\n l = []\n l.append(self.title.strip().encode('utf-8'))\n l.append(self.desc.strip().encode('utf-8'));\n l.append(self.course_number.strip().encode('utf-8'))\n l.append(self.duration.strip().encode('utf-8'))\n l.append(self.difficulty.strip().encode('utf-8'))\n 
l.append(self.instructors.strip().encode('utf-8'))\n l.append(self.url.strip().encode('utf-8'))\n return l", "def abbreviate_title(s):\n if u'Group ' in s:\n return s.replace(u'Group ', u'')\n else:\n parts = s.split(None, 1)\n if len(parts) < 2:\n return s\n genus, rest = s.split(None, 1)\n return u'%s. %s' % (genus[0], rest)", "def eng_with_sub(self, eng: list, subword: list) -> list:\n subwords = subword + eng[0]\n while [] in subwords:\n subwords.remove([])\n out = \" \".join('%s' % id for id in subwords).split()\n return out", "def test_parse_fasta_title_11():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title_1', 'single_ends', '')\n assert seq_name == 'title_1'\n assert seq_end == ''", "def get_title(self):\n title = (None, 7)\n for text, level in self._headers:\n if level < title[1]:\n title = (text, level)\n return title[0]", "def inclusive_title(self):\n return self.title + (\" %s\" % (self.episode_to_string(self.latest_season, self.latest_episode),) if self.is_series() else \"\")", "def test_parse_fasta_title_08():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title_1', 'mixed_ends', '')\n assert seq_name == 'title'\n assert seq_end == '1'", "def get_title(self):\n\n title = ''\n doc = self.article.doc\n\n title_element = self.parser.getElementsByTag(doc, tag='title')\n # no title found\n if title_element is None or len(title_element) == 0:\n return title\n\n # title elem found\n title_text = self.parser.getText(title_element[0])\n used_delimeter = False\n\n # split title with |\n if '|' in title_text:\n title_text = self.split_title(title_text, PIPE_SPLITTER)\n used_delimeter = True\n\n # split title with -\n if not used_delimeter and '-' in title_text:\n title_text = self.split_title(title_text, DASH_SPLITTER)\n used_delimeter = True\n\n # split title with »\n if not used_delimeter and u'»' in title_text:\n title_text = self.split_title(title_text, ARROWS_SPLITTER)\n used_delimeter = True\n\n # split title with :\n if not used_delimeter and ':' in title_text:\n title_text = self.split_title(title_text, COLON_SPLITTER)\n used_delimeter = True\n\n title = MOTLEY_REPLACEMENT.replaceAll(title_text)\n return title", "def SongTitle( path ):\n p = subprocess.Popen( ['ffprobe',path], stderr=subprocess.PIPE )\n\n output = p.communicate()[1].decode()\n if 'Invalid data found' in output:\n return None\n\n # find the first occurance of \"title : stuff\" with any number of spaces.\n res = re.search( r'title\\s+:\\s+([a-zA-Z0-9,\\(\\) ]+)', output )\n\n if res is None:\n return \"\"\n\n ret = res.group(1)\n\n return ret", "def _parse_title(self, links):\n for link in links:\n if \"hearing\" in link[\"title\"].lower():\n return link[\"title\"].replace(\"Notice\", \"\").strip()\n if \"special\" in link[\"title\"].lower():\n return \"Special Meeting\"\n return \"Illinois Medical District Commission\"", "def getTitle(test:str) -> str:\n return test[5:].strip()", "def _build_title(db, place):\n descr = place.get_title()\n location = get_main_location(db, place)\n parish = location.get(PlaceType.PARISH)\n city = location.get(PlaceType.CITY)\n state = location.get(PlaceType.STATE)\n title_descr = \"\"\n if descr:\n title_descr += descr.strip()\n if parish:\n title_descr += ', ' + parish.strip() + _(\" parish\")\n if city:\n title_descr += ', ' + city.strip()\n if state:\n title_descr += ', ' + state.strip() + _(\" state\")\n return _strip_leading_comma(title_descr)", "def summary_title(tile_summary):\n return f\"Slide tile_summary.slide_name Tile Summary:\"", "def 
test_parse_fasta_title_12():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title 2 words', 'single_ends', '')\n assert seq_name == 'title 2'\n assert seq_end == ''", "def get_subtitleinfo(fileFullName):\n sys.stdout.write(\"Requesting subtitle info...\\n\")\n\n #接口获取字幕信息\n response = requests.post(\n \"https://www.shooter.cn/api/subapi.php\",\n verify=False,\n params= {\n 'filehash': ComputeFileHash(fileFullName),\n 'pathinfo': os.path.realpath(fileFullName),\n 'format': 'json',\n 'lang': \"Chn\",\n },\n )\n #未找到字幕处理\n if response.text == u'\\xff':\n sys.stderr.write(\"Subtitle not found.\\n\")\n sys.exit(1)\n return response", "def get_text(downgrade_titles=False):", "def title(self) -> str:\n\t\t# pylint: disable=unsubscriptable-object\n\t\treturn self.value[1]", "def test_parse_fasta_title_07():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title', 'mixed_ends', '')\n assert seq_name == 'title'\n assert seq_end == ''", "def test_parse_fasta_title_02():\n seq_name, seq_end = blast.parse_fasta_title(\n 'title1 1 after', 'end_1', '1')\n assert seq_name == 'title1'\n assert seq_end == '1'", "def getTitle(self): #$NON-NLS-1$\r", "def getTitle(self): #$NON-NLS-1$\r" ]
[ "0.6559989", "0.626816", "0.61477166", "0.59452933", "0.5862041", "0.58259624", "0.57575667", "0.5716959", "0.57007366", "0.56698", "0.566014", "0.5631774", "0.56170785", "0.5601238", "0.55479985", "0.55277705", "0.55247784", "0.55187654", "0.5466009", "0.5463526", "0.54340214", "0.54149383", "0.5401396", "0.54010487", "0.5399305", "0.53931403", "0.53860164", "0.5380978", "0.53784657", "0.5361902", "0.53616214", "0.5335472", "0.5333444", "0.53330183", "0.53295493", "0.5310152", "0.52970403", "0.52920485", "0.5288033", "0.52866906", "0.52832615", "0.52609926", "0.52517503", "0.52405643", "0.5238874", "0.52333325", "0.5225522", "0.52181625", "0.52091116", "0.5203726", "0.52017725", "0.5189376", "0.5188638", "0.5183895", "0.5180022", "0.51701576", "0.5162829", "0.5159154", "0.5146546", "0.51392734", "0.5131794", "0.5122431", "0.51131546", "0.51096404", "0.5106426", "0.5101855", "0.5097658", "0.5095585", "0.5088192", "0.5085988", "0.5084205", "0.50813144", "0.508108", "0.5079897", "0.5070145", "0.50698495", "0.5064634", "0.5062672", "0.50584394", "0.5054721", "0.50541395", "0.5051562", "0.50489634", "0.5041638", "0.5038468", "0.50349426", "0.50318396", "0.50218326", "0.5016891", "0.5014797", "0.5000633", "0.49953791", "0.4994136", "0.49905726", "0.49847317", "0.4984656", "0.4976276", "0.4975353", "0.49725357", "0.49725357" ]
0.73892254
0
The mins method returns the lower bounds of the action spaces' parameters.
def mins(self) -> Tensor:
    return self._ranges[:, 0]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def mins(self):\n return self._mins", "def bounds(self) -> Tensor:\n return torch.cat([self.mins, self.mins + self.ranges], dim=-2)", "def mins(self):\n return self.intervals[:, 0]", "def argminX( self ):\n min = 1e30\n minX = None\n for i in range( 0, self.GetN() ):\n p = ( ROOT.Double(), ROOT.Double() )\n self.GetPoint( i, p[0], p[1] )\n if p[1] < min:\n min = p[1]\n minX = p[0]\n return minX", "def minmin_maxmax( *args ):\n rmin = min( [ mv.min() for mv in args ] )\n rmax = max( [ mv.max() for mv in args ] )\n rmv = cdms2.createVariable( [rmin,rmax] )\n return rmv", "def calculate_min_max_tiles(self):", "def action_space(self):\n lower_bounds = np.array([])\n upper_bounds = np.array([])\n for joint in self._used_joints:\n joint_idx = self._joint_limits.joint_names.index(joint)\n if self._control_mode == 'position':\n lower_bounds = np.concatenate(\n (lower_bounds,\n np.array(self._joint_limits.position_lower[\n joint_idx:joint_idx + 1])))\n upper_bounds = np.concatenate(\n (upper_bounds,\n np.array(self._joint_limits.position_upper[\n joint_idx:joint_idx + 1])))\n elif self._control_mode == 'velocity':\n velocity_limit = np.array(\n self._joint_limits.velocity[joint_idx:joint_idx + 1]) * 0.1\n lower_bounds = np.concatenate((lower_bounds, -velocity_limit))\n upper_bounds = np.concatenate((upper_bounds, velocity_limit))\n elif self._control_mode == 'effort':\n effort_limit = np.array(\n self._joint_limits.effort[joint_idx:joint_idx + 1])\n lower_bounds = np.concatenate((lower_bounds, -effort_limit))\n upper_bounds = np.concatenate((upper_bounds, effort_limit))\n else:\n raise ValueError(\n 'Control mode %s is not known!' % self._control_mode)\n return gym.spaces.Box(\n np.concatenate((lower_bounds, np.array([0]))),\n np.concatenate((upper_bounds, np.array([100]))),\n dtype=np.float32)", "def get_parameters_min(self):\n minValues = numpy.zeros(self.get_num_parameters())\n i = 0\n for p in self.parameters:\n minValues[i] = p.get_min_value()\n i += 1\n return minValues", "def _get_observation_lower_bound(self):\n lower_bound = -self._get_observation_upper_bound()\n lower_bound[-7] = 0.0\n lower_bound[-2:] = [self.min_speed, self.min_side_speed]\n return lower_bound", "def get_minmax(self, stmt, slist):\n minel = maxel = None\n for s in slist:\n if s.keyword == \"min-elements\":\n minel = s.arg\n elif s.keyword == \"max-elements\":\n maxel = s.arg\n if minel is None:\n minst = stmt.search_one(\"min_elements\")\n if minst:\n minel = minst.arg\n else:\n minel = \"0\"\n if maxel is None:\n maxst = stmt.search_one(\"max_elements\")\n if maxst:\n maxel = maxst.arg\n return (minel, maxel)", "def get_bounds(self):\n return ([self.t_min] * self.dim,[self.t_max] * self.dim)", "def min(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"min\")", "def min(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"min\")", "def potential_min(self):\n\n return self._args.min", "def min_values(self, lower, upper): \n if not self.lower_bounds is None:\n return self.lower_bounds\n\n minus = np.clip(self.coeffs,-math.inf,0)\n plus = np.clip(self.coeffs,0,math.inf)\n self.lower_bounds = plus.dot(lower) + minus.dot(upper) + self.const\n \n return self.lower_bounds", "def _min_in_bounds(self, min):\n if min <= self.valmin:\n if not self.closedmin:\n return self.val[0]\n min = self.valmin\n\n if min > self.val[1]:\n min = self.val[1]\n return self._stepped_value(min)", "def return_extents(self):\n\n return [qm.tree.mins, qm.tree.maxs]", "def min_range(self):\n return self._min_range", "def param_bounds(self) 
-> Optional[Sequence[Tuple[float, float]]]:\n return [(-1.0, 1.0)] * len(list(self.params()))", "def cmin(self):\n return self[\"cmin\"]", "def input_bounds(self):\n return self._min_input, self._max_input", "def get_bounds(self):\n occupied_locations = self.board.keys()\n min_x = min(p[0] for p in occupied_locations)\n max_x = max(p[0] for p in occupied_locations)\n min_y = min(p[1] for p in occupied_locations)\n max_y = max(p[1] for p in occupied_locations)\n return ((min_x, max_x), (min_y, max_y))", "def getCurrentAnimRange():\n return int(oma.MAnimControl.minTime().value), int(oma.MAnimControl.maxTime().value)", "def min_mireds(self):\n return 175", "def get_minimum():\n return [\n convert_variables([0.78547, 0.78547, 0.78547]),\n ]", "def min_(*args, **kwargs):\n ...", "def bounds(self):\n return self.min_col, self.min_row, self.max_col, self.max_row", "def get_model_parameter_bounds():\n minf = float(\"-inf\")\n inf = float(\"inf\")\n params = dict(mu=(minf,inf), rho=(0.0 ,inf))\n return params", "def _get_minth(self):\n return self.__minth", "def _get_minth(self):\n return self.__minth", "def _get_minth(self):\n return self.__minth", "def _get_minth(self):\n return self.__minth", "def _get_minth(self):\n return self.__minth", "def _get_minth(self):\n return self.__minth", "def cmin(self):\n return self['cmin']", "def min_and_max(i, j, op, m, M):\n max_val = float('-inf')\n min_val = float(\"inf\")\n for k in range(i, j):\n a = evalt(M[i][k], M[k+1][j], op[k])\n b = evalt(M[i][k], m[k+1][j], op[k])\n c = evalt(m[i][k], M[k+1][j], op[k])\n d = evalt(m[i][k], m[k+1][j], op[k])\n max_val = max(max_val, a, b, c, d)\n min_val = min(min_val, a, b, c, d)\n return min_val, max_val", "def optimal_min(board):\n if terminal(board):\n return [None, utility(board)]\n\n available_actions = list(actions(board))\n\n # Naive baseline comparison is positive infinity\n global_optimum = [None, math.inf]\n\n for action in available_actions:\n # Anticipates optimal adversarial moves.\n local_optimum = optimal_max(result(board, action))\n\n if global_optimum[1] >= local_optimum[1]:\n global_optimum = [action, local_optimum[1]]\n\n return global_optimum", "def bounds(self): # -> tuple[()]:\n ...", "def get_minmax(self):\n x_minmax = [np.min(self.grid['x']), np.max(self.grid['x'].max())]\n z_minmax = [np.min(self.grid['z']), np.max(self.grid['z'].max())]\n return x_minmax, z_minmax", "def getMinMax(self,arr):\n # not implemented for Template SED yet\n return arr[\"z\"], arr[\"z\"]", "def min_max(my_list):\n print(\"Min = \",min(my_list,key = abs))\n print(\"Max = \",max(my_list,key = abs))", "def argmin(self, array):\n minvalue = inf\n minindex = None\n for i in range(len(array)):\n if array[i] < minvalue:\n minvalue = array[i]\n minindex = i\n return minvalue, minindex", "def minMaxLoc(img):\n\tmaskVar = mask(img)\n\tpt = cv2.minMaxLoc(grayscale(img), maskVar)\n\td = {\"minVal\": pt[0], \"maxVal\":pt[1], \"minLoc\":pt[2], \"maxLoc\":pt[3]}\n\treturn d", "def shutter_min_times(self):\n otime, ctime = ct.c_int(), ct.c_int()\n self.lib.GetShutterMinTimes(ct.pointer(ctime), ct.pointer(otime))\n return (otime.value, ctime.value)", "def minmax(self):\r\n vx = [v[0] for v in self.vl]\r\n vy = [v[1] for v in self.vl]\r\n self.xmax, self.xmin = max(vx), min(vx)\r\n self.ymax, self.ymin = max(vy), min(vy)", "def findMin(img):\n\td = minMaxLoc(img)\n\treturn {\"minVal\":d[\"minVal\"], \"minLoc\":d[\"minLoc\"]}", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n legalActions = 
gameState.getLegalPacmanActions()\n scoresForAction = {}\n maxAction = \"\"\n maxScore = -99999999999999\n for action in legalActions:\n scoresForAction[action] = self.minimax2(gameState.generatePacmanSuccessor(action), self.depth, False)\n for x,y in scoresForAction.iteritems():\n if(maxScore < y):\n maxScore = y\n maxAction = x\n return maxAction\n # return self.miniMax(gameState, self.depth, 0)", "def get_hyperparameter_bounds():\n minf = float(\"-inf\")\n inf = float(\"inf\")\n params = dict(mu=(minf,inf), nu=(0.0 ,inf), r=(0.0, inf), s=(0.0, inf))\n return params", "def _get_minimum(self):\n return self._minimum", "def min(self):\n return self._min_coords", "def get_min_max(self):\n\n mr = np.sqrt(2 * np.log(1/self.mth)) * self.ms\n mr[:] = np.max(mr)\n\n mxmin = self.mx - mr\n mxmax = self.mx + mr\n mymin = self.my - mr\n mymax = self.my + mr\n mzmin = self.mz - mr\n mzmax = self.mz + mr\n\n mb_xmin_idx = np.argmin(mxmin[self.ma > 0])\n mb_xmax_idx = np.argmax(mxmax[self.ma > 0])\n mb_ymin_idx = np.argmin(mymin[self.ma > 0])\n mb_ymax_idx = np.argmax(mymax[self.ma > 0])\n mb_zmin_idx = np.argmin(mzmin[self.ma > 0])\n mb_zmax_idx = np.argmax(mzmax[self.ma > 0])\n\n xmin0 = self.mx[mb_xmin_idx] - mr[mb_xmin_idx]\n xmax0 = self.mx[mb_xmax_idx] + mr[mb_xmax_idx]\n ymin0 = self.my[mb_ymin_idx] - mr[mb_ymin_idx]\n ymax0 = self.my[mb_ymax_idx] + mr[mb_ymax_idx]\n zmin0 = self.mz[mb_zmin_idx] - mr[mb_zmin_idx]\n zmax0 = self.mz[mb_zmax_idx] + mr[mb_zmax_idx]\n\n xmin = xmin0 - (xmax0 - xmin0) * 0.25\n xmax = xmax0 + (xmax0 - xmin0) * 0.25\n ymin = ymin0 - (ymax0 - ymin0) * 0.25\n ymax = ymax0 + (ymax0 - ymin0) * 0.25\n zmin = zmin0 - (zmax0 - zmin0) * 0.25\n zmax = zmax0 + (zmax0 - zmin0) * 0.25\n\n return xmin, xmax, ymin, ymax, zmin, zmax", "def _get_act_minmax(self, layer_name):\n if (\n layer_name in self.config[\"layers\"]\n and \"minmax\" in self.config[\"layers\"][layer_name]\n ):\n return self.config[\"layers\"][layer_name][\"minmax\"]\n else:\n # FIXME: uses -2,2 for unbounded, unknown range values\n layer = self[layer_name]\n if layer.__class__.__name__ == \"Flatten\":\n in_layer = self.incoming_layers(layer_name)[0]\n return self._get_act_minmax(in_layer.name)\n elif self._get_layer_type(layer_name) == \"input\":\n return (-2, +2)\n else: # try to get from activation function\n activation = self._get_activation_name(layer)\n if activation in [\"tanh\", \"softsign\"]:\n return (-1, +1)\n elif activation in [\"sigmoid\", \"softmax\", \"hard_sigmoid\"]:\n return (0, +1)\n elif activation in [\"relu\", \"elu\", \"softplus\"]:\n return (0, +2)\n elif activation in [\"selu\", \"linear\"]:\n return (-2, +2)\n else: # default, or unknown activation function\n return (-2, +2)", "def get_params_bounds(self) -> np.array:\n pass", "def minimum ( self ,\n xmin = None , xmax = None ,\n ymin = None , ymax = None , x0 = () ) :\n \n if xmin is None : xmin = self.xminmax()[0]\n if xmax is None : xmax = self.xminmax()[1]\n if self.xminmax() :\n xmin = max ( xmin , self.xminmax()[0] )\n xmax = min ( xmax , self.xminmax()[1] )\n\n if ymin is None : ymin = self.yminmax()[0]\n if ymax is None : ymax = self.yminmax()[1]\n if self.yminmax() :\n ymin = max ( ymin , self.yminmax()[0] )\n ymax = min ( ymax , self.yminmax()[1] )\n \n if not x0 : x0 = 0.5 * ( xmin + xmax ) , 0.5 * ( ymin + ymax )\n \n if not xmin <= x0[0] <= xmax :\n self.error(\"Wrong xmin/x0[0]/xmax: %s/%s/%s\" % ( xmin , x0[0] , xmax ) )\n\n if not ymin <= x0[1] <= ymax : \n self.error(\"Wrong ymin/x0[1]/ymax: %s/%s/%s\" % ( ymin , x0[1] , 
ymax ) )\n \n from ostap.math.minimize import sp_minimum_2D\n return sp_minimum_2D ( self ,\n xmin , xmax ,\n ymin , ymax , x0 )", "def getRange(self) -> Tuple[int, int]:\n return self.validator().bottom(), self.validator().top()", "def argmin(self, values):\n return self.aggregate(values, \"argmin\")", "def minimax(board):\n actionset = actions(board)\n retaction = []\n if player(board) == X:\n v = -10\n else:\n v = 10\n for action in actionset:\n if player(board) == X:\n retvalue = minvalue(result(board, action))\n if retvalue > v:\n v = retvalue\n retaction.clear()\n retaction.extend(list(action))\n else:\n retvalue = maxvalue(result(board, action))\n if retvalue < v:\n v = retvalue\n retaction.clear()\n retaction.extend(list(action))\n\n return tuple(retaction)", "def spec_min_max(self):\n spec_max = np.asarray([2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,2048,\n 2048,2048,2048,2048,2048])\n spec_min = np.asarray([ 500, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0])\n return np.vstack((spec_min, spec_max))", "def get_mines(self):\n\t\treturn ((x, y) for x in range(self.width)\n\t\t for y in range(self.height) if self.mines[x][y])", "def diff_mins(start_min, end_min):\n total_min = abs(int(end_min) - int(start_min))\n return total_min", "def optimization_bounds(self, topology):\n bounds_low = np.zeros(self.number_of_parameters())\n bounds_up = np.zeros(self.number_of_parameters())\n\n for pkey, parameter in self.parameters.items():\n bounds_low[pkey] = parameter.bound_low(topology)\n bounds_up[pkey] = parameter.bound_up(topology)\n\n return bounds_low, bounds_up", "def min(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"min\")", "def min(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"min\")", "def get_mins(self, with_abundance=False):\n mins = self.hashes\n if not with_abundance:\n return mins.keys()\n return mins", "def minMaxFonc(liste):\n\n return min(liste), max(liste)", "def get_lower_bound(self):\n return self._lower_bound", "def get_lower_bound(self):\n return self._lower_bound", "def printLimits():\n print(\"MinX:\",Drawable._minX)\n print(\"MaxX:\",Drawable._maxX)\n print(\"MinY:\",Drawable._minY)\n print(\"MaxY:\",Drawable._maxY)", "def atmin(a,lowerlimit=None,dimension=None,inclusive=1):\r\n if inclusive: lowerfcn = N.greater\r\n else: lowerfcn = N.greater_equal\r\n if dimension == None:\r\n a = N.ravel(a)\r\n dimension = 0\r\n if lowerlimit == None:\r\n lowerlimit = N.minimum.reduce(N.ravel(a))-11\r\n biggest = N.maximum.reduce(N.ravel(a))\r\n ta = N.where(lowerfcn(a,lowerlimit),a,biggest)\r\n return N.minimum.reduce(ta,dimension)", "def min_val(board):\n v = math.inf\n if terminal(board):\n return utility(board)\n for action in actions(board):\n v = min(v,max_val(result(board,action)))\n return v", "def min():\n return KeeperOfMinOrMax(int.__gt__)", "def get_lower_bound(self, method):\n sol = method.bound(self)\n print('The lower bound is {}'.format(sol['primal objective']))\n self.lower_bounds[method.__class__.__name__] = sol['primal objective']\n return sol", "def convertMinMaxIntoError(m, name, minName, maxName):\n minVal = m.__dict__.get(minName, None)\n maxVal = m.__dict__.get(maxName, None)\n if maxVal == None or minVal == None:\n return None\n value = m.__dict__[name]\n return [[value - minVal], [maxVal - value]]", "def xminmax ( self ) :\n return self.xvar.minmax()", "def input_bounds(self):\n return self.__input_bounds", "def get_min_max(self, groups, key):\n group = 
groups.get_group(key)\n min = group.loc[group[\"dif\"].idxmin()]\n max = group.loc[group[\"dif\"].idxmax()]\n minmax = {\"min\": min, \"max\": max}\n return minmax", "def get_max_and_min(self):\n max_x = float('-inf')\n min_x = float('inf')\n max_y = float('-inf')\n min_y = float('inf')\n max_z = float('-inf')\n min_z = float('inf')\n ans = max_x, max_y, max_z, min_x, min_y, min_z\n counter = 0\n for src, node in self._graph.get_all_v().items():\n if node.location is not None:\n x = node.location.x\n y = node.location.y\n z = node.location.z\n counter += 1\n max_x = x if x > max_x else max_x\n min_x = x if x < min_x else min_x\n max_y = y if y > max_y else max_y\n min_y = y if y < min_y else min_y\n max_z = z if z > max_z else max_z\n min_z = z if z < min_z else min_z\n if counter > 4:\n ans = max_x, max_y, max_z, min_x, min_y, min_z\n return ans", "def produce_min(self, meta, raven_vars, dispatch, t):\n if self._minimum:\n request, meta = self.get_minimum(meta, raven_vars, dispatch, t)\n else:\n request = {next(iter(self.get_inputs())): 0.0} # TODO is this a good choice when no min var avail?\n return request, meta", "def min_max(items):\n return min(items), max(items)", "def get_suffstat_bounds():\n minf = float(\"-inf\")\n inf = float(\"inf\")\n params = dict(sum_x=(minf,inf), sum_x_squared=(0.0 ,inf))\n return params", "def getLimit(self) :\n\t\treturn (self.modelSizeX, self.modelSizeY)", "def minim(self) -> int:\n\t\treturn 2", "def min(x):\n pass", "def min_nodes(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"min_nodes\")", "def minmax ( self , nshoots = 100000 ) :\n ## try to get minmax directly from pdf/function \n if self.tricks and hasattr ( self.pdf , 'function' ) :\n if hasattr ( self.pdf , 'setPars' ) : self.pdf.setPars() \n f = self.pdf.function()\n if hasattr ( f , 'minmax' ) :\n try :\n mn , mx = f.minmax()\n if 0<= mn and mn <= mx and 0 < mx : \n return mn , mx\n except :\n pass\n if hasattr ( f , 'max' ) :\n try :\n mx = f.max()\n if 0 < mx : return 0 , mx\n except :\n pass\n\n ## check RooAbsReal functionality\n code = self.pdf.getMaxVal( ROOT.RooArgSet ( self.xvar , self.yvar ) )\n if 0 < code :\n mx = self.pdf.maxVal ( code )\n if 0 < mx : return 0 , mx\n \n ## not try to use random\n \n mn , mx = -1 , -10\n if hasattr ( self.pdf , 'min' ) : mn = self.pdf.min()\n if hasattr ( self.pdf , 'max' ) : mx = self.pdf.max()\n if 0 <= mn and mn <= mx and 0 < mx : return mn , mx\n \n if not self.xminmax() : return ()\n if not self.yminmax() : return ()\n \n mn , mx = -1 , -10\n xmn , xmx = self.xminmax()\n ymn , ymx = self.yminmax()\n for i in range ( nshoots ) : \n xx = random.uniform ( xmn , xmx )\n yy = random.uniform ( ymn , ymx )\n with SETVAR ( self.xvar ) :\n with SETVAR ( self.yvar ) :\n self.xvar.setVal ( xx )\n self.yvar.setVal ( yy )\n vv = self.pdf.getVal()\n if mn < 0 or vv < mn : mn = vv\n if mx < 0 or vv > mx : mx = vv\n \n return mn , mx", "def fetchbounds(self):\n pnts = [x for x in [self.out_start, self.start, self.in_start, \\\n self.in_end, self.end, self.out_end] \\\n if x is not None]\n return min(pnts), max(pnts)", "def argminY( self ):\n min = 1e30\n for i in range( 0, self.GetN() ):\n p = ( ROOT.Double(), ROOT.Double() )\n self.GetPoint( i, p[0], p[1] )\n if p[1] < min: min = p[1]\n return min", "def local_max_to_min(node):\r\n if node.op == T.neg and node.inputs[0].owner:\r\n max = node.inputs[0]\r\n if (max.owner and\r\n isinstance(max.owner.op, CAReduce)\r\n and max.owner.op.scalar_op == scal.maximum):\r\n neg = max.owner.inputs[0]\r\n if 
neg.owner and neg.owner.op == T.neg:\r\n return [CAReduce(scal.minimum,\r\n max.owner.op.axis)(neg.owner.inputs[0])]\r\n\r\n return False", "def joint_limits_lower_constraint(q,ee_pos):\n return q - self.min_angles", "def bounds(self):\n return self.xmin, self.xmax, self.ymin, self.ymax", "def MINET(self):", "def min(self):\n\n return time_stat(self, stat=\"min\")", "def argmin_margin(v, **kw) -> np.ndarray:\n return argmax_margin(-v, **kw)", "def location_bounds(glimpse_w, input_w):\n offset = float(glimpse_w) / input_w\n lower = (-1 + offset)\n upper = (1 - offset)\n\n assert lower >= -1 and lower <= 1, 'lower must be in (-1,1), is {}'.format(lower)\n assert upper >= -1 and upper <= 1, 'upper must be in (-1,1), is {}'.format(upper)\n\n return lower, upper", "def bound(x, m, M=None):\n if M is None:\n M = m[1]\n m = m[0]\n # bound x between min (m) and Max (M)\n return min(max(x, m), M)", "def find_min(self):\n return self.min", "def find_min(self):\n return self.min", "def _rect_left(self):\n\treturn min(self.x, self.x + self.w)", "def get_bounds():\n return [0.00], [1.00]", "def argmin2(self, cvars=None, ctuple=None):\n if (cvars is None):\n return self.v.ind2sub(self.t.argmin())\n ax = tuple(map(lambda x:ctuple[cvars.index(x)] if x in cvars else slice(None) ,self.v))\n return self.v.ind2sub(self.t[ax].argmin())" ]
[ "0.65231216", "0.63490486", "0.6329805", "0.6140933", "0.604615", "0.5979384", "0.59507585", "0.5943575", "0.5881969", "0.5867249", "0.58545846", "0.5735611", "0.5735611", "0.5695982", "0.568224", "0.5630734", "0.56240106", "0.55890894", "0.556751", "0.55503213", "0.55368114", "0.55276316", "0.5522249", "0.5487285", "0.5485602", "0.5477757", "0.5475168", "0.5452155", "0.54489595", "0.54489595", "0.54489595", "0.54489595", "0.54489595", "0.54489595", "0.54443544", "0.54299563", "0.5386711", "0.53746086", "0.53665817", "0.5360309", "0.5359732", "0.5356498", "0.5351184", "0.53446096", "0.5340123", "0.53319573", "0.5328191", "0.53208953", "0.531257", "0.5305732", "0.5299688", "0.5292722", "0.5287374", "0.5284151", "0.528286", "0.5282329", "0.5281113", "0.5278141", "0.52677757", "0.5266936", "0.52582073", "0.5256796", "0.5256796", "0.52470654", "0.5246703", "0.52464706", "0.52464706", "0.52387524", "0.52341765", "0.52203864", "0.5217116", "0.5216171", "0.52106327", "0.5205567", "0.5202975", "0.5195531", "0.51938057", "0.51928264", "0.51922727", "0.5191803", "0.5191778", "0.5191036", "0.518991", "0.5189672", "0.5171479", "0.5166031", "0.5164817", "0.51642984", "0.5164048", "0.51635224", "0.5160706", "0.51601905", "0.51591706", "0.5157997", "0.51552373", "0.5149907", "0.5149907", "0.51496416", "0.51475465", "0.5146566" ]
0.6706375
0
The maxs method returns the upper bounds of the action spaces' parameters.
def maxs(self) -> Tensor: return self._ranges[:, 1]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def maxs(self):\n return self._maxs", "def get_parameters_max(self):\n maxValues = numpy.zeros(self.get_num_parameters())\n i = 0\n for p in self.parameters:\n maxValues[i] = p.get_max_value()\n i += 1\n return maxValues", "def get_bounds(self):\n x_max = self.data['x'].max()\n y_max = self.data['y'].max()\n z_max = self.data['z'].max()\n print(\"x={}; y={}; z={}\".format(x_max, y_max, z_max))\n return (x_max, y_max, z_max)", "def maxQ(self,state):\r\n \r\n maxQ = float('-inf')\r\n maxA = 0\r\n \r\n for a in self.actions:\r\n q = self.Q(state,a)\r\n #print(q,a)\r\n if q > maxQ:\r\n maxQ = q\r\n maxA = a\r\n return(maxQ,maxA)", "def maxs(self):\n return self.intervals[:, 1]", "def max_grains(self):\n index = self._ordered_input_names.index('max_grains')\n return self._inputs[index]", "def max_positions(self):\n return self.args.max_positions", "def maxQ(self,state):\r\n maxA = 0\r\n maxQ = float(\"-inf\")\r\n for aCurr in self.actions:\r\n qCurr = self.Q[(state,aCurr)]\r\n if qCurr > maxQ:\r\n maxA = aCurr\r\n maxQ = qCurr \r\n return(maxQ,maxA)", "def maxpoints(self):\n return self[\"maxpoints\"]", "def maxQ(self,feat):\r\n \r\n maxQ = float('-inf')\r\n maxA = 0\r\n for a in self.actions:\r\n q = self.Q(feat,a)\r\n print(q,a)\r\n if q > maxQ:\r\n maxQ = q\r\n maxA = a\r\n return(maxQ,maxA)", "def max_values(self, lower, upper):\n if not self.upper_bounds is None:\n return self.upper_bounds\n\n minus = np.clip(self.coeffs,-math.inf,0)\n plus = np.clip(self.coeffs,0,math.inf)\n self.upper_bounds = plus.dot(upper) + minus.dot(lower) + self.const\n \n return self.upper_bounds", "def max(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"max\")", "def max(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"max\")", "def return_parameter_bounds(maximum_luminosity=20):\n return [(maximum_luminosity, maximum_luminosity + 3),\n (3 * 10 ** -4, 8 * 10 ** -3), (2., 350), (-8., -0.2),\n (-400, 400)]", "def max_positions(self):\r\n return (self.args.max_source_positions, self.args.max_target_positions)", "def potential_max(self):\n\n return self._args.max", "def max_positions(self):\n return self.student.max_positions() # also needed in validation runs.", "def max_range(self):\n return self._max_range", "def input_bounds(self):\n return self._min_input, self._max_input", "def maxTargets(self):\n return self._getAttribute(Attribute.maxTargets)", "def get_max_q(self, actions, q2_state):\n\n action_values = [ qtron.forward_pass(q2_state) for qtron in actions.values() ]\n\n maxQ = max(action_values)\n\n return maxQ", "def _max_in_bounds(self, max):\n if max >= self.valmax:\n if not self.closedmax:\n return self.val[1]\n max = self.valmax\n\n if max <= self.val[0]:\n max = self.val[0]\n return self._stepped_value(max)", "def argmaxY( self ):\n max = -1e30\n for i in range( 0, self.GetN() ):\n p = ( ROOT.Double(), ROOT.Double() )\n self.GetPoint( i, p[0], p[1] )\n if p[1] > max: max = p[1]\n return max", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n maxScore = {}\n bestVal = -(float(\"inf\"))\n maxAction = \"\"\n for action in gameState.getLegalPacmanActions():\n maxScore[action] = self.expectiminimax(gameState.generatePacmanSuccessor(action), self.depth)\n for x,y in maxScore.iteritems():\n if(bestVal< y):\n bestVal = y\n maxAction = x\n return maxAction", "def max_positions(self):\n return (self.args.max_source_positions, self.args.max_target_positions)", "def argMax(self):\n if len(self.keys()) == 0: return None\n all = list(self.items())\n values = [x[1] for x in all]\n 
maxIndex = values.index(max(values))\n return all[maxIndex][0]", "def argMax(self):\n if len(list(self.keys())) == 0:\n return None\n all = list(self.items())\n values = [x[1] for x in all]\n maxIndex = values.index(max(values))\n return all[maxIndex][0]", "def bounds(self):\n return self.min_col, self.min_row, self.max_col, self.max_row", "def max_value(board): # the X player wants to maximize the score\n if terminal(board):\n return utility(board), None\n else:\n v = -math.inf\n move = None\n for action in actions(board):\n val, _ = min_value(result(board, action))\n # Check if returned Value is less than v if not return v and current action\n if val > v:\n # Assign v the maximum value for future evaluation\n v = max(v,val)\n # Keep track of action\n move = action\n # If best move then return it\n if v == 1:\n return v, move\n return v, move", "def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]:\n return [(-1.0, 1.0)] * len(list(self.params()))", "def get_actions(self) -> int:\r\n if self.discrete_actions:\r\n return self.actions.discrete_space[0]\r\n else:\r\n # continuous space array has (min, max) values for each variable\r\n return self.actions.continuous_space.size // 2", "def max_(*args, **kwargs):\n ...", "def get_bounds(self):\n return ([self.t_min] * self.dim,[self.t_max] * self.dim)", "def max_diffs(state):\n return best_action(state, pig_actions, Q_pig, win_diff)", "def zmax(self):\n # Extract parameters\n pzs = self.params[0]\n return max([pz.zmax for pz in pzs])", "def zmax(self):\n # Extract parameters\n pzs = self.params[0]\n return max([pz.zmax for pz in pzs])", "def get_max_param(self):\r\n\r\n sql_str = \"SELECT jsonb_array_length(parameters) as length FROM alarm_condition\"\r\n\r\n param = self.postgres.query_fetch_all(sql_str)\r\n\r\n if param:\r\n\r\n max_param = max([p['length'] for p in param])\r\n else:\r\n\r\n max_param = 0\r\n\r\n return max_param", "def get_params_bounds(self) -> np.array:\n pass", "def _get_colorbar_limits(self):\n if self.boundaries is not None:\n C = self.boundaries\n if self.extend in [\"min\", \"both\"]:\n C = C[1:]\n\n if self.extend in [\"max\", \"both\"]:\n C = C[:-1]\n return min(C), max(C)\n else:\n return self.get_clim()", "def maximums(self):\n # The maximums are at the peaks position but might be swallowed by \n # other peaks\n maximums = list()\n for func, pos, height, width in zip(self.peaks_function,\n self.peaks_position,\n self.peaks_height,\n self.peaks_width):\n val = func(pos, pos, height, width)\n if val >= self.__call__(pos, count=False)[0]:\n maximums.append((val, pos))\n return sorted(maximums, reverse=True)", "def max(self):\n assert self.__stack\n return self.__max_values[-1]", "def _get_maxth(self):\n return self.__maxth", "def _get_maxth(self):\n return self.__maxth", "def _get_maxth(self):\n return self.__maxth", "def _get_maxth(self):\n return self.__maxth", "def _get_maxth(self):\n return self.__maxth", "def _get_maxth(self):\n return self.__maxth", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n legalActions = gameState.getLegalPacmanActions()\n scoresForAction = {}\n maxAction = \"\"\n maxScore = -99999999999999\n for action in legalActions:\n scoresForAction[action] = self.minimax2(gameState.generatePacmanSuccessor(action), self.depth, False)\n for x,y in scoresForAction.iteritems():\n if(maxScore < y):\n maxScore = y\n maxAction = x\n return maxAction\n # return self.miniMax(gameState, self.depth, 0)", "def acq_max(f_acq, gp, y_max, bounds, space, num_warmup, 
num_starting_points):\n\n # Warm up with random points\n x_tries = [space.random_sample()\n for _ in range(int(num_warmup))]\n ys = f_acq(x_tries, gp=gp, y_max=y_max)\n x_max = x_tries[ys.argmax()]\n max_acq = ys.max()\n\n\n # Explore the parameter space more throughly\n x_seeds = [space.random_sample() for _ in range(int(num_starting_points))]\n\n bounds_minmax = np.array(\n [[bound['_value'][0], bound['_value'][-1]] for bound in bounds])\n\n for x_try in x_seeds:\n # Find the minimum of minus the acquisition function\n res = minimize(lambda x: -f_acq(x.reshape(1, -1), gp=gp, y_max=y_max),\n x_try.reshape(1, -1),\n bounds=bounds_minmax,\n method=\"L-BFGS-B\")\n\n # See if success\n if not res.success:\n continue\n\n # Store it if better than previous minimum(maximum).\n if max_acq is None or -res.fun[0] >= max_acq:\n x_max = _match_val_type(res.x, bounds)\n max_acq = -res.fun[0]\n\n # Clip output to make sure it lies within the bounds. Due to floating\n # point technicalities this is not always the case.\n return np.clip(x_max, bounds_minmax[:, 0], bounds_minmax[:, 1])", "def action_space(self):\n lower_bounds = np.array([])\n upper_bounds = np.array([])\n for joint in self._used_joints:\n joint_idx = self._joint_limits.joint_names.index(joint)\n if self._control_mode == 'position':\n lower_bounds = np.concatenate(\n (lower_bounds,\n np.array(self._joint_limits.position_lower[\n joint_idx:joint_idx + 1])))\n upper_bounds = np.concatenate(\n (upper_bounds,\n np.array(self._joint_limits.position_upper[\n joint_idx:joint_idx + 1])))\n elif self._control_mode == 'velocity':\n velocity_limit = np.array(\n self._joint_limits.velocity[joint_idx:joint_idx + 1]) * 0.1\n lower_bounds = np.concatenate((lower_bounds, -velocity_limit))\n upper_bounds = np.concatenate((upper_bounds, velocity_limit))\n elif self._control_mode == 'effort':\n effort_limit = np.array(\n self._joint_limits.effort[joint_idx:joint_idx + 1])\n lower_bounds = np.concatenate((lower_bounds, -effort_limit))\n upper_bounds = np.concatenate((upper_bounds, effort_limit))\n else:\n raise ValueError(\n 'Control mode %s is not known!' 
% self._control_mode)\n return gym.spaces.Box(\n np.concatenate((lower_bounds, np.array([0]))),\n np.concatenate((upper_bounds, np.array([100]))),\n dtype=np.float32)", "def _get_prod_bounds(self, comp):\n cap_res = comp.get_capacity_var() # name of resource that defines capacity\n maximum = comp.get_capacity(None, None, None, None)[0][cap_res]\n # TODO minimum!\n # producing or consuming the defining resource?\n if maximum > 0:\n return 0, maximum, pyo.NonNegativeReals\n else:\n return maximum, 0, pyo.NonPositiveReals", "def getOptimalSolution(self):\n max_index = np.argmax(self.Ws)\n self.Wmax = self.Ws[max_index]\n self.Emax = self.subsets[max_index]\n return (self.Wmax, self.Emax)", "def produce_max(self, meta, raven_vars, dispatch, t):\n request, meta = self.get_capacity(meta, raven_vars, dispatch, t)\n return request, meta", "def max_val(board):\n v = -math.inf\n if terminal(board):\n return utility(board)\n for action in actions(board):\n v = max(v,min_val(result(board,action)))\n return v", "def get_max_score(location_list, grid, shape):", "def hopMax(channels, offset=0) :\n\tabschannels = abs(channels)\n\treturn (\n\t\tabschannels.max(axis=0),\n\t\tabschannels.argmax(axis=0)+offset,\n\t\t)", "def find_max(self):\n\n max_x = -10\n max_y = -10\n k = len(self.__col_lista)\n for i in range(k):\n x, y = self.__col_lista[i]\n if x > max_x:\n max_x = x\n if y > max_y:\n max_y = y\n return max_x, max_y", "def optimization_bounds(self, topology):\n bounds_low = np.zeros(self.number_of_parameters())\n bounds_up = np.zeros(self.number_of_parameters())\n\n for pkey, parameter in self.parameters.items():\n bounds_low[pkey] = parameter.bound_low(topology)\n bounds_up[pkey] = parameter.bound_up(topology)\n\n return bounds_low, bounds_up", "def getMaximumDistances(self):\n pass", "def compute_bounds(self, space):\n bounds = np.zeros((len(space), 2))\n\n for idx, param in enumerate(space):\n\n if TYPE[param[\"type\"]] is TYPE.FLOAT or \\\n TYPE[param[\"type\"]] is TYPE.INTEGER:\n bounds[idx] = (param[\"min\"], param[\"max\"])\n\n elif TYPE[param[\"type\"]] is TYPE.DISCRETE or \\\n TYPE[param[\"type\"]] is TYPE.DISCRETE:\n bounds[idx] = (0, len(param['values']))\n\n return bounds", "def maximum ( self , xmin = None , xmax = None , x0 = None ) :\n if xmin is None : xmin = self.xminmax()[0]\n if xmax is None : xmax = self.xminmax()[1]\n if self.xminmax() :\n xmin = max ( xmin , self.xminmax()[0] )\n xmax = min ( xmax , self.xminmax()[1] )\n\n if ymin is None : ymin = self.yminmax()[0]\n if ymax is None : ymax = self.yminmax()[1]\n if self.yminmax() :\n ymin = max ( ymin , self.yminmax()[0] )\n ymax = min ( ymax , self.yminmax()[1] )\n \n if not x0 : x0 = 0.5 * ( xmin + xmax ) , 0.5 * ( ymin + ymax )\n\n if not xmin <= x0[0] <= xmax :\n self.error(\"Wrong xmin/x0[0]/xmax: %s/%s/%s\" % ( xmin , x0[0] , xmax ) )\n\n if not ymin <= x0[1] <= ymax : \n self.error(\"Wrong ymin/x0[1]/ymax: %s/%s/%s\" % ( ymin , x0[1] , ymax ) )\n\n from ostap.math.minimize import sp_maximum_2D\n return sp_maximum_2D ( self ,\n xmin , xmax ,\n ymin , ymax , x0 )", "def absmax(self):\n raise NotImplementedError", "def _maximize(self, board, possible_actions, depth_limit, alpha, beta):\r\n pass", "def argmax2(self, cvars=None, ctuple=None):\n if (cvars is None):\n return self.v.ind2sub(self.t.argmax())\n ax = tuple(map(lambda x:ctuple[cvars.index(x)] if x in cvars else slice(None) ,self.v))\n return self.v.ind2sub(self.t[ax].argmax())", "def f_max(cls):\n return cls.params[\"f_max\"]", "def get_max_gains(self):\n return 
tuple([lib.is_SetHWGainFactor(self.hcam,0x800c+i,100)/100 for i in range(4)])", "def get_gridpoint_max(self):\n ind_array = np.indices(self.results_array.shape)\n maxes = []\n\n def get_max(x, y, z):\n \"\"\"\n Would be funnier if I knew a Max.\n \"\"\"\n if isinstance(self.results_array[x][y][z], tuple):\n num_zeros = self.tup_max_length - len(self.results_array[x][y][z])\n if num_zeros != 0:\n print('Number of zeros: ', num_zeros)\n hist_arr = np.array(self.results_array[x][y][z])\n maxes.append(max(hist_arr))\n\n vget_max = np.vectorize(get_max, otypes=[list])\n vget_max(ind_array[0], ind_array[1], ind_array[2])\n return maxes", "def limits(self):\n return self._limits", "def max_diffs(state):\n # your code here\n return best_action(state, pig_actions, Q_pig, win_diff)", "def getValue(self, state):\n \"\"\"Description:\n first get legal actions of current state and find the max q-value among all legalaction. \n \"\"\"\n \"\"\" YOUR CODE HERE \"\"\"\n legalActions = self.getLegalActions(state)\n if len(legalActions) == 0:\n return 0.0\n maxValues = max([ self.getQValue(state, a) for a in legalActions])\n return maxValues\n \n \"\"\" END CODE \"\"\"", "def acq_max(ac, gp, y_max, bounds, random_state, n_warmup=100000, n_iter=250):\n\n # Warm up with random points\n x_tries = random_state.uniform(bounds[:, 0], bounds[:, 1],\n size=(n_warmup, bounds.shape[0]))\n ys = ac(x_tries, gp=gp, y_max=y_max)\n x_max = x_tries[ys.argmax()]\n max_acq = ys.max()\n\n # Explore the parameter space more throughly\n x_seeds = random_state.uniform(bounds[:, 0], bounds[:, 1],\n size=(n_iter, bounds.shape[0]))\n for x_try in x_seeds:\n # Find the minimum of minus the acquisition function\n res = minimize(lambda x: -ac(x.reshape(1, -1), gp=gp, y_max=y_max),\n x_try.reshape(1, -1),\n bounds=bounds,\n method=\"L-BFGS-B\")\n\n # See if success\n if not res.success:\n continue\n \n # Store it if better than previous minimum(maximum).\n if max_acq is None or -res.fun[0] >= max_acq:\n x_max = res.x\n max_acq = -res.fun[0]\n\n # Clip output to make sure it lies within the bounds. Due to floating\n # point technicalities this is not always the case.\n return np.clip(x_max, bounds[:, 0], bounds[:, 1])", "def getLimit(self) :\n\t\treturn (self.modelSizeX, self.modelSizeY)", "def max_positions(self):\n return (self.cfg.max_source_positions, self.cfg.max_target_positions)", "def getAction(self, gameState):\n \"*** YOUR CODE HERE ***\"\n legalActions = gameState.getLegalPacmanActions()\n scoresForAction = {}\n maxAction = \"\"\n maxScore = -99999999999999\n for action in legalActions:\n scoresForAction[action] = self.alphaBeta(gameState.generatePacmanSuccessor(action), self.depth, -(float(\"inf\")), float(\"inf\"), True)\n for x,y in scoresForAction.iteritems():\n if(maxScore < y):\n maxScore = y\n maxAction = x\n return maxAction", "def search_for_maximum(self):\n return self.maximise_aquisition(self.expected_improvement)", "def get_max(self):\n current = self\n while current.hasRight(): # This is the belief that the max has to be to the right. If you can't go right either in the begining or any more\n # if current has a right this line will be set and will keep going from line 129 to 130 until there are no more rights.\n current = current.right\n # this line returns as soon there is no more rights. 
breaking out of the loop.\n return current.value", "def worst_atom(self, g_u, g_v, active_set):\n\n max_w = None\n max_m_w = None\n max_n_w = None\n max_score = -float('inf')\n\n for w in active_set:\n m_w, n_w = self.polytope.vertex(w)\n score_w = np.sum(g_u * m_w) + np.sum(g_v * n_w)\n\n if score_w > max_score:\n max_w = w\n max_m_w = m_w\n max_n_w = n_w\n max_score = score_w\n\n return max_w, max_m_w, max_n_w", "def _get_observation_upper_bound(self):\n upper_bound = np.zeros(self._get_observation_dimension())\n num_motors = self.rex.num_motors\n upper_bound[0:num_motors] = math.pi # Joint angle.\n upper_bound[num_motors:2 * num_motors] = motor.MOTOR_SPEED_LIMIT # Joint velocity.\n upper_bound[2 * num_motors:3 * num_motors] = motor.OBSERVED_TORQUE_LIMIT # Joint torque.\n upper_bound[3 * num_motors:-7] = 1.0 # Quaternion of base orientation.\n upper_bound[-7] = 1.0 # ratio in [0,1]\n upper_bound[-6:-2] = [1.0, 1.0, 1.0, 1.0] # sin in [-1, 1]\n upper_bound[-2:] = [self.max_speed, self.max_side_speed]\n\n return upper_bound", "def max_positions(self):\n if self.embed_positions is None:\n return self.max_target_positions\n return min(self.max_target_positions, self.embed_positions.max_positions())", "def max_positions(self):\n if self.embed_positions is None:\n return self.max_target_positions\n return min(self.max_target_positions, self.embed_positions.max_positions())", "def max_positions(self):\n return self.encoder.max_positions()", "def max_positions(self):\n if self.embed_positions is None:\n return self.max_target_positions\n return min(self.max_target_positions, self.embed_positions.max_positions)", "def max_positions(self):\n if self.embed_positions is None:\n return self.max_target_positions\n return min(self.max_target_positions, self.embed_positions.max_positions)", "def max_positions(self):\n if self.embed_positions is None:\n return self.max_target_positions\n return min(self.max_target_positions, self.embed_positions.max_positions)", "def find_max_score_location(grid, shape):", "def _get_max_estimated_bandit(self)->Bandit:\n # print(\"mus - \", self.mu)\n # print(\"actions - \", np.argmax(self.mu))\n unique, counts = np.unique(self.mu, return_counts=True)\n lens = counts[np.argmax(unique)] \n if lens>1: # if two actions have same argmax\n # then return arbitrarily from those max ones\n maxs = list(np.array(self.bandits)[self.mu==unique[np.argmax(unique)]])\n return np.random.choice(maxs)\n # otherwise return the max one\n return self.bandits[np.argmax(self.mu)]", "def find_max_gap(self, free_space_ranges):\n # mask the bubble\n masked = np.ma.masked_where(free_space_ranges==0, free_space_ranges)\n # get a slice for each contigous sequence of non-bubble data\n slices = np.ma.notmasked_contiguous(masked)\n max_len = slices[0].stop - slices[0].start\n chosen_slice = slices[0]\n # I think we will only ever have a maximum of 2 slices but will handle an\n # indefinitely sized list for portablility\n for sl in slices[1:]:\n sl_len = sl.stop - sl.start\n if sl_len > max_len:\n max_len = sl_len\n chosen_slice = sl\n return chosen_slice.start, chosen_slice.stop", "def getMaximum(self, arguments, maximum):\n\n self.sequence.append({\"type\": \"findingMaximum\", \"coords\": arguments})\n\n self.sequence.append({\"type\": \"foundMaximum\", \"coord\": maximum})", "def max():\n return KeeperOfMinOrMax(int.__lt__)", "def get_max(self):\n return self.max[-1]", "def get_bounds(self):\n occupied_locations = self.board.keys()\n min_x = min(p[0] for p in occupied_locations)\n max_x = max(p[0] for 
p in occupied_locations)\n min_y = min(p[1] for p in occupied_locations)\n max_y = max(p[1] for p in occupied_locations)\n return ((min_x, max_x), (min_y, max_y))", "def port_range_max(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"port_range_max\")", "def max(self):\n\n return time_stat(self, stat=\"max\")", "def return_the_maximum(self):\n\n return self.__max_stack[-1]", "def argmax(self, values):\n return self.aggregate(values, \"argmax\")", "def getUpperBound(self) -> int:\n return self.upper_bound", "def find_max_bin(self):\n x = self.local['clip']\n midrange = x[int(len(x)*0.2):int(len(x)*.2 + int(len(x)*.5))]\n self.max_bin = max(midrange)", "def fuction_call(chest):\n\n for i in chest:\n max_i = maximum(chest,i)\n if max_i >= 2:\n print(\"The maximum size of a set Matyoshka Dolls with outermost doll\",i,\"is\",max_i)", "def return_loose_bounds(maxlum=None):\n return[(None,None), (10**-6, None), (2., 350),\n (None, -10**-6), (None, None)]", "def get_max_action(self, s):\r\n return 0.0" ]
[ "0.6918863", "0.66844064", "0.6673597", "0.6575035", "0.6552709", "0.6513679", "0.6467321", "0.6414459", "0.62708175", "0.62123346", "0.620865", "0.62085176", "0.62085176", "0.6191484", "0.6156986", "0.6142866", "0.6112747", "0.6100505", "0.6096278", "0.60705197", "0.60639876", "0.60580313", "0.60451984", "0.6045136", "0.60360354", "0.6034074", "0.6019618", "0.6010511", "0.601019", "0.5999817", "0.5999704", "0.5991145", "0.596524", "0.596099", "0.59354466", "0.59354466", "0.5933791", "0.59142137", "0.59068346", "0.58933717", "0.589185", "0.5887538", "0.5887538", "0.5887538", "0.5887538", "0.5887538", "0.5887538", "0.5886272", "0.5871158", "0.5850033", "0.584171", "0.5837671", "0.5833876", "0.58306015", "0.5815361", "0.58132344", "0.58069587", "0.5804866", "0.58010787", "0.58007234", "0.5796837", "0.57940954", "0.57845694", "0.5773016", "0.5768179", "0.5767688", "0.57614404", "0.5759451", "0.5751912", "0.57464564", "0.5744125", "0.5740404", "0.5739193", "0.5737878", "0.57362926", "0.57353264", "0.57343715", "0.5733121", "0.57293373", "0.57293373", "0.57273537", "0.5724116", "0.5724116", "0.5724116", "0.5723477", "0.5721822", "0.5720979", "0.5719468", "0.57187474", "0.57148355", "0.5701709", "0.5699702", "0.5697537", "0.56901103", "0.5681286", "0.5678094", "0.56746155", "0.56717056", "0.5669751", "0.56678563" ]
0.6770987
1
The _generate_iterator method creates an iterator which runs over all possible parameter combinations
def _generate_iterator(self) -> Iterable: params: List[Tensor] = [] for angle_range in self._ranges: lin_space: Tensor = linspace(angle_range[0], angle_range[1], steps=self._num_steps) params.append(lin_space) power: int dims: int for i in range(0, self._num_params): power = len(self._ranges) - 1 - i dims = i params[i] = params[i].repeat_interleave(self._num_steps ** power) params[i] = params[i].broadcast_to((self._num_steps ** dims, self._num_steps ** (power + 1))).flatten() return zip(*params)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __iter__(self):\n for p in self.param_grid:\n # Always sort the keys of a dictionary, for reproducibility\n items = sorted(p.items())\n if not items:\n yield {}\n else:\n keys, values = zip(*items)\n for v in product(*values):\n params = dict(zip(keys, v))\n yield params", "def __iter__(self):\n leaf_paths, leaf_vals = self._find_combinatorial_leaves()\n return self._combinations_generator(leaf_paths, leaf_vals)", "def __iter__(self):\n for p in self.param_grid:\n # Always sort the keys of a dictionary, for reproducibility\n modstr = '%s__' % self.modality\n items = sorted([(k.replace('clf__'+modstr, ''), v) for k, v in p.items() if modstr in k])\n if not items:\n yield {}\n else:\n keys, values = zip(*items)\n for v in product(*values):\n params = dict(zip(keys, v))\n yield params", "def __iter__(self):\n return self._product_generator()", "def __http_requests_generator(request_template, parameters):\n for payload in itertools.product(*parameters):\n yield request_template.format(*payload), payload", "def __iter__(self):\n yield from self.gen", "def generator(self) -> Iterator[Tuple[int, int, complex]]:\n for inda in range(self._core.lena()):\n alpha_str = self._core.string_alpha(inda)\n for indb in range(self._core.lenb()):\n beta_str = self._core.string_beta(indb)\n yield alpha_str, beta_str, self.coeff[inda, indb]", "def parameters(self):\n for parameters in self:\n for parameter in parameters:\n yield parameter", "def generate_assignment(parameters):\n if len(parameters) == 0:\n yield []\n raise StopIteration()\n cp_pars = copy.deepcopy(parameters)\n par, values = cp_pars.popitem()\n for val in values:\n for r in generate_assignment(cp_pars):\n yield r + [(par,val)]", "def map(_, params):\n import numpy as np\n from itertools import product\n from random import shuffle\n\n if 'param_set' in params:\n parameter_sets = params['param_set']\n else:\n alphas = params['alphas']\n Vs = params['Vs']\n gammas = params['gammas']\n parameter_sets = [item for item in product(alphas, gammas, Vs)]\n shuffle(parameter_sets)\n\n ## discretize the parameter configurations and equitably distribute\n ## them for the next map instance to deal with.\n chunk_length = len(parameter_sets)/params['nprocs']\n leftover = len(parameter_sets) % params['nprocs']\n for n in xrange(params['nprocs']):\n if n < leftover:\n left = n*(1+chunk_length)\n to_yield = parameter_sets[left:left+1+chunk_length]\n else:\n left = leftover*(1+chunk_length) + (n-leftover)*chunk_length\n to_yield = parameter_sets[left:left+chunk_length]\n #print n, to_yield, len(to_yield)\n yield (n, to_yield)", "def generator(func):\n\n @fn\n @wraps(func)\n def gen(*args, **kwargs):\n return Iter(func(*args, **kwargs))\n\n return gen", "def __iter__(self):\n from sage.combinat.posets.posets import FinitePosets_n\n n = 0\n while True:\n for P in FinitePosets_n(n):\n yield P\n n += 1", "def __iter__():", "def __iter__():", "def __iter__():", "def __iter__():", "def __iter__(self):\n return iproduct(*self.sets)", "def iterator(self):\n yield", "def __iter__(self):\n for o in self._iter:\n yield o", "def _get_param_iterator(self, modality):\n return ParameterGridSeparate(self.param_grid, modality)", "def iterparams(params: Dict[str, List[Any]]) -> Dict[str, Any]:\n for set in product(*params.values()):\n yield dotdict(zip(params.keys(), set))", "def _generate_combinations(self, param_idx, params):\n\n if param_idx == len(self.grid) - 1:\n # last parameter, just return list of values for this parameter\n return [[value] for value in 
self.grid[params[param_idx]]]\n else:\n subcombinations = self._generate_combinations(param_idx + 1, params) # returns list of param combinations\n result = []\n\n # iterate over all values of current parameter\n for value in self.grid[params[param_idx]]:\n for subcombination in subcombinations:\n result.append([value] + subcombination)\n\n return result", "def __iter__(self):\n for batch in self.iterator:\n yield Batch.from_iterator_batch(batch, self.pad_index, self.sos_index, self.eos_index)", "def __iter__(self):\n return self.new_generator()", "def iter_jobs(self):\n for param in self._parameters:\n yield param", "def iter_params(self):\n for var, val in self._params.iteritems():\n yield var, val", "def args_generator(args, num_exps, grid_search):\n od = OrderedDict(args)\n\n if grid_search:\n keys = [param for param in od]\n\n for i, vals in enumerate(itertools.product(*[value for param, value in od.items()])):\n yield zip(keys + ['-exp-id'], [str(val) for val in vals] + [str(i)])\n else:\n for _ in range(num_exps):\n args_setting = [(pname, str(next(pvalue))) for pname, pvalue in od.items()]\n\n yield args_setting", "def _build_iterable(self):", "def __iter__(self):\n from sage.misc.mrange import cartesian_product_iterator\n\n if self._cd._length == 1:\n if self._cd._degree == 1:\n yield self([[0]])\n return\n\n S = self._cd._sym\n profile = list(self._profile)[:-1]\n for p in cartesian_product_iterator([S.conjugacy_class(pi)\n for pi in profile]):\n if self._cd._connected and not perms_are_connected(p, self._cd._degree):\n continue\n c = self._cd(list(p) + [None], check=False)\n if c.profile() == self._profile:\n yield c", "def _iterator_codegen(resty):\n\n def codegen(context, builder, sig, args):\n [d] = args\n [td] = sig.args\n iterhelper = context.make_helper(builder, resty)\n iterhelper.parent = d\n iterhelper.state = iterhelper.state.type(None)\n return impl_ret_borrowed(\n context,\n builder,\n resty,\n iterhelper._getvalue(),\n )\n\n return codegen", "def generate_operations(self):\n combinations = self.COMBINATIONS.items()[:self.limit]\n for (term1, term2), type in combinations:\n yield (term1, term2, type)", "def __iter__(self):\n for x in self.innings:\n yield x", "def test_nested_gen(n):\n for a in range(n):\n yield (b for b in range(a))", "def iterate(self):\n yield self\n for x in self:\n for y in x.iterate():\n yield y", "def __iter__(self):\n return self.params.values().__iter__()", "def dsn_param_iter(self) -> Iterable[Tuple[str, Dict[str, Any]]]:\n for combo in self.swp_combo_iter():\n yield self.get_design_name(combo), self._get_params(combo)", "def _get_param_iterator(self):\n return model_selection.ParameterGrid(self.param_grid)", "def _get_param_iterator(self):\n return model_selection.ParameterGrid(self.param_grid)", "def __iter__(self):\n for i in range(self.n):\n yield self.get(i, i + 1)", "def __iter__(self):\n return iter((self.r, self.g, self.b))", "def __iter__(self):\n pt = (self.x, self.y)\n for i in pt:\n yield i", "def __iter__(self):\n for i in range(self.m):\n for j in range(self.n):\n yield self[i, j]", "def __iter__(self):\n for x in self.seq: yield x", "def get_params_iter(self):\n return []", "def __iter__(self):\n for b in self.x:\n yield b", "def iterate_layers(self, *args):\n for layer in range(self.num_layers):\n yield layer, (\n (\n direction,\n tuple(arg[self.num_directions * layer + direction] for arg in args),\n )\n for direction in range(self.num_directions)\n )", "def test_generator_method(self):\n for i in range(0, 4):\n yield 
self.try_odd, i", "def all(self):\n datapoint_params = self._make_datapooint_param_iter()\n if datapoint_params is None:\n return iter([])\n params_list = list(datapoint_params) # construct param list\n return self._gen(params_list)", "def __iter__(self):\n for i in range(len(self.ks)):\n yield self.get_neighs([i]), self.get_sp_rel_pos([i]),\\\n [self.ks[i]], self.iss", "def __iter__(self):\r\n return self._iterate()", "def _get_iterator(self, dataset_type, eval_mode, **kwargs):", "def get_params_iter(self):\n return itertools.chain(np.nditer(self.W, op_flags=['readwrite']),\n np.nditer(self.b, op_flags=['readwrite']))", "def params(self) -> Iterable[sympy.Symbol]:\n for i in range(self.iterations):\n for p in range(len(self.qubits)):\n if (self.include_all_z or not\n numpy.isclose(self.orbital_energies[p], 0)):\n yield LetterWithSubscripts('U', p, i)\n for p, q in itertools.combinations(range(len(self.qubits)), 2):\n if (self.include_all_cz or not\n numpy.isclose(self.hamiltonian.two_body[p, q], 0)):\n yield LetterWithSubscripts('V', p, q, i)", "def option_registrations_iter(self):\n\n def normalize_kwargs(orig_args, orig_kwargs):\n nkwargs = copy.copy(orig_kwargs)\n dest = self.parse_dest(*orig_args, **nkwargs)\n nkwargs[\"dest\"] = dest\n if not (\"default\" in nkwargs and isinstance(nkwargs[\"default\"], RankedValue)):\n type_arg = nkwargs.get(\"type\", str)\n member_type = nkwargs.get(\"member_type\", str)\n default_val = self.to_value_type(nkwargs.get(\"default\"), type_arg, member_type)\n if isinstance(default_val, (ListValueComponent, DictValueComponent)):\n default_val = default_val.val\n nkwargs[\"default\"] = RankedValue(Rank.HARDCODED, default_val)\n return nkwargs\n\n # Yield our directly-registered options.\n for args, kwargs in self._option_registrations:\n normalized_kwargs = normalize_kwargs(args, kwargs)\n yield args, normalized_kwargs", "def origami_H2_iterator(n, reducible=False, output=\"coordinates\"):\n from itertools import chain\n return chain(\n origami_H2_1cyl_iterator(n,reducible,output),\n origami_H2_2cyl_iterator(n,reducible,output))", "def __iter__(self):\n return iter(())", "def _for_each_generator(self,\n func: Callable[..., Any],\n *args: Iterable[Any]) -> List[Any]:\n return [func(gen, *args_for_func) for gen, args_for_func in zip(\n self._generators, zip(*args))]", "def __iter__(self):\n for key in itertools.chain(list(self._opts.keys()),\n list(self._groups.keys())):\n yield key", "def __iter__(self):\n yield self.x\n yield self.y\n # Or, you could also do:\n # return iter([self.x, self.y])", "def _get_param_iterator(self):\n return model_selection.ParameterSampler(self.param_distributions,\n self.n_iter, random_state=self.random_state)", "def vytvorit_generator():\n mylist = range(3)\n print 'mylist = ', mylist\n for element in mylist:\n yield element", "def __iter__(self):\n if self.use_dic:\n for data in sorted(self.dic):\n self.data = data\n for activity in sorted(self.dic[data]):\n self.activity = activity\n for imsize in sorted(self.dic[data][activity]):\n self.imsize = imsize\n self.allimgs, self.alllabels = [], []\n for img in sorted(self.dic[data][activity][imsize]):\n self.img = img\n self.labels = self.dic[data][activity][imsize][img]\n if self.imlist:\n self.allimgs.append(self.img)\n self.alllabels.append(self.labels)\n else:\n yield data, activity, imsize, img, self.labels\n self.i += 1\n if self.imlist:\n yield data, activity, imsize, self.allimgs, self.alllabels\n self.i += 1\n else:\n for data in sorted(self.dic):\n self.img = data\n 
self.labels = self.dic[data]\n yield self.img, self.labels\n self.i += 1", "def __iter__(self) -> Generator:\r\n yield from self.sequence", "def __iter__(self):\n\n # For each key in set of keys\n for key in self.keys_set:\n\n # Yield that key and associated value\n yield key, self.__getitem__(key)", "def iterate_inputs(function, type_to_vars):\n if isinstance(function.input_type, tuple):\n input_types = list(function.input_type)\n else:\n input_types = [function.input_type]\n\n argslists = []\n for input_type in input_types:\n argslists.append(type_to_vars[input_type])\n for args in itertools.product(*argslists):\n yield args", "def _get_iter(self, url, params):\n for current_page_index in itertools.count():\n result_dict = self._get_page(url, params, current_page_index)\n for document in result_dict['entries']:\n yield document\n if not result_dict['isNextPageAvailable']:\n break", "def mix_iterator(self):\n self.job = OrderedDict()\n for list_i in self.grid_iterator():\n # Pick the values to be used in this run\n for (k, i) in zip(self.table.keys(), list_i):\n self.job[k] = self.table[k][i]\n # Do the string replace operations on the values themselves\n self.expand_values()\n yield self.job", "def __iter__(self):\n for val in self.value:\n yield val", "def _get_param_iterator(self):\n return model_selection.ParameterSampler(\n self.param_distributions, self.n_iter, random_state=self.random_state\n )", "def __iter__(self):\n for index in range(len(self)):\n yield self[index]", "def __iter__(self):\n from itertools import product\n\n if self._length == 1:\n if self._degree == 1:\n yield self([[0]])\n return\n\n S = self._sym\n for p in product(S, repeat=self._length - 1):\n if self._connected and not perms_are_connected(p, self._degree):\n continue\n yield self(list(p) + [None], check=False)", "def __iter__(self):\n for sample in self.samples:\n yield sample", "def __iter__(self):\n yield self", "def main(start, end):\n for i in range(start, end+1):\n yield i, square(i), cube(i)", "def __iter__(self):\n for i in range(len(self)):\n yield self[i]", "def __iter__(self):\n for i in range(len(self)):\n yield self[i]", "def generate_objects_of_size(\n self, n: int, **parameters: int\n ) -> Iterator[CombinatorialObjectType]:", "def generator():\n mygenerator = (x for x in range(3))\n for element in mygenerator:\n print 'poprve = ', element\n\n for element in mygenerator:\n print 'podruhe = ', element", "def _get_index_iterator(indexes, length):\n return combinations(indexes, length)", "def __iter__(self):\r\n \r\n return iter(self._by_number)", "def __iter__(self):\n yield self._x\n yield self._y", "def __iter__(self):\n for run in self.runs:\n yield run", "def _combinations_generator(self, leaf_paths, leaf_vals):\n template_tree = CombinatorialTree(self._d)\n\n # All leaf values must be CombinatorialLeafs at this point\n assert all(isinstance(leaf_val, CombinatorialLeaf) for leaf_val in leaf_vals)\n\n # generating all combinations\n for combination in itertools.product(*leaf_vals):\n # update values of template tree\n for leaf_path, leaf_val in zip(leaf_paths, combination):\n template_tree[leaf_path] = leaf_val\n yield copy.deepcopy(template_tree._d)", "def __iter__(self):\n for x in self._order:\n yield x", "def generator(self):\n global_index = 0\n n_params = len(self.params)\n while (global_index < self.NXFLTEXP*self.paramspace):\n # skip row that have data already\n while (np.sum(self.spectra_hdu.data[global_index][1]) > 0.0): \n global_index += self.NXFLTEXP\n if (global_index >= 
self.NXFLTEXP*self.paramspace): break\n if (global_index >= self.NXFLTEXP*self.paramspace): break\n\n # get indexes in each grid; the last grid changing the fastest\n param_indexes = np.zeros(n_params, dtype=int)\n param_values = np.zeros(n_params)\n N0 = self.paramspace\n for i in range(n_params):\n (p_name, p_grid, p_log, p_frozen) = self.params[i]\n N = len(p_grid)\n N0 /= N\n p_index = int((global_index/3)//N0 % N)\n #print('global_index',global_index)\n #print('p_index',p_index)\n #print('p_grid[p_index]',p_grid[p_index])\n #print('p_grid',p_grid)\n param_indexes[i] = p_index\n param_values[i] = p_grid[p_index]\n\n # write parameter values (repeat the same parameters for each spectrum of the set) \n for i in range(self.NXFLTEXP):\n self.spectra_hdu.data[global_index+i][0] = param_values\n #end for\n\n # return total index, array of grid indexes, and array of grid values\n #sys.stderr.write(\"> generator: passing spectrum index %d (%s %s)\\n\" % (global_index, str(param_indexes), str(param_values)))\n yield (global_index, param_values, param_indexes, self.energies)\n global_index += self.NXFLTEXP\n #end while", "def __iter__(self):\n for id in self.order():\n inputs = [w for w in self.wires if w['target'][0] == id]\n yield id, inputs", "def _search_generator(self, search_params):\n atlas_endpoint = self.endpoint_url + \"/search/advanced\"\n offset = 0\n\n while True:\n postSearchResults = requests.post(\n atlas_endpoint,\n json=search_params,\n headers=self.authentication.get_authentication_headers()\n )\n results = self._handle_response(postSearchResults)\n return_values = results[\"value\"]\n return_count = len(return_values)\n\n if return_count == 0:\n raise StopIteration\n\n offset = offset + return_count\n search_params[\"offset\"] = offset\n yield return_values", "def _create_node_iterator(self) -> Iterator[GraphNode]:\n return\n yield", "def gen_resources(resource: Callable, **list_params) -> Generator[List, None, None]:\n print(\"Generating resources.\")\n if \"maxResults\" not in list_params.keys():\n list_params[\"maxResults\"] = DEFAULT_MAX_RESULTS\n\n next_page_token = None\n while True:\n if next_page_token:\n list_params[\"pageToken\"] = next_page_token\n\n request = resource().list(**list_params)\n # print(\"\\t\\tRequest made successfully.\")\n response = request.execute()\n # print(f\"\\t\\tRaw response: {response}\")\n\n data = response[\"items\"]\n print(f\"\\tRetrieved {len(data)}\")\n\n yield data\n\n if \"nextPageToken\" in response.keys():\n next_page_token = response[\"nextPageToken\"]\n else:\n print(\"\\tReached last page.\")\n break\n\n return None", "def __iter__(self):\n for point in self.points:\n yield point", "def iter(self):\n\t\tfor element in self.elements:\n\t\t\tyield element", "def iter(self):\n\t\tfor element in self.elements:\n\t\t\tyield element", "def __iter__(self):\n indices = []\n for i, size in enumerate(self.group_sizes):\n if size == 0:\n continue\n indice = np.where(self.flag == i)[0]\n if not len(indice) == size:\n raise ValueError('the length of the indice should be equal to the size')\n np.random.shuffle(indice)\n num_extra = int(np.ceil(size / self.samples_per_gpu)\n ) * self.samples_per_gpu - len(indice)\n indice = np.concatenate([indice, indice[:num_extra]])\n indices.append(indice)\n indices = np.concatenate(indices)\n indices = [\n indices[i * self.samples_per_gpu:(i + 1) * self.samples_per_gpu]\n for i in np.random.permutation(\n range(len(indices) // self.samples_per_gpu))\n ]\n indices = np.concatenate(indices)\n indices = 
torch.from_numpy(indices).long()\n if not len(indices) == self.num_samples:\n raise ValueError(\"the length of the indices should be equal to num_samples\")\n return iter(indices)", "def __iter__(self):\n for label, coord_seq in self.coords.items():\n for coordinate in coord_seq:\n yield (label, tuple(coordinate),)", "def gen_parameters(generator=2,key_size=2048,backend=backend):\n\treturn dh.generate_parameters(generator,key_size,backend)", "def __iter__(self):\n\n if self.output_mode:\n process_atom = self._process_atom_output\n self.output_names = self.names[:]\n else:\n process_atom = self._process_atom\n\n if self.output_mode is False:\n getLogger(\"problog_lfi\").debug(\"\\nProcessed Atoms:\")\n for clause in self.source:\n if isinstance(clause, Clause):\n if clause.head.functor == \"query\" and clause.head.arity == 1:\n continue\n extra_clauses = process_atom(clause.head, clause.body)\n for extra in extra_clauses:\n if self.output_mode is False:\n getLogger(\"problog_lfi\").debug(\"\\t\" + str(extra))\n yield extra\n elif isinstance(clause, AnnotatedDisjunction):\n extra_clauses = process_atom(Or.from_list(clause.heads), clause.body)\n for extra in extra_clauses:\n if self.output_mode is False:\n getLogger(\"problog_lfi\").debug(\"\\t\" + str(extra))\n yield extra\n else:\n if clause.functor == \"query\" and clause.arity == 1:\n continue\n # Fact\n extra_clauses = process_atom(clause, None)\n for extra in extra_clauses:\n if self.output_mode is False:\n getLogger(\"problog_lfi\").debug(\"\\t\" + str(extra))\n yield extra\n\n if self.leakprob is not None:\n leakprob_atoms = self._get_leakprobatoms()\n for example_atom in leakprob_atoms:\n yield example_atom.with_probability(Constant(self.leakprob))", "def _product_generator(self):\n categories = ColesCategoryIterator(self._base_url)\n self.total = len(categories)\n for category in categories:\n # print(\"Searching Category: {}\".format(category))\n # print self._get_url(category)\n self._params['beginIndex'] = \"0\"\n while self._has_next_page():\n self._get_data(category)\n self._update_search_info()\n for product in self._product_data:\n # record the product in the database\n yield product\n self.current += 1", "def __iter__(self):\n for runspec in self.runspecs:\n yield runspec", "def __iter__(self):\n yield from self.calls", "def generate(self, batch_size, s=\"train\"):\n while True:\n pairs, targets = self.get_batch(batch_size,s)\n yield (pairs, targets)" ]
[ "0.7333558", "0.6785712", "0.67566705", "0.67345154", "0.66711015", "0.66702646", "0.66659456", "0.64822334", "0.6465031", "0.64270854", "0.64160955", "0.6409563", "0.638886", "0.638886", "0.638886", "0.638886", "0.6387711", "0.6350889", "0.6341186", "0.6333276", "0.6317058", "0.63053447", "0.6263301", "0.62535733", "0.62523204", "0.6237615", "0.621541", "0.6198058", "0.6197711", "0.6158161", "0.61561996", "0.6155784", "0.6149895", "0.6148126", "0.61000663", "0.6074231", "0.6066024", "0.6066024", "0.6051245", "0.60423476", "0.6029669", "0.60287684", "0.602749", "0.6022905", "0.6012304", "0.600886", "0.5994109", "0.59858865", "0.5980283", "0.5979541", "0.5972105", "0.5960275", "0.5957515", "0.59452045", "0.59452", "0.5939458", "0.5931372", "0.5920569", "0.59129643", "0.59124064", "0.5912374", "0.5911495", "0.5903588", "0.5902496", "0.58941764", "0.58858305", "0.588063", "0.58796465", "0.58730227", "0.586746", "0.58542275", "0.58512074", "0.58499604", "0.58349997", "0.58295286", "0.58295286", "0.58292043", "0.58246464", "0.5824014", "0.58233505", "0.5810631", "0.58100736", "0.58048993", "0.5802351", "0.5798427", "0.579676", "0.57960135", "0.5790933", "0.5789518", "0.5785454", "0.57829505", "0.57829505", "0.5782386", "0.57730186", "0.5771929", "0.57568383", "0.57535017", "0.5751914", "0.57501835", "0.5748725" ]
0.73833257
0
computes and returns a complex rotation matrix given by the angles in params.
def operator(self, params: Tensor) -> Tensor: theta, phi = params # calculate entries a: Tensor = exp(1j * phi) * cos(theta / 2) b: Tensor = sin(theta / 2) c: Tensor = -b d: Tensor = exp(-1j * phi) * cos(theta / 2) # construct the rows of the rotation matrix r1: Tensor = cat((a.view(1), b.view(1))) r2: Tensor = cat((c.view(1), d.view(1))) # build and return the rotation matrix rot: Tensor = cat((r1, r2)).view(2, 2) return rot
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def rotation_matrix(theta=0, phi=0, psi=0, units='deg'):\n\n rpy = Rpy(theta,units)\n rmx = Rmx(phi, units)\n rpz = Rpz(psi, units)\n\n return np.matmul(rpy, np.matmul(rmx, rpz))", "def rotation_matrix3(angle_x=0, angle_y=0, angle_z=0):\n if angle_x != 0:\n c, s = cos(angle_x), sin(angle_x)\n r = np.array([[1, 0, 0], [0, c, -s], [0, s, c]])\n else:\n r = np.identity(3)\n\n if angle_y != 0:\n c, s = cos(angle_y), sin(angle_y)\n r = r.dot(np.array([[c, 0, s], [0, 1, 0], [-s, 0, c]]))\n\n if angle_z != 0:\n c, s = cos(angle_z), sin(angle_z)\n r = r.dot(np.array([[c, -s, 0], [s, c, 0], [0, 0, 1]]))\n\n return r", "def make_sample_rot_matrix(self, angles):\n (phi, chi) = angles[0:2]\n omega = np.deg2rad(self.omega)\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def make_sample_rot_matrix(self, angles):\n (phi, chi, omega) = angles[0:3]\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def make_sample_rot_matrix(self, angles):\n (phi, omega) = angles[0:2]\n chi = np.deg2rad(self.chi)\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def make_sample_rot_matrix(self, angles):\n (phi, omega) = angles[0:2]\n chi = np.deg2rad(self.chi)\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def generate_rotation_matrix(x_angle, y_angle, z_angle):\n return np.array([\n [1, 0, 0],\n [0, np.cos(x_angle), -np.sin(x_angle)],\n [0, np.sin(x_angle), np.cos(x_angle)],\n ]).dot([\n [np.cos(y_angle), 0, np.sin(y_angle)],\n [0, 1, 0],\n [-np.sin(y_angle), 0, np.cos(y_angle)],\n ]).dot([\n [np.cos(z_angle), -np.sin(z_angle), 0],\n [np.sin(z_angle), np.cos(z_angle), 0],\n [0, 0, 1],\n ]).tolist()", "def rotation_matrices_from_angles(angles):\n\n angles = np.atleast_1d(angles)\n npts = len(angles)\n\n sina = np.sin(angles)\n cosa = np.cos(angles)\n\n R = np.zeros((npts, 2, 2))\n R[:, 0, 0] = cosa\n R[:, 1, 1] = cosa\n\n R[:, 0, 1] = -sina\n R[:, 1, 0] = sina\n\n return R", "def make_sample_rot_matrix(self, angles):\n (phi, chi, omega) = self.get_phi_chi_omega(angles)\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def make_sample_rot_matrix(self, angles):\n (phi, chi, omega) = self.get_phi_chi_omega(angles)\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def make_sample_rot_matrix(self, angles):\n (phi, chi, omega) = self.get_phi_chi_omega(angles)\n return numpy_utils.rotation_matrix(phi, chi, omega)", "def create_rotation_matrix_3d(angles) -> np.array:\n\n mat1 = np.array([[1., 0., 0.],\n [0., math.cos(angles[0]), math.sin(angles[0])],\n [0., -math.sin(angles[0]), math.cos(angles[0])]],\n dtype='float')\n\n mat2 = np.array([[math.cos(angles[1]), 0., -math.sin(angles[1])],\n [0., 1., 0.],\n [math.sin(angles[1]), 0., math.cos(angles[1])]],\n dtype='float')\n\n mat3 = np.array([[math.cos(angles[2]), math.sin(angles[2]), 0.],\n [-math.sin(angles[2]), math.cos(angles[2]), 0.],\n [0., 0., 1.]],\n dtype='float')\n\n mat = (mat1 @ mat2) @ mat3\n return mat", "def rotation_matrix(angle, axis):\n about_z = rotation_about_z(angle)\n z_to_axis = z_to_vector(axis)\n axis_to_z = np.linalg.inv(z_to_axis)\n return reduce(np.dot, [z_to_axis, about_z, axis_to_z])", "def rotation_matrix(xangle, yangle, zangle, order='zxy', degrees=False):\r\n if degrees:\r\n xangle = math.radians(xangle)\r\n yangle = math.radians(yangle)\r\n zangle = math.radians(zangle)\r\n\r\n # Here we assume we rotate z, then x then y.\r\n c1 = math.cos(xangle) # The x angle\r\n c2 = math.cos(yangle) # The y angle\r\n c3 = math.cos(zangle) # the z angle\r\n s1 = math.sin(xangle)\r\n s2 = math.sin(yangle)\r\n s3 = 
math.sin(zangle)\r\n\r\n # see http://en.wikipedia.org/wiki/Rotation_matrix for\r\n # additional info.\r\n\r\n if order=='zxy':\r\n rot_mat = np.array([[c2*c3-s1*s2*s3, c2*s3+s1*s2*c3, -s2*c1],[-c1*s3, c1*c3, s1],[s2*c3+c2*s1*s3, s2*s3-c2*s1*c3, c2*c1]])\r\n else:\r\n rot_mat = np.eye(3)\r\n for i in range(len(order)):\r\n if order[i]=='x':\r\n rot_mat = np.dot(np.array([[1, 0, 0], [0, c1, s1], [0, -s1, c1]]),rot_mat)\r\n elif order[i] == 'y':\r\n rot_mat = np.dot(np.array([[c2, 0, -s2], [0, 1, 0], [s2, 0, c2]]),rot_mat)\r\n elif order[i] == 'z':\r\n rot_mat = np.dot(np.array([[c3, s3, 0], [-s3, c3, 0], [0, 0, 1]]),rot_mat)\r\n\r\n return rot_mat", "def rotation_matrix( axis, angle ):\n\n # Trig factors.\n ca = cos(angle)\n sa = sin(angle)\n C = 1 - ca\n\n # Depack the axis.\n x, y, z = tuple( axis )\n\n # Multiplications (to remove duplicate calculations).\n xs = x*sa\n ys = y*sa\n zs = z*sa\n xC = x*C\n yC = y*C\n zC = z*C\n xyC = x*yC\n yzC = y*zC\n zxC = z*xC\n\n # Update the rotation matrix.\n matrix \t = np.zeros( (3,3) )\n matrix[0, 0] = x*xC + ca\n matrix[0, 1] = xyC - zs\n matrix[0, 2] = zxC + ys\n matrix[1, 0] = xyC + zs\n matrix[1, 1] = y*yC + ca\n matrix[1, 2] = yzC - xs\n matrix[2, 0] = zxC - ys\n matrix[2, 1] = yzC + xs\n matrix[2, 2] = z*zC + ca\n return matrix", "def getRotationMatrix(x, y, z, angle):\n # impossible to have a rotational matrix around (0, 0 ,0)\n if x == 0 and y == 0 and z == 0:\n raise Exception(\"Cannot have a rotation matrix around (0, 0, 0)\")\n\n # normalize vector\n vec = MatrixExtended([x, y, z])\n length = np.linalg.norm(vec)\n x /= length\n y /= length\n z /= length\n\n # some shortcuts for readability\n xx = x * x\n yy = y * y\n zz = z * z\n C = math.cos\n S = math.sin\n\n # calculate matrix elements\n e11 = xx + (1 - xx) * C(angle)\n e12 = x * y * (1 - C(angle)) - z * S(angle)\n e13 = x * z * (1 - C(angle)) + y * S(angle)\n e21 = x * y * (1 - C(angle)) + z * S(angle)\n e22 = yy + (1 - yy) * C(angle)\n e23 = y * z * (1 - C(angle)) - x * S(angle)\n e31 = x * z * (1 - C(angle)) - y * S(angle)\n e32 = y * z * (1 - C(angle)) + x * S(angle)\n e33 = zz + (1 - zz) * C(angle)\n\n return MatrixExtended([\n [e11, e12, e13, 0],\n [e21, e22, e23, 0],\n [e31, e32, e33, 0],\n [0, 0, 0, 1]])", "def RotationMatrix(theta, x, y, z, point=None):\n\treturn mach.rotation_matrix(theta, [x, y, z])", "def rotation_matrix(rotate):\n tx, ty, tz = rotate\n Rx = np.array([[1, 0, 0], [0, np.cos(tx), -np.sin(tx)], [0, np.sin(tx), np.cos(tx)]])\n Ry = np.array([[np.cos(ty), 0, -np.sin(ty)], [0, 1, 0], [np.sin(ty), 0, np.cos(ty)]])\n Rz = np.array([[np.cos(tz), -np.sin(tz), 0], [np.sin(tz), np.cos(tz), 0], [0, 0, 1]])\n return np.dot(Rx, np.dot(Ry, Rz))", "def axisAnglesToRotMat(xrot, yrot, zrot):\n\n xmat = np.eye(3)\n ymat = np.eye(3)\n zmat = np.eye(3)\n\n xmat[1, 1] = np.cos(xrot)\n xmat[1, 2] = -np.sin(xrot)\n xmat[2, 1] = np.sin(xrot)\n xmat[2, 2] = np.cos(xrot)\n\n ymat[0, 0] = np.cos(yrot)\n ymat[0, 2] = np.sin(yrot)\n ymat[2, 0] = -np.sin(yrot)\n ymat[2, 2] = np.cos(yrot)\n\n zmat[0, 0] = np.cos(zrot)\n zmat[0, 1] = -np.sin(zrot)\n zmat[1, 0] = np.sin(zrot)\n zmat[1, 1] = np.cos(zrot)\n\n return concat(zmat, ymat, xmat)", "def axisAnglesToRotMat(xrot, yrot, zrot):\n\n xmat = np.eye(3)\n ymat = np.eye(3)\n zmat = np.eye(3)\n\n xmat[1, 1] = np.cos(xrot)\n xmat[1, 2] = -np.sin(xrot)\n xmat[2, 1] = np.sin(xrot)\n xmat[2, 2] = np.cos(xrot)\n\n ymat[0, 0] = np.cos(yrot)\n ymat[0, 2] = np.sin(yrot)\n ymat[2, 0] = -np.sin(yrot)\n ymat[2, 2] = np.cos(yrot)\n\n zmat[0, 0] = 
np.cos(zrot)\n zmat[0, 1] = -np.sin(zrot)\n zmat[1, 0] = np.sin(zrot)\n zmat[1, 1] = np.cos(zrot)\n\n return concat(zmat, ymat, xmat)", "def rotation_mat(self) -> np.ndarray:\n rot = np.zeros((3, 3))\n\n txx = 2 * self.x * self.x\n tyy = 2 * self.y * self.y\n tzz = 2 * self.z * self.z\n twx = 2 * self.w * self.x\n twy = 2 * self.w * self.y\n twz = 2 * self.w * self.z\n txy = 2 * self.x * self.y\n txz = 2 * self.x * self.z\n tyz = 2 * self.y * self.z\n\n rot[0, 0] = 1. - tyy - tzz\n rot[0, 1] = txy - twz\n rot[0, 2] = txz + twy\n rot[1, 0] = txy + twz\n rot[1, 1] = 1. - txx - tzz\n rot[1, 2] = tyz - twx\n rot[2, 0] = txz - twy\n rot[2, 1] = tyz + twx\n rot[2, 2] = 1. - txx - tyy\n\n return rot", "def matrix_rotate_3d_z(deg: float) -> np.matrix:\n from numpy import cos, sin, pi\n rad_z = -deg * pi/180\n c_z = cos(rad_z)\n s_z = sin(rad_z)\n return np.matrix([[c_z, -s_z, 0], [s_z, c_z, 0], [0, 0, 1]])", "def _rmatrix(theta):\n r = np.zeros((4, 4), np.complex128)\n\n cos_term = np.cos(theta / 2.0) * complex(1, 0)\n sin_term = np.sin(theta / 2.0) * complex(1, 0)\n\n r[0, 0] = cos_term\n r[1, 1] = cos_term\n\n r[0, 2] = sin_term\n r[1, 3] = sin_term\n\n r[2, 0] = -sin_term\n r[3, 1] = -sin_term\n\n r[2, 2] = cos_term\n r[3, 3] = cos_term\n\n return r", "def multi_rot_Z(angle_rads: numpy.ndarray) -> numpy.ndarray:\n rz = numpy.empty((angle_rads.shape[0], 4, 4))\n rz[...] = numpy.identity(4)\n rz[:, 0, 0] = rz[:, 1, 1] = numpy.cos(angle_rads)\n rz[:, 1, 0] = numpy.sin(angle_rads)\n rz[:, 0, 1] = -rz[:, 1, 0]\n return rz", "def z_rotmat(theta):\n cos_t = np.cos(theta)\n sin_t = np.sin(theta)\n return np.array([[cos_t, -sin_t, 0],\n [sin_t, cos_t, 0],\n [0, 0, 1]])", "def _rotate(self, angles, dj_matrix=None):\n if dj_matrix is None:\n dj_matrix = djpi2(self.lmax + 1)\n self.coeffs = SHRotateRealCoef(self.coeffs, angles, dj_matrix)", "def _rotate(self, angles, dj_matrix=None):\n if dj_matrix is None:\n dj_matrix = djpi2(self.lmax + 1)\n self.coeffs = SHRotateRealCoef(self.coeffs, angles, dj_matrix)", "def rotation_matrix(rx, ry, rz):\n # Convert from degrees to radians.\n rx = np.pi * rx / 180\n ry = np.pi * ry / 180\n rz = np.pi * rz / 180\n\n # Pre-compute sine and cosine of angles.\n cx, cy, cz = np.cos([rx, ry, rz])\n sx, sy, sz = np.sin([rx, ry, rz])\n\n # Set up euler rotations.\n Rx = np.array([[1, 0, 0, 0],\n [0, cx, -sx, 0],\n [0, sx, cx, 0],\n [0, 0, 0, 1]])\n\n Ry = np.array([[cy, 0, sy, 0],\n [0, 1, 0, 0],\n [-sy, 0, cy, 0],\n [0, 0, 0, 1]])\n\n Rz = np.array([[cz, -sz, 0, 0],\n [sz, cz, 0, 0],\n [0, 0, 1, 0],\n [0, 0, 0, 1]])\n\n return Rz.dot(Ry.dot(Rx))", "def rot_z(angle):\n sangle = math.sin(angle)\n cangle = math.cos(angle)\n rz = np.array([[cangle, sangle, 0.0],\n [-sangle, cangle, 0.0],\n [0.0, 0.0, 1.0]])\n return rz", "def rotationMatrix(self):\n\n R = Compute3DRotationMatrix(self.exteriorOrientationParameters[3], self.exteriorOrientationParameters[4],\n self.exteriorOrientationParameters[5])\n\n return R", "def _rotationMatrix(self, n_dim, theta):\n i = np.identity(n_dim)\n c, s = np.cos(theta)*i, np.sin(theta)*i\n rotation = np.bmat([[c, s], [-s, c]])\n return rotation", "def rotation_matrix(angle) -> np.array:\n return np.array([\n [np.cos(angle), np.sin(angle)],\n [-np.sin(angle), np.cos(angle)]])", "def _rot(axis, angle):\n if axis == 1:\n return Matrix([[1, 0, 0],\n [0, cos(angle), -sin(angle)],\n [0, sin(angle), cos(angle)]])\n elif axis == 2:\n return Matrix([[cos(angle), 0, sin(angle)],\n [0, 1, 0],\n [-sin(angle), 0, cos(angle)]])\n elif axis == 3:\n return 
Matrix([[cos(angle), -sin(angle), 0],\n [sin(angle), cos(angle), 0],\n [0, 0, 1]])", "def make_sample_rot_matrix(self, angles):\n (phi, kappa, omega) = self.get_phi_kappa_omega(angles)\n return numpy_utils.kappa_rotation_matrix(phi, np.deg2rad(self.alpha), kappa, omega)", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, chi, omega) = angles[0:3]\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, chi) = angles[0:2]\n omega = np.deg2rad(self.omega)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def rotate_matrix(angle):\n c = np.cos(angle)\n s = np.sin(angle)\n return np.array([[c, s],\n [-s, c]])", "def _rotation_matrix(theta):\n c, s = np.cos(theta), np.sin(theta)\n return np.array(((c, -s), (s, c)))", "def _calc_rotation_matrix(self, inds=None):\n if inds is None:\n inds = range(self.Ncomponents)\n\n n_inds = len(inds)\n\n lon, lat = self.get_lon_lat()\n # Find mathematical points and vectors for RA/Dec\n theta_frame = np.pi / 2.0 - lat.rad[inds]\n phi_frame = lon.rad[inds]\n frame_vec = sct.r_hat(theta_frame, phi_frame)\n assert frame_vec.shape == (3, n_inds)\n\n # Find mathematical points and vectors for Alt/Az\n theta_altaz = np.pi / 2.0 - self.alt_az[0, inds]\n phi_altaz = self.alt_az[1, inds]\n altaz_vec = sct.r_hat(theta_altaz, phi_altaz)\n assert altaz_vec.shape == (3, n_inds)\n\n R_avg = self._calc_average_rotation_matrix()\n\n R_exact = np.zeros((3, 3, n_inds), dtype=np.float64)\n\n for src_i in range(n_inds):\n intermediate_vec = np.matmul(R_avg, frame_vec[:, src_i])\n\n R_perturb = sct.vecs2rot(r1=intermediate_vec, r2=altaz_vec[:, src_i])\n\n R_exact[:, :, src_i] = np.matmul(R_perturb, R_avg)\n\n return R_exact", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, omega) = angles[0:2]\n chi = np.deg2rad(self.chi)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, omega) = angles[0:2]\n chi = np.deg2rad(self.chi)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def Rpy(angle=0, units='deg'):\n\n if(units=='deg'):\n angle = angle*pi/180\n\n C = np.cos(angle)\n S = np.sin(angle)\n\n M = np.identity(3)\n\n M[0,0] = +C\n M[0,2] 
= +S\n M[2,0] = -S\n M[2,2] = +C\n\n return M", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, chi, omega) = self.get_phi_chi_omega(angles)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def homog_rot_mtx(angle_rads: float, axis: str) -> numpy.array:\n cosang = numpy.cos(angle_rads)\n sinang = numpy.sin(angle_rads)\n\n if \"z\" == axis:\n return numpy.array(\n (\n (cosang, -sinang, 0, 0),\n (sinang, cosang, 0, 0),\n (0, 0, 1, 0),\n (0, 0, 0, 1),\n ),\n dtype=numpy.float64,\n )\n elif \"y\" == axis:\n return numpy.array(\n (\n (cosang, 0, sinang, 0),\n (0, 1, 0, 0),\n (-sinang, 0, cosang, 0),\n (0, 0, 0, 1),\n ),\n dtype=numpy.float64,\n )\n else:\n return numpy.array(\n (\n (1, 0, 0, 0),\n (0, cosang, -sinang, 0),\n (0, sinang, cosang, 0),\n (0, 0, 0, 1),\n ),\n dtype=numpy.float64,\n )", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, chi, omega) = self.get_phi_chi_omega(angles)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, chi, omega) = self.get_phi_chi_omega(angles)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.opposite_rotation_matrix(phi, chi, omega)", "def rotation_matrix(phi):\n return np.asmatrix([\n [np.cos(phi), -np.sin(phi), 0],\n [np.sin(phi), np.cos(phi), 0],\n [0, 0, 1]\n ])", "def generate_rotation_matrix(phi: float, the: float, psi: float) -> np.matrix:\n # Transfer the angle to Euclidean\n phi = -float(phi) * np.pi / 180.0\n the = -float(the) * np.pi / 180.0\n psi = -float(psi) * np.pi / 180.0\n sin_alpha = np.sin(phi)\n cos_alpha = np.cos(phi)\n sin_beta = np.sin(the)\n cos_beta = np.cos(the)\n sin_gamma = np.sin(psi)\n cos_gamma = np.cos(psi)\n\n # Calculate inverse rotation matrix\n Inv_R = np.zeros((3, 3), dtype='float32')\n\n Inv_R[0, 0] = cos_alpha * cos_gamma - cos_beta * sin_alpha \\\n * sin_gamma\n Inv_R[0, 1] = -cos_alpha * sin_gamma - cos_beta * sin_alpha \\\n * cos_gamma\n Inv_R[0, 2] = sin_beta * sin_alpha\n\n Inv_R[1, 0] = sin_alpha * cos_gamma + cos_beta * cos_alpha \\\n * sin_gamma\n Inv_R[1, 1] = -sin_alpha * sin_gamma + cos_beta * cos_alpha \\\n * cos_gamma\n Inv_R[1, 2] = -sin_beta * cos_alpha\n\n Inv_R[2, 0] = sin_beta * sin_gamma\n Inv_R[2, 1] = sin_beta * cos_gamma\n Inv_R[2, 2] = cos_beta\n #Inv_R[3, 3] = 1\n\n return np.matrix(Inv_R)", "def rotation_2d(points, angles):\n rot_sin = np.sin(angles)\n rot_cos = np.cos(angles)\n rot_mat_T = np.stack([[rot_cos, -rot_sin], [rot_sin, rot_cos]])\n return np.einsum('aij,jka->aik', points, rot_mat_T)", "def rotation_matrix_cp(axis, theta):\n axis = np.asarray(axis)\n theta = 
np.asarray(theta)\n axis = axis/math.sqrt(np.dot(axis, axis))\n a = math.cos(theta/2.0)\n b, c, d = -axis*math.sin(theta/2.0)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.matrix([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac), 0.],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab), 0.],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc, 0.],\n [0., 0., 0., 1.]])", "def make_q_rot_matrix(self, angles):\n #For other instruments, this method may be different.\n (phi, kappa, omega) = self.get_phi_kappa_omega(angles)\n\n #In Q space, detector coverage rotates OPPOSITE to what the real space rotation is.\n #Because that is where the detectors and incident beam go, AS SEEN BY THE SAMPLE.\n\n #So wee need to invert the sample orientation matrix to find the one that will apply to the Q vector.\n return numpy_utils.kappa_opposite_rotation_matrix(phi, np.deg2rad(self.alpha), kappa, omega)", "def matrix_rotate_3d_x(deg: float) -> np.matrix:\n from numpy import cos, sin, pi\n rad_x = -deg * pi/180\n c_x = cos(rad_x)\n s_x = sin(rad_x)\n return np.matrix([[1, 0, 0], [0, c_x, -s_x], [0, s_x, c_x]])", "def rotmat(axis, angle):\n mat = np.eye(3)\n if angle is None or np.isclose(angle, 0.0):\n return mat\n cang = np.cos(angle*radians)\n sang = np.sin(angle*radians)\n if axis == 1:\n mat = np.array(((1, 0, 0), (0, cang, -sang), (0, sang, cang)))\n elif axis == 2:\n mat = np.array(((cang, 0, sang), (0, 1, 0), (-sang, 0, cang)))\n else:\n mat = np.array(((cang, -sang, 0), (sang, cang, 0), (0, 0, 1)))\n return np.matrix(mat)", "def rotZ(theta, mode = 'radians'):\n\n\tif mode != 'radians' and mode != 'degrees':\n\t\traise ValueError('Mode should either be ``radians`` or ``degrees``.')\n\tif mode == 'degrees':\n\t\ttheta = np.deg2rad(theta)\n\treturn np.matrix([[np.cos(theta), -np.sin(theta), 0], [np.sin(theta), np.cos(theta), 0], \\\n\t\t[0., 0., 1.]])", "def rotation3D_z(angle: float) -> np.array:\n c = np.cos(angle)\n s = np.sin(angle)\n return np.array([[c, -s, 0.0], [s, c, 0.0], [0.0, 0.0, 1.0]])", "def rotator(angle):\n c = np.cos(angle)\n s = np.sin(angle)\n return np.array([[c,-s],[s,c]])", "def compose_mat(rot):\n trans_mat = oMa.MTransformationMatrix()\n trans_mat.setRotation(rot)\n\n mat = trans_mat.asMatrix()\n\n return mat", "def rotateZMatrix(self, radians):\n\n c = np.cos(radians)\n s = np.sin(radians)\n return np.array([[c,-s, 0, 0],\n [s, c, 0, 0],\n [0, 0, 1, 0],\n [0, 0, 0, 1]])", "def angle_to_rotation_matrix(angle: torch.Tensor) -> torch.Tensor:\n ang_rad = deg2rad(angle)\n cos_a: torch.Tensor = torch.cos(ang_rad)\n sin_a: torch.Tensor = torch.sin(ang_rad)\n return torch.stack([cos_a, sin_a, -sin_a, cos_a], dim=-1).view(*angle.shape, 2, 2)", "def rotation_matrix(axis,theta):\n\taxis = np.asarray(axis)\n\ttheta = np.asarray(theta)\n\tif np.all(axis==0): return np.identity(3) \n\taxis = axis/np.sqrt(np.dot(axis,axis))\n\ta = np.cos(theta/2)\n\tb, c, d = -axis*np.sin(theta/2)\n\taa, bb, cc, dd = a*a, b*b, c*c, d*d\n\tbc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n\treturn np.array([[aa+bb-cc-dd,2*(bc+ad),2*(bd-ac)],[2*(bc-ad),aa+cc-bb-dd,2*(cd+ab)],\n\t\t[2*(bd+ac),2*(cd-ab),aa+dd-bb-cc]])", "def rotation3Dz(theta):\n rmat = np.zeros((3,3))\n rmat[0,0] = rmat[1,1] = np.cos(theta)\n rmat[0,1] = np.sin(theta)\n rmat[1,0] = -rmat[0,1]\n rmat[2,2] = 1\n return rmat", "def complex_from_polar(abs, angle):", "def rotation_matrix2(angle):\n c, s = cos(angle), sin(angle)\n return np.array([[c, -s], [s, c]])", "def rotate_points(points,quaternions):\n \n res = 
np.zeros((quaternions.shape[0],points.shape[0],4)) \n res[:,:,1:] = points \n conjugates = conjugate(quaternions) \n \n for ix in range(len(points)):\n res[:,ix,:] = multiply_quaternions(quaternions,res[:,ix,:])\n res[:,ix,:] = multiply_quaternions(res[:,ix,:],conjugates)\n return res[:,:,1:]", "def rot_angle2rot_mat(angle):\n rot_mat = np.asarray([\n [np.cos(angle), -np.sin(angle)],\n [np.sin(angle), np.cos(angle)]\n ])\n return rot_mat", "def rotation(self, angle, axis):\r\n\r\n sqr_a = axis.x*axis.x\r\n sqr_b = axis.y*axis.y\r\n sqr_c = axis.z*axis.z\r\n len2 = sqr_a+sqr_b+sqr_c\r\n\r\n k2 = math.cos(angle)\r\n k1 = (1.0-k2)/len2\r\n k3 = math.sin(angle)/math.sqrt(len2)\r\n k1ab = k1*axis.x*axis.y\r\n k1ac = k1*axis.x*axis.z\r\n k1bc = k1*axis.y*axis.z\r\n k3a = k3*axis.x\r\n k3b = k3*axis.y\r\n k3c = k3*axis.z\r\n\r\n return mat4( k1*sqr_a+k2, k1ab-k3c, k1ac+k3b, 0.0,\r\n k1ab+k3c, k1*sqr_b+k2, k1bc-k3a, 0.0,\r\n k1ac-k3b, k1bc+k3a, k1*sqr_c+k2, 0.0,\r\n 0.0, 0.0, 0.0, 1.0)", "def rotator(angle):\n\n c = np.cos(2*angle)\n s = np.sin(2*angle)\n return np.array([[1,0,0,0],[0,c,-s,0],[0,s,c,0],[0,0,0,1]])", "def axangle2rotmat(axangles):\r\n\r\n if type(axangles) is not np.ndarray:\r\n raise ValueError('Rodrigues only works on numpy arrays')\r\n \r\n # store original shape\r\n shape = axangles.shape\r\n assert shape[-1] % 3 == 0, \"inputs are not axis angles\"\r\n axangles = axangles.reshape((-1, 3))\r\n\r\n rotmats = []\r\n for i in range(axangles.shape[0]):\r\n rotmat, _ = cv2.Rodrigues(axangles[i])\r\n rotmats.append(rotmat)\r\n\r\n # restore original shape\r\n new_shape = shape[:-1] + (shape[-1]//3*9,)\r\n return np.array(rotmats).reshape(new_shape)", "def rotation_matrix(dt, omega):\n R = np.array([\n [np.cos(omega * dt), -np.sin(omega * dt)],\n [np.sin(omega * dt), np.cos(omega * dt)]\n ])\n return R", "def rot_z(theta):\n theta_rad = np.radians(theta)\n rotation_matrix = [[np.cos(theta_rad), -np.sin(theta_rad), 0],\n [np.sin(theta_rad), np.cos(theta_rad), 0],\n [0, 0, 1]]\n return np.matrix(rotation_matrix)", "def AffineRz(theta, units='deg'):\n if units == 'deg':\n theta = np.deg2rad(theta)\n\n return np.mat([[np.cos(theta), -np.sin(theta), 0, 0],\n [np.sin(theta), np.cos(theta), 0, 0],\n [0, 0, 1, 0],\n [0, 0, 0, 1]\n ],\n dtype='f8'\n )", "def getEllipsYZRotMatrix(a1, a2):\n adir = a2 - a1\n amid = a1 + 0.5 * adir\n kath = np.sqrt((adir[0] * adir[0] + adir[1] * adir[1]) / 4.0)\n octantA2 = octant(a2)\n theta = np.arctan( abs( (adir[2]/2) / kath) )\n #[1, 4, 6, 7 ] => left rotation\n #[2, 3, 5, 8 ] => right rotation\n if octantA2 in [2, 3, 5, 8]: \n theta = -theta \n print \"theta =\" , np.rad2deg(theta)\n RotY = np.matrix( [ [ np.cos(theta), 0.0, np.sin(theta) ],\n [ 0.0 , 1.0, 0.0 ],\n [ -np.sin(theta), 0.0, np.cos(theta) ]\n ]) \n \n psi = np.arctan( abs( adir[1] / adir[0] ) )\n #[2, 4, 6, 8 ] => left rotation\n #[1, 3, 5, 7 ] => right rotation\n if octantA2 in [1, 3, 5, 7]:\n psi = -psi\n print \"psi =\" , np.rad2deg(psi)\n RotZ = np.matrix( [ [ np.cos(psi), -np.sin(psi), 0.0 ],\n [ np.sin(psi), np.cos(psi), 0.0 ],\n [ 0.0 , 0.0 , 1.0 ]\n ])\n return np.asarray( RotY * RotZ )", "def _get_rotation_matrix(transform):\n # caution: UE4 is using left-hand ortation order\n roll = np.deg2rad(-transform.rotation.roll)\n pitch = np.deg2rad(-transform.rotation.pitch)\n yaw = np.deg2rad(transform.rotation.yaw)\n sr, cr = np.sin(roll), np.cos(roll)\n sp, cp = np.sin(pitch), np.cos(pitch)\n sy, cy = np.sin(yaw), np.cos(yaw)\n rotation_matrix = np.array([[cy * cp, -sy * sr + cy * sp * sr, 
cy * sp * cr + sy * sr],\n [sy * cp, cy * sp * sr + cy * sr, -cy * sr + sy * sp * cr],\n [-sp, cp * sr, cp * cr]])\n return rotation_matrix", "def givens_rotation_matrix(i, j, theta, N):\n R = np.identity(N)\n c = np.cos(theta)\n s = np.sin(theta)\n R[i, i] = c\n R[j, j] = c\n R[i, j] = -s\n R[j, i] = s\n return R", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:,0], angle[:,1], angle[:,2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach()*0\n ones = zeros.detach()+1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).reshape(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).reshape(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).reshape(B, 3, 3)\n\n rotMat = xmat @ ymat @ zmat\n return rotMat", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:,0], angle[:,1], angle[:,2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach()*0\n ones = zeros.detach()+1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).reshape(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).reshape(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).reshape(B, 3, 3)\n\n rotMat = xmat @ ymat @ zmat\n return rotMat", "def get_transform_matrix(theta, phi = None, invert_rot = False, invert_focal = False):\n\n if phi is None:\n phi = const.PHI_IDX * 10.0\n\n #extrinsic x intrinsic\n camera_matrix = np.zeros((4, 4), dtype=np.float32)\n\n intrinsic_matrix = np.eye(4, dtype=np.float32)\n extrinsic_matrix = np.eye(4, dtype=np.float32)\n\n sin_phi = np.sin(float(phi) / 180.0 * np.pi)\n cos_phi = np.cos(float(phi) / 180.0 * np.pi)\n sin_theta = np.sin(float(-theta) / 180.0 * np.pi)\n cos_theta = np.cos(float(-theta) / 180.0 * np.pi)\n\n #theta rotation\n rotation_azimuth = np.zeros((3, 3), dtype=np.float32)\n rotation_azimuth[0, 0] = cos_theta\n rotation_azimuth[2, 2] = cos_theta\n rotation_azimuth[0, 2] = -sin_theta\n rotation_azimuth[2, 0] = sin_theta\n rotation_azimuth[1, 1] = 1.0\n\n #phi rotation\n rotation_elevation = np.zeros((3, 3), dtype=np.float32)\n rotation_elevation[0, 0] = cos_phi\n rotation_elevation[0, 1] = sin_phi\n rotation_elevation[1, 0] = -sin_phi\n rotation_elevation[1, 1] = cos_phi\n rotation_elevation[2, 2] = 1.0\n\n #rotate phi, then theta\n rotation_matrix = np.matmul(rotation_azimuth, rotation_elevation)\n if invert_rot:\n rotation_matrix = np.linalg.inv(rotation_matrix)\n\n displacement = np.zeros((3, 1), dtype=np.float32)\n displacement[0, 0] = const.DIST_TO_CAM\n displacement = np.matmul(rotation_matrix, displacement)\n\n #assembling 4x4 from R + T\n extrinsic_matrix[0:3, 0:3] = rotation_matrix\n extrinsic_matrix[0:3, 3:4] = -displacement\n\n if invert_focal:\n intrinsic_matrix[2, 2] = float(const.focal_length)\n intrinsic_matrix[1, 1] = float(const.focal_length)\n else:\n intrinsic_matrix[2, 2] = 1.0 / float(const.focal_length)\n intrinsic_matrix[1, 1] = 1.0 / float(const.focal_length)\n\n camera_matrix = np.matmul(extrinsic_matrix, intrinsic_matrix)\n return camera_matrix", "def rotation_matrix_xyz(axis, angle, angle_dim):\n assert angle_dim is \"deg\" or angle_dim is 
\"rad\"\n assert axis is \"x\" or axis is \"y\" or axis is \"z\"\n x = 0\n y = 0\n z = 0\n\n if angle_dim is \"deg\":\n a = np.deg2rad(angle)\n else:\n a = angle\n\n if axis is \"x\":\n x = 1\n y = 0\n z = 0\n if axis is \"y\":\n x = 0\n y = 1\n z = 0\n if axis is \"z\":\n x = 0\n y = 0\n z = 1\n\n s = np.sin(a)\n c = np.cos(a)\n rotation_matrix = np.array([[c + x ** 2 * (1 - c), x * y * (1 - c) - z * s, x * z * (1 - c) + y * s],\n [y * x * (1 - c) + z * s, c + y ** 2 * (1 - c), y * z * (1 - c) - x * s],\n [z * x * (1 - c) - y * s, z * y * (1 - c) + x * s, c + z ** 2 * (1 - c)]])\n\n return rotation_matrix", "def rot(phi, theta, omega):\n\n cos = jnp.cos(theta / 2)\n sin = jnp.sin(theta / 2)\n\n return jnp.array(\n [\n [\n jnp.exp(-0.5j * (phi + omega)) * cos,\n -(jnp.exp(0.5j * (phi - omega))) * sin,\n ],\n [jnp.exp(-0.5j * (phi - omega)) * sin, jnp.exp(0.5j * (phi + omega)) * cos],\n ]\n )", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:, 0], angle[:, 1], angle[:, 2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach() * 0\n ones = zeros.detach() + 1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).view(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).view(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).view(B, 3, 3)\n\n # rotMat = xmat.bmm(ymat).bmm(zmat)\n # changed to match opencv and conversion euler->mat/mat->euler\n rotMat = torch.bmm(zmat, torch.bmm(ymat, xmat))\n\n return rotMat", "def Rpz(angle=0, units='deg'):\n\n if(units=='deg'):\n angle = angle*pi/180\n\n C = np.cos(angle)\n S = np.sin(angle)\n\n M = np.identity(3)\n\n M[0,0] = +C\n M[0,1] = -S\n M[1,0] = +S\n M[1,1] = +C\n\n return M", "def rotation_matrix_2d(angle):\n psi = Angle(angle).rad\n return np.array([[cos(psi), -sin(psi)],\n [sin(psi), cos(psi)]])", "def vrrotvec2mat(ax_ang):\n\n #file_dir = os.path.dirname(os.path.realpath(__file__))\n #path_dir2 = file_dir + '/../geometry/'\n #sys.path.append(path_dir2)\n\n if ax_ang.ndim == 1:\n if np.size(ax_ang) == 5:\n ax_ang = np.reshape(ax_ang, (5, 1))\n msz = 1\n elif np.size(ax_ang) == 4:\n ax_ang = np.reshape(np.hstack((ax_ang, np.array([1]))), (5, 1))\n msz = 1\n else:\n raise Exception('Wrong Input Type')\n elif ax_ang.ndim == 2:\n if np.shape(ax_ang)[0] == 5:\n msz = np.shape(ax_ang)[1]\n elif np.shape(ax_ang)[1] == 5:\n ax_ang = ax_ang.transpose()\n msz = np.shape(ax_ang)[1]\n else:\n raise Exception('Wrong Input Type')\n else:\n raise Exception('Wrong Input Type')\n\n direction = ax_ang[0:3, :]\n angle = ax_ang[3, :]\n\n d = np.array(direction, dtype=np.float64)\n d /= np.linalg.norm(d, axis=0)\n x = d[0, :]\n y = d[1, :]\n z = d[2, :]\n c = np.cos(angle)\n s = np.sin(angle)\n tc = 1 - c\n\n mt11 = tc*x*x + c\n mt12 = tc*x*y - s*z\n mt13 = tc*x*z + s*y\n\n mt21 = tc*x*y + s*z\n mt22 = tc*y*y + c\n mt23 = tc*y*z - s*x\n\n mt31 = tc*x*z - s*y\n mt32 = tc*y*z + s*x\n mt33 = tc*z*z + c\n\n mtx = np.column_stack((mt11, mt12, mt13, mt21, mt22, mt23, mt31, mt32, mt33))\n\n inds1 = np.where(ax_ang[4, :] == -1)\n mtx[inds1, :] = -mtx[inds1, :]\n\n if msz == 1:\n mtx = mtx.reshape(3, 3)\n else:\n mtx = mtx.reshape(msz, 3, 3)\n\n return mtx", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:,0], angle[:,1], angle[:,2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach()*0\n 
ones = zeros.detach()+1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).reshape(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).reshape(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).reshape(B, 3, 3)\n\n rotMat = torch.matmul(torch.matmul(xmat, ymat), zmat)\n return rotMat", "def matrix_rotate_3d_y(deg: float) -> np.matrix:\n from numpy import cos, sin, pi\n rad_y = -deg * pi/180\n c_y = cos(rad_y)\n s_y = sin(rad_y)\n return np.matrix([[c_y, 0, s_y], [0, 1, 0], [-s_y, 0, c_y]])", "def rotation_matrix(self):\n self._normalise()\n product_matrix = np.dot(self._q_matrix(), self._q_bar_matrix().conj().transpose())\n return product_matrix[1:][:,1:]", "def rotation_matrix(self, rotation, rotation_order=\"zyx\"):\n x = math.radians(rotation[0])\n y = math.radians(rotation[1])\n z = math.radians(rotation[2])\n\n cos = math.cos\n sin = math.sin\n if rotation_order == 'zyx':\n index_0 = cos(y) * cos(z)\n index_1 = cos(z) * sin(x) * sin(y) - cos(x) * sin(z)\n index_2 = cos(x) * cos(z) * sin(y) + sin(x) * sin(z)\n\n index_3 = cos(y) * sin(z)\n index_4 = cos(x) * cos(z) + sin(x) * sin(y) * sin(z)\n index_5 = -cos(z) * sin(x) + cos(x) * sin(y) * sin(z)\n\n index_6 = -sin(y)\n index_7 = -cos(y) * sin(x)\n index_8 = cos(x) * cos(y)\n elif rotation_order == 'xyz':\n index_0 = cos(y) * cos(z)\n index_1 = -cos(z) * sin(z)\n index_2 = sin(y)\n\n index_3 = cos(x) * sin(z) + sin(x) * sin(y) * cos(z)\n index_4 = cos(x) * cos(z) - sin(x) * sin(y) * sin(z)\n index_5 = -sin(x) * cos(y)\n\n index_6 = sin(x) * sin(z) - cos(x) * sin(y) * cos(z)\n index_7 = sin(x) * cos(z) + cos(x) * sin(y) * sin(z)\n index_8 = cos(x) * cos(y)\n\n rot_mat = ((index_0, index_1, index_2),\n (index_3, index_4, index_5),\n (index_6, index_7, index_8))\n\n return rot_mat", "def theta_phi_of_complex(z):\n return np.stack([theta_of_complex(z), phi_of_complex(z)], axis=1)", "def rotation_matrix(theta):\n return np.array([\n [np.cos(theta), -np.sin(theta)],\n [np.sin(theta), np.cos(theta)]\n ]);", "def get_rotation_matrix(theta, rot_vector):\n\n ux = rot_vector[0]\n uy = rot_vector[1]\n uz = rot_vector[2]\n cost = np.cos(theta)\n sint = np.sin(theta)\n\n R = np.array([[cost+ux**2*(1-cost), ux*uy*(1-cost)-uz*sint, ux*uz*(1-cost)+uy*sint],\n [uy*ux*(1-cost)+uz*sint, cost+uy**2*(1-cost), uy*uz*(1-cost)-ux*sint],\n [uz*ux*(1-cost)-uy*sint, uz*uy*(1-cost)+ux*sint, cost+uz**2*(1-cost)]])\n\n return R", "def rotate_z(self, angle):\n angle *= np.pi / 180\n return self.transform(np.matrix([[np.cos(angle), -np.sin(angle), 0],\n [np.sin(angle), np.cos(angle), 0],\n [0, 0, 1]]))", "def rotation_matrix_3x3_axis(angle, axis):\n assert axis.lower() in ['x','y','z']\n assert -180.0 <= angle <= 180.0\n angle_r = angle * (np.pi / 180.0)\n sa = np.sin(angle_r)\n ca = np.cos(angle_r)\n\n if axis == 'x':\n R = np.array([ [1, 0, 0],\n [0, ca, -sa],\n [0, sa, ca],\n ])\n elif axis == 'y':\n R = np.array([ [ca, 0, sa],\n [0, 1, 0],\n [-sa, 0, ca],\n ])\n elif axis == 'z':\n R = np.array([ [ca, -sa, 0],\n [sa, ca, 0],\n [0, 0, 1],\n ])\n return R", "def _get_rotation_matrix(axis, theta):\n\n #import math\n axis = np.asarray(axis)\n theta = np.asarray(theta)\n axis = axis/np.sqrt(np.dot(axis, axis))\n a = np.cos(theta/2)\n b, c, d = -axis*np.sin(theta/2)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, 
a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def rotation_matrix(axis, theta):\n\taxis = np.asarray(axis)\n\taxis = axis / np.sqrt(np.dot(axis, axis))\n\ta = np.cos(theta / 2.0)\n\tb, c, d = -axis * np.sin(theta / 2.0)\n\taa, bb, cc, dd = a * a, b * b, c * c, d * d\n\tbc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n\treturn np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n\t\t\t\t\t [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n\t\t\t\t\t [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def get_3drotation_matrix(axis, angle):\n angle = angle #*-1\n norm = np.linalg.norm(np.array(axis))\n if norm > 0:\n axis /= norm\n ax, ay, az = axis[0], axis[1], axis[2]\n cos, sin = np.cos(angle), np.sin(angle)\n rotmat = np.array([[cos + ax * ax * (1 - cos), ax * ay * (1 - cos) - az * sin, ax * az * (1 - cos) + ay * sin],\n [ay * ax * (1 - cos) + az * sin, cos + ay * ay * (1 - cos), ay * az * (1 - cos) - ax * sin],\n [az * ax * (1 - cos) - ay * sin, az * ay * (1 - cos) + ax * sin, cos + az * az * (1 - cos)]])\n return rotmat", "def rotate_z(angle):\n log.dev(\"lib.mathp.rotate_z is deprecated. Use lib.rotation.R3 instead.\")\n\n cosA = np.cos(angle)\n sinA = np.sin(angle)\n R = np.array([[cosA, sinA, 0], [-sinA, cosA, 0], [0, 0, 1]])\n return R", "def rotation3Dx(theta):\n rmat = np.zeros((3,3))\n rmat[0,0], rmat[0,1], rmat[0,2] = 1.0, 0.0, 0.0\n rmat[1,0], rmat[1,1], rmat[1,2] = 0.0, np.cos(theta), np.sin(theta)\n rmat[2,0], rmat[2,1], rmat[2,2] = 0.0, -np.sin(theta), np.cos(theta)\n \n return rmat", "def so2mat(angle):\n return torch.Tensor(R.from_rotvec(angle).as_dcm())", "def rotation(x,y,z):\r\n phi = np.arctan(z/sqrt(x**2+y**2))\r\n lamb = np.arctan2(y,x)\r\n G = np.array([[-sin(lamb), cos(lamb), 0],\r\n [-sin(phi) * cos(lamb), -sin(phi) * sin(lamb), cos(phi)],\r\n [cos(phi) * cos(lamb), cos(phi) * sin(lamb), sin(phi)]])\r\n return (G)", "def Rz(theta, units='deg'):\n if units == 'deg':\n theta = np.deg2rad(theta)\n\n return np.mat([[np.cos(theta), -np.sin(theta), 0],\n [np.sin(theta), np.cos(theta), 0],\n [0, 0, 1]\n ],\n dtype='f8'\n )" ]
[ "0.6859258", "0.68380105", "0.6828468", "0.67929703", "0.6778919", "0.6778919", "0.6726848", "0.6723164", "0.67022794", "0.67022794", "0.67022794", "0.66824156", "0.6651829", "0.66406155", "0.6638365", "0.66007775", "0.6590055", "0.6520805", "0.6512235", "0.6512235", "0.6498511", "0.6490419", "0.648046", "0.6430972", "0.6415262", "0.6411565", "0.6411565", "0.63937753", "0.63847905", "0.6364955", "0.6362865", "0.6361418", "0.6351222", "0.6347966", "0.6345683", "0.63387793", "0.6331196", "0.63283485", "0.63171417", "0.6316492", "0.6316492", "0.6313296", "0.6312381", "0.6298115", "0.62893665", "0.62893665", "0.6286969", "0.62831825", "0.62818366", "0.62747985", "0.62639976", "0.6253635", "0.6250528", "0.6247667", "0.6232324", "0.6223445", "0.62152785", "0.6210823", "0.6206687", "0.6179155", "0.61778194", "0.6175176", "0.61689556", "0.61503774", "0.6149179", "0.61403364", "0.6140098", "0.6138975", "0.6136966", "0.6129537", "0.6128553", "0.61277586", "0.61239725", "0.6118886", "0.6114573", "0.6114573", "0.61081344", "0.6101127", "0.6098687", "0.6098056", "0.6084367", "0.6083418", "0.6071842", "0.60688436", "0.60511094", "0.6045315", "0.60428774", "0.60394573", "0.6039028", "0.6036953", "0.60307026", "0.60107934", "0.6006338", "0.5992527", "0.5986523", "0.5965634", "0.59631944", "0.5957434", "0.5956252", "0.5950057" ]
0.6610632
15
Implementation of the triplet loss as defined by formula (3)
def triplet_loss(y_true, y_pred, alpha=0.2):
    anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]

    # Step 1: Compute the (encoding) distance between the anchor and the positive, you will need to sum over axis=-1
    pos_dist = K.sum(K.square(anchor - positive), axis=-1)
    # Step 2: Compute the (encoding) distance between the anchor and the negative, you will need to sum over axis=-1
    neg_dist = K.sum(K.square(anchor - negative), axis=-1)
    # Step 3: subtract the two previous distances and add alpha.
    basic_loss = pos_dist - neg_dist + alpha
    # Step 4: Take the maximum of basic_loss and 0.0. Sum over the training examples.
    loss = K.sum(K.maximum(basic_loss, 0))

    return loss
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def triplet_loss(y_true, y_pred):\n [a,p,n] = tf.unstack(y_pred, num=3)\n pos_dist = tf.reduce_sum((a - p)**2, axis=-1)\n neg_dist = tf.reduce_sum((a - n)**2, axis=-1)\n basic_loss = pos_dist - neg_dist + 0.1\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0.0)) \n return loss", "def triplet_loss(y_true,y_pred,alpha=0.2):\n anchor,positive,negative=y_pred[0],y_pred[1],y_pred[2]\n pos_dist=tf.reduce_sum(tf.square(tf.subtract(anchor,positive)),axis=-1)\n neg_dist=tf.reduce_sum(tf.square(tf.subtract(anchor,negative)),axis=-1)\n basic_loss=tf.add(tf.subtract(pos_dist,neg_dist),alpha)\n loss=tf.reduce_sum(tf.maximum(basic_loss,0.0))\n return loss", "def triplet_loss(y_true, y_pred, alpha = 0.2):\r\n \r\n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\r\n \r\n # Step 1: Compute the (encoding) distance between the anchor and the positive, you will need to sum over axis=-1\r\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), axis=-1)\r\n # Step 2: Compute the (encoding) distance between the anchor and the negative, you will need to sum over axis=-1\r\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), axis=-1)\r\n # Step 3: subtract the two previous distances and add alpha.\r\n basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), alpha)\r\n # Step 4: Take the maximum of basic_loss and 0.0. Sum over the training examples.\r\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0))\r\n # END CODE HERE \r\n \r\n return loss", "def triplet_loss(y_true, y_pred, alpha = 0.2):\r\n \r\n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\r\n \r\n ### START CODE HERE ### (≈ 4 lines)\r\n # Step 1: Compute the (encoding) distance between the anchor and the positive\r\n pos_dist = tf.reduce_sum(tf.square(anchor - positive))\r\n # Step 2: Compute the (encoding) distance between the anchor and the negative\r\n neg_dist = tf.reduce_sum(tf.square(anchor - negative))\r\n # Step 3: subtract the two previous distances and add alpha.\r\n basic_loss = pos_dist - neg_dist + alpha\r\n # Step 4: Take the maximum of basic_loss and 0.0. Sum over the training examples.\r\n loss = tf.maximum(basic_loss, 0)\r\n ### END CODE HERE ###\r\n \r\n return loss", "def triplet_loss(y_true, y_pred, alpha = 0.2):\n \n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\n \n ### START CODE HERE ### (≈ 4 lines)\n # Step 1: Compute the (encoding) distance between the anchor and the positive, you will need to sum over axis=-1\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor,positive)),axis=-1)\n # Step 2: Compute the (encoding) distance between the anchor and the negative, you will need to sum over axis=-1\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor,negative)),axis=-1)\n # Step 3: subtract the two previous distances and add alpha.\n basic_loss = pos_dist-neg_dist+alpha\n # Step 4: Take the maximum of basic_loss and 0.0. 
Sum over the training examples.\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0.0))\n ### END CODE HERE ###\n \n return loss", "def triplet_loss_test():\n\n #Test implementation of triplet loss function \n # code from week9 practice\n num_data = 10\n feat_dim = 6\n margin = 0.2\n\n embeddings = [np.random.rand(num_data, feat_dim).astype(np.float32),\n np.random.rand(num_data, feat_dim).astype(np.float32),\n np.random.rand(num_data, feat_dim).astype(np.float32)]\n labels = np.random.randint(0, 1, size=(num_data)).astype(np.float32)\n\n #Compute loss with numpy\n loss_np = 0.\n anchor = embeddings[0]\n positive = embeddings[1]\n negative = embeddings[2]\n\n for i in range(num_data):\n pos_dist = np.sum(np.square(anchor[i] - positive[i]))\n neg_dist = np.sum(np.square(anchor[i] - negative[i]))\n loss_np += max(0. ,(margin + pos_dist - neg_dist))\n loss_np /= num_data\n print('Triplet loss computed with numpy', loss_np)\n\n loss_tf = loss('triplet_loss')\n\n loss_tf_val = loss_tf(labels, embeddings, margin)\n print('Triplet loss computed with tensorflow', loss_tf_val)\n assert np.allclose(loss_np, loss_tf_val)", "def triplet_loss(anchor, positive, negative, alpha):\n\n with tf.variable_scope('triplet_loss'):\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), 1)\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), 1)\n\n basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), alpha)\n loss = tf.reduce_mean(tf.maximum(basic_loss, 0.0), 0)\n\n return loss", "def triplet_loss(y_true, y_pred, alpha = 0.3):\n \n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\n \n # Step 1: Compute the (encoding) distance between the anchor and the positive, you will need to sum over axis=-1\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), axis=-1)\n # Step 2: Compute the (encoding) distance between the anchor and the negative, you will need to sum over axis=-1\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), axis=-1)\n # Step 3: subtract the two previous distances and add alpha(learning variable).\n basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), alpha)\n # Step 4: Take the maximum of basic_loss and 0.0. Sum over the training examples.\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0.0))\n \n return loss", "def triplet_loss(y_true, y_pred, alpha = 0.2):\n \n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\n # Step 1: Compute the (encoding) distance between the anchor and the positive, you will need to sum over axis=-1\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), axis=-1)\n # Step 2: Compute the (encoding) distance between the anchor and the negative, you will need to sum over axis=-1\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), axis=-1)\n # Step 3: subtract the two previous distances and add alpha.\n basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), alpha)\n # Step 4: Take the maximum of basic_loss and 0.0. 
Sum over the training examples.\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0))\n \n return loss", "def triplet_loss(y_true, y_pred, alpha=0.2):\n\n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\n\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), axis=None, keep_dims=True)\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), axis=None, keep_dims=True)\n basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), alpha)\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0.0), axis=0)\n\n return loss", "def triplet_loss(y_true, y_pred, alpha=0.5):\n print('y_pred.shape = ', y_pred)\n\n total_lenght = y_pred.shape.as_list()[-1]\n # print('total_lenght=', total_lenght)\n # total_lenght =12\n\n anchor = y_pred[:, 0:int(total_lenght * 1 / 3)]\n positive = y_pred[:, int(total_lenght * 1 / 3):int(total_lenght * 2 / 3)]\n negative = y_pred[:, int(total_lenght * 2 / 3):int(total_lenght * 3 / 3)]\n\n # distance between the anchor and the positive\n pos_dist = K.sum(K.square(anchor - positive), axis=1)\n\n # distance between the anchor and the negative\n neg_dist = K.sum(K.square(anchor - negative), axis=1)\n\n # compute loss\n basic_loss = pos_dist - neg_dist + alpha\n loss = tf.reduce_mean(K.maximum(basic_loss, 0.0))\n\n return loss", "def triplet_loss(y_true, y_pred, alpha=0.2):\n\n anchor, positive, negative = y_pred[0], y_pred[1], y_pred[2]\n\n # Step 1: Compute the (encoding) distance between the anchor and the positive, you will need to sum over axis=-1\n pos_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, positive)), axis=-1)\n # Step 2: Compute the (encoding) distance between the anchor and the negative, you will need to sum over axis=-1\n neg_dist = tf.reduce_sum(tf.square(tf.subtract(anchor, negative)), axis=-1)\n # Step 3: subtract the two previous distances and add alpha.\n basic_loss = (pos_dist - neg_dist) + alpha\n # Step 4: Take the maximum of basic_loss and 0.0. 
Sum over the training examples.\n loss = tf.reduce_sum(tf.maximum(basic_loss, 0))\n\n return loss", "def bpr_triplet_loss(data):\n positive_item_latent, negative_item_latent, user_latent = data\n\n positive_interactions = tf.math.reduce_sum(tf.math.multiply(user_latent,\n positive_item_latent),\n axis=-1,\n keepdims=True)\n negative_interactions = tf.math.reduce_sum(tf.math.multiply(user_latent,\n negative_item_latent),\n axis=-1,\n keepdims=True)\n\n return tf.math.subtract(tf.constant(1.0),\n tf.sigmoid(tf.math.subtract(positive_interactions,\n negative_interactions)))", "def triplet_loss(anchor, positive, negative, alpha = my_alpha):\r\n delta_1 = tf.reduce_sum(tf.square(anchor - positive)) #square of euclidean distance or L2 norm\r\n delta_2 = tf.reduce_sum(tf.square(anchor - negative))\r\n loss = tf.maximum(delta_1 - delta_2 + alpha, tf.constant(0, dtype = tf.float32))\r\n return loss", "def triplet_loss(y_true, y_pred, margin = 1):\n\n anchor = y_pred[0]\n positive = y_pred[1]\n negative = y_pred[2]\n\n # squared distance between the anchor and the positive\n pos_dist = tf.math.reduce_sum(tf.math.square(anchor - positive), axis=-1)\n\n # squared distance between the anchor and the negative\n neg_dist = tf.math.reduce_sum(tf.math.square(anchor - negative), axis=-1)\n\n # compute loss\n basic_loss = margin + pos_dist - neg_dist\n loss = tf.math.maximum(basic_loss,0.0)\n loss = tf.math.reduce_mean(loss)\n return loss", "def triplet_loss(anchor, positive, negative, margin=0.2):\n distance_ap = tf.reduce_sum(tf.square(anchor - positive), 1)\n distance_an = tf.reduce_sum(tf.square(anchor - negative), 1)\n loss = tf.maximum(0.0, distance_ap - distance_an + margin)\n loss = tf.reduce_mean(loss)\n return loss", "def triplet_loss(self, sim, margin=0.5):\n # sim = sigmoid(5.0 - 5.0*distance)\n pos_sim, neg_sim = tf.slice(sim, [0, 0], [-1, 1]), tf.slice(sim, [0, 1], [-1, -1])\n # pos_sim has larger similarity than neg_sim\n triplet_loss = margin + neg_sim - pos_sim\n triplet_loss = tf.nn.relu(triplet_loss)\n triplet_loss = tf.reduce_sum(triplet_loss, axis=-1)\n triplet_loss = tf.reduce_mean(triplet_loss)\n\n # wmrb_loss = tf.log(1.0 + margin_loss)\n\n return triplet_loss", "def compute_loss(self):", "def compute_triplet_loss(y_pred, margin=0.2):\n anchor_feature, positive_feature, negative_feature = y_pred\n\n with tf.name_scope(\"triplet_loss\"):\n d_p_squared = tf.square(compute_euclidean_distance(anchor_feature, positive_feature))\n d_n_squared = tf.square(compute_euclidean_distance(anchor_feature, negative_feature))\n\n loss = tf.maximum(0., d_p_squared - d_n_squared + margin)\n #loss = d_p_squared - d_n_squared + margin\n\n return tf.reduce_mean(loss), tf.reduce_mean(d_p_squared), tf.reduce_mean(d_n_squared)", "def compute_triplet_loss(anchor_feature, positive_feature, negative_feature, margin):\n\n with tf.variable_scope('triplet_loss'):\n pos_dist = compute_euclidean_distance(anchor_feature, positive_feature, positive=True)\n neg_dist = compute_euclidean_distance(anchor_feature, negative_feature, positive=False)\n\n basic_loss = tf.add(tf.subtract(pos_dist, neg_dist), margin)\n loss = tf.reduce_mean(tf.maximum(basic_loss, 0.0), 0)\n\n return loss, tf.reduce_mean(pos_dist), tf.reduce_mean(neg_dist)", "def triplet_loss_network(input_shape):\n model = base_model(input_shape)\n\n # input layer for anchor, positive and negative branches\n input_anchor = Input(input_shape)\n input_positive = Input(input_shape)\n input_negative = Input(input_shape)\n\n # init the anchor model, positive model and 
negative model (three base_model())\n anchor_model = model(input_anchor)\n positive_model = model(input_positive)\n negative_model = model(input_negative)\n\n triplet_loss_model = keras.Model(inputs=[input_anchor, input_positive, input_negative], outputs=[anchor_model, positive_model, negative_model], name=\"triplet_loss_network\")\n\n return triplet_loss_model", "def compute_loss(self, x, gt):\n loss = sum([torch.mean((out - gt)**2) for out in self.forward(x)])\n return loss", "def loss(loss_name):\n \n def contrastive_loss(y_true, y_pred, margin = 1):\n \"\"\"Implementation of the triplet loss function\n\n\n Contrastive loss = 0.5 * mean( (1-true_value) * square(distance) + true_value * square( max(margin-distance, 0) ))\n\n Args:\n y_true (int): true label, positive pair (same class) -> 0, \n negative pair (different class) -> 1\n \n y_pred (list): python list containing two objects in a pair of tensors:\n left : the encodings for one image data in a pair\n right : the encodings for the other image data in a pair\n margin (float, optional): m > 0 determines how far the embeddings of \n a negative pair should be pushed apart. Defaults to 1.\n\n\n Returns:\n loss (float): real number, value of the loss\n \"\"\"\n\n left = y_pred[0]\n right = y_pred[1]\n\n distance = tf.math.sqrt(tf.math.reduce_sum(tf.math.square(left - right), axis=-1))\n\n loss_positive = tf.math.square(distance)\n loss_negative = tf.math.square(tf.maximum(0., margin - distance))\n \n loss = y_true * loss_negative + (1 - y_true) * loss_positive\n loss = 0.5 * tf.math.reduce_mean(loss)\n\n return loss\n\n def triplet_loss(y_true, y_pred, margin = 1):\n \"\"\"Implementation of the triplet loss function\n\n Arguments:\n y_true : true labels, required when you define a loss in Keras, \n not applied in this function.\n\n y_pred (list): python list containing three objects:\n anchor : the encodings for the anchor data\n positive : the encodings for the positive data (similar to anchor)\n negative : the encodings for the negative data (different from anchor)\n \n margin (float, optional): m > 0 determines how far the embeddings of \n a negative data should be pushed apart. 
Defaults to 1.\n\n Returns:\n loss (float): real number, value of the loss\n \"\"\"\n\n anchor = y_pred[0]\n positive = y_pred[1]\n negative = y_pred[2]\n\n # squared distance between the anchor and the positive\n pos_dist = tf.math.reduce_sum(tf.math.square(anchor - positive), axis=-1)\n\n # squared distance between the anchor and the negative\n neg_dist = tf.math.reduce_sum(tf.math.square(anchor - negative), axis=-1)\n\n # compute loss\n basic_loss = margin + pos_dist - neg_dist\n loss = tf.math.maximum(basic_loss,0.0)\n loss = tf.math.reduce_mean(loss)\n return loss\n\n \n if loss_name == 'contrastive_loss':\n return contrastive_loss\n \n if loss_name == 'triplet_loss':\n return triplet_loss", "def compute_loss(self, obs, returns):", "def triple_loss(inputs, labels, sigm = 1e-6, size_average=True):\n loss_item =0.\n cnt =0\n for ind,target in enumerate(labels):\n if(target == 1):\n loss = triple(inputs[1][ind],inputs[2][ind],inputs[0][ind],sigm)\n elif (target == 2):\n loss = triple(inputs[0][ind],inputs[2][ind],inputs[1][ind],sigm)\n else:\n loss = triple(inputs[0][ind], inputs[1][ind], inputs[2][ind],sigm)\n if loss.item() != 0:\n cnt+=1\n loss_item += (loss)\n if size_average :\n loss_item/=cnt\n return loss_item", "def loss_function(self, train_head, train_tail, train_relation, train_head_corrupted, train_tail_corrupted):\n\n # train_head = tf.nn.l2_normalize(train_head, 1)\n # train_tail = tf.nn.l2_normalize(train_tail, 1)\n # train_head_corrupted = tf.nn.l2_normalize(train_head_corrupted, 1)\n # train_tail_corrupted = tf.nn.l2_normalize(train_tail_corrupted, 1)\n\n # loss = tf.reduce_mean(\n # tf.maximum(self.dict_paras['margin']\n # + self.distance(tf.add(train_head, train_relation), train_tail)\n # - self.distance(tf.add(train_head_corrupted, train_relation), train_tail_corrupted), 0.))\n\n loss = tf.reduce_mean(self.distance(tf.add(train_head, train_relation), train_tail))\n\n return loss", "def loss_(self, batch):\n raise NotImplementedError", "def _pairwise_loss(self, pairwise_logits):\n raise NotImplementedError('Calling an abstract method.')", "def compute_loss(self, *args, **kwargs):\n raise NotImplementedError", "def _fit_triplet_loss(self):\n self.keras_model.compile(optimizer=Adam(self.learning_rate, decay=0.000001),\n loss=triplet_loss(margin=self.margin),\n metrics=[triplet_acc(margin=0), triplet_acc(margin=self.margin)])\n\n data_generator = TripletDataGenerator(self)\n aug_gen = data_generator.get_training_gen()\n\n validation_images, y = data_generator.get_hard_preprocessed_validation_dataset()\n callbacks_list = self._get_callbacks()\n loss_hist = [self.keras_model.fit_generator(aug_gen,\n steps_per_epoch=MAX_TRAIN_STEPS,\n epochs=MAX_TRAIN_EPOCHS,\n validation_data=[validation_images, y],\n callbacks=callbacks_list,\n workers=0,\n max_queue_size=1)]\n return loss_hist", "def loss(self, T, mode):\n self.circuit.loss(T, mode)", "def Tanimoto_loss(label, pred):\n smooth = 1e-5\n\n Vli = tf.reduce_mean(tf.reduce_sum(label, axis=[1,2]), axis=0)\n # wli = 1.0/Vli**2 # weighting scheme\n wli = tf.math.reciprocal(Vli**2) # weighting scheme\n\n # ---------------------This line is taken from niftyNet package --------------\n # ref: https://github.com/NifTK/NiftyNet/blob/dev/niftynet/layer/loss_segmentation.py, lines:170 -- 172\n # First turn inf elements to zero, then replace that with the maximum weight value\n new_weights = tf.where(tf.math.is_inf(wli), tf.zeros_like(wli), wli)\n wli = tf.where(tf.math.is_inf(wli), tf.ones_like(wli) * tf.reduce_max(new_weights), wli)\n # 
--------------------------------------------------------------------\n\n # print('[DEBUG LOSS]')\n # print(label.shape)\n # print(pred.shape)\n\n square_pred = tf.square(pred)\n square_label = tf.square(label)\n add_squared_label_pred = tf.add(square_pred, square_label)\n sum_square = tf.reduce_sum(add_squared_label_pred, axis=[1, 2])\n # print('sum square')\n # print(sum_square.shape)\n\n product = tf.multiply(pred, label)\n sum_product = tf.reduce_sum(product, axis=[1, 2])\n # print('sum product')\n # print(sum_product.shape)\n sum_product_labels = tf.reduce_sum(tf.multiply(wli, sum_product), axis=-1)\n # print('sum product labels')\n # print(sum_product_labels.shape)\n\n denomintor = tf.subtract(sum_square, sum_product)\n # print('denominator')\n # print(denomintor.shape)\n denomintor_sum_labels = tf.reduce_sum(tf.multiply(wli, denomintor), axis=-1)\n # print('denominator sum labels')\n # print(denomintor_sum_labels.shape)\n # Add smooth to avoid numerical instability\n loss = tf.divide(sum_product_labels + smooth, denomintor_sum_labels + smooth)\n # print('loss')\n # print(loss.shape)\n return loss", "def model_loss(inp, fake, real_label, fake_label):\n \n \n Dreal,realcls,R1 = gradpen(inp)\n [Dfake,fakecls] = D(fake)\n # 1. Adversarial loss\n \n glabel = tf.ones_like(Dfake)#tf.random.uniform((Dfake.shape), 1-LN, 1)\n dlabelr = tf.ones_like(Dreal)#tf.random.uniform((Dreal.shape), 1-LN, 1)\n dlabelf = tf.zeros_like(Dfake)#tf.random.uniform((Dfake.shape), 0, LN)\n \n \n \n # D has no sigmoid activation: \"from_logits=True\"\n real_loss = tf.keras.losses.binary_crossentropy(\n dlabelr, Dreal, from_logits=True)\n real_loss = tf.reduce_mean(real_loss)\n \n fake_loss = tf.keras.losses.binary_crossentropy(\n dlabelf, Dfake, from_logits=True)\n fake_loss = tf.reduce_mean(fake_loss)\n \n Dadv = 0.5*(real_loss+fake_loss)\n \n Gadv = tf.keras.losses.binary_crossentropy(\n glabel, Dfake, from_logits=True)\n Gadv = tf.reduce_mean(Gadv)\n \n # 2. Classification loss\n \n Dcls = tf.keras.losses.binary_crossentropy(real_label, realcls, from_logits=True)\n Dcls = tf.reduce_mean(Dcls)\n \n Gcls = tf.keras.losses.binary_crossentropy(fake_label, fakecls, from_logits=True)\n Gcls = tf.reduce_mean(Gcls)\n \n # 3. Total loss\n \n Dloss = Dadv + (GAMMA/2)*R1 + LAMBDA_CLS*Dcls\n \n Gloss = Gadv + LAMBDA_CLS*Gcls\n \n return (Dloss, Dadv, Dcls, R1), (Gloss, Gadv, Gcls)", "def compute_loss(self, **kwargs):\n raise NotImplementedError", "def loss_perceptual(self, vgg_out, vgg_gt, vgg_comp): \n loss = 0\n for o, c, g in zip(vgg_out, vgg_comp, vgg_gt):\n loss += self.l1(o, g) + self.l1(c, g)\n return loss", "def loss(self, prediction_dict, groundtruth_lists):\r\n pass", "def triplet_semihard_loss(margin=1.0, metric=\"cosine\"):\n assert metric in [\"cosine\", \"euclidean\", \"squared_euclidean\"]\n\n @tf.function\n def loss(labels, embeddings):\n # sourced from:\n # https://github.com/tensorflow/tensorflow/blob/r1.14/tensorflow/contrib/losses/python/metric_learning/metric_loss_ops.py#L160-L239\n \"\"\"Computes the triplet loss with semi-hard negative mining.\n\n The loss encourages the positive distances (between a pair of embeddings\n with the same labels) to be smaller than the minimum negative distance\n among which are at least greater than the positive distance plus the\n margin constant (called semi-hard negative) in the mini-batch. 
If no\n such negative exists, uses the largest negative distance instead.\n\n See: https://arxiv.org/abs/1503.03832.\n\n Args:\n labels: 1-D tf.int32 `Tensor` with shape [batch_size] of multiclass\n integer labels.\n embeddings: 2-D float `Tensor` of embedding vectors. Embeddings\n should be l2 normalized.\n margin: Float, margin term in the loss definition.\n\n Returns:\n triplet_loss: tf.float32 scalar.\n \"\"\"\n labels = tf.cast(labels, tf.int32)\n embeddings = tf.cast(embeddings, tf.float32)\n embeddings = tf.nn.l2_normalize(embeddings, axis=1)\n\n # Reshape [batch_size] label tensor to a [batch_size, 1] label tensor.\n lshape = tf.shape(labels)\n if lshape.shape == 1:\n labels = tf.reshape(labels, [lshape[0], 1])\n\n # Build pairwise squared distance matrix.\n if metric == \"cosine\":\n pdist_matrix = cosine_pairwise_distance(embeddings)\n elif metric == \"euclidean\":\n pdist_matrix = euclidean_pairwise_distance(\n embeddings, squared=False)\n elif metric == \"squared_euclidean\":\n pdist_matrix = euclidean_pairwise_distance(\n embeddings, squared=True)\n # Build pairwise binary adjacency matrix.\n adjacency = tf.equal(labels, tf.transpose(labels))\n # Invert so we can select negatives only.\n adjacency_not = tf.logical_not(adjacency)\n\n batch_size = tf.size(labels)\n\n # Compute the mask.\n pdist_matrix_tile = tf.tile(pdist_matrix, [batch_size, 1])\n mask = tf.logical_and(\n tf.tile(adjacency_not, [batch_size, 1]),\n tf.math.greater(\n pdist_matrix_tile, tf.reshape(\n tf.transpose(pdist_matrix), [-1, 1])))\n mask_final = tf.reshape(\n tf.math.greater(\n tf.math.reduce_sum(\n tf.cast(mask, dtype=tf.float32), 1, keepdims=True),\n 0.0), [batch_size, batch_size])\n mask_final = tf.transpose(mask_final)\n\n adjacency_not = tf.cast(adjacency_not, dtype=tf.float32)\n mask = tf.cast(mask, dtype=tf.float32)\n\n # negatives_outside: smallest D_an where D_an > D_ap.\n negatives_outside = tf.reshape(\n masked_minimum(pdist_matrix_tile, mask), [batch_size, batch_size])\n negatives_outside = tf.transpose(negatives_outside)\n\n # negatives_inside: largest D_an.\n negatives_inside = tf.tile(\n masked_maximum(pdist_matrix, adjacency_not), [1, batch_size])\n semi_hard_negatives = tf.where(\n mask_final, negatives_outside, negatives_inside)\n\n loss_mat = tf.math.add(margin, pdist_matrix - semi_hard_negatives)\n\n mask_positives = tf.cast(\n adjacency, dtype=tf.float32) - tf.linalg.diag(\n tf.ones([batch_size]))\n\n # In lifted-struct, the authors multiply 0.5 for upper triangular\n # in semihard, they take all positive pairs except the diagonal.\n num_positives = tf.math.reduce_sum(mask_positives)\n\n triplet_loss = tf.math.truediv(\n tf.math.reduce_sum(\n tf.math.maximum(\n tf.math.multiply(loss_mat, mask_positives), y=0.0)),\n num_positives, name=\"triplet_semihard_loss\")\n\n return triplet_loss\n\n return loss", "def _flat_reconstruction_loss(self, flat_x_target, flat_rnn_output):\n pass", "def tv_loss(input: th.Tensor):\n input = tf.pad(input, (0, 1, 0, 1), \"replicate\")\n x_diff = input[..., :-1, 1:] - input[..., :-1, :-1]\n y_diff = input[..., 1:, :-1] - input[..., :-1, :-1]\n return (x_diff ** 2 + y_diff ** 2).mean([1, 2, 3])", "def loss(self, y: torch.Tensor, state: AlgorithmState) -> torch.Tensor:\n\n raise NotImplementedError()", "def loss(self, features: Tensor, tags: Tensor, masks: Tensor):\n \n features = self.linear(features)\n T = features.size(1)\n masks_ = masks[:, :T].float()\n forward_score = self.forward_algorithm(features, masks_)\n gold_score = self._score(features, tags[:, 
:T].long(), masks_)\n loss = (forward_score - gold_score).mean()\n return loss", "def triplet_imposter_semi_hard_loss(margin=1.0, metric=\"cosine\",\n reduce_loss=True):\n assert metric in [\"cosine\", \"euclidean\", \"squared_euclidean\"]\n\n # @tf.function\n def loss(anchor_embeddings, positive_embeddings):\n \"\"\"TODO\n \"\"\"\n anchor_shape = tf.shape(anchor_embeddings)\n positive_shape = tf.shape(positive_embeddings)\n\n # should have same dimensions\n assert anchor_shape.shape == positive_shape.shape\n\n anchor_embeddings = tf.cast(anchor_embeddings, tf.float32)\n anchor_embeddings = tf.nn.l2_normalize(anchor_embeddings, axis=1)\n\n positive_embeddings = tf.cast(positive_embeddings, tf.float32)\n positive_embeddings = tf.nn.l2_normalize(positive_embeddings, axis=1)\n\n # build pairwise distance matrix between anchor and positive embeddings\n if metric == \"cosine\":\n pdist_matrix = cosine_collection_distance(\n anchor_embeddings, positive_embeddings)\n elif metric == \"euclidean\":\n pdist_matrix = euclidean_collection_distance(\n anchor_embeddings, positive_embeddings, squared=False)\n elif metric == \"squared_euclidean\":\n pdist_matrix = euclidean_collection_distance(\n anchor_embeddings, positive_embeddings, squared=True)\n\n # set \"labels\" as indices of each pair in the batch\n labels = tf.range(anchor_shape[0], dtype=tf.int32)\n\n # reshape [batch_size] labeld tensor to a [batch_size, 1] labels tensor\n lshape = tf.shape(labels)\n if lshape.shape == 1:\n labels = tf.reshape(labels, [lshape[0], 1])\n\n batch_size = tf.size(labels)\n\n # build pairwise binary adjacency matrix\n adjacency = tf.equal(labels, tf.transpose(labels))\n # invert so we can select negatives only\n adjacency_not = tf.logical_not(adjacency)\n\n # for a given pair distance, find other pair distances in\n # same row (i.e. 
with same anchor) > the pair distance\n # (these are the negatives outsides for the anchor)\n pdist_matrix_tile = tf.tile(pdist_matrix, [batch_size, 1])\n mask = tf.logical_and(\n tf.tile(adjacency_not, [batch_size, 1]),\n tf.math.greater(\n pdist_matrix_tile, tf.reshape(\n tf.transpose(pdist_matrix), [-1, 1])))\n\n adjacency_not = tf.cast(adjacency_not, dtype=tf.float32)\n mask = tf.cast(mask, dtype=tf.float32)\n\n # get mask which determines if a pair distance has negatives outside\n mask_final = tf.reshape(\n tf.math.greater(\n tf.math.reduce_sum(mask, 1, keepdims=True),\n 0.0), [batch_size, batch_size])\n mask_final = tf.transpose(mask_final)\n\n # negatives_outside: smallest D_an where D_an > D_ap\n negatives_outside = tf.reshape(\n masked_minimum(pdist_matrix_tile, mask), [batch_size, batch_size])\n negatives_outside = tf.transpose(negatives_outside)\n\n # negatives_inside: largest D_an\n negatives_inside = tf.tile(\n masked_maximum(pdist_matrix, adjacency_not), [1, batch_size])\n\n semi_hard_negatives = tf.where(\n mask_final, negatives_outside, negatives_inside)\n\n loss_mat = tf.math.add(margin, pdist_matrix - semi_hard_negatives)\n\n # take all positive pairs which exist ONLY the diagonal\n mask_positives = tf.cast(adjacency, dtype=tf.float32)\n num_positives = tf.math.reduce_sum(mask_positives)\n\n triplet_loss = tf.math.maximum(\n tf.math.multiply(loss_mat, mask_positives), y=0.0)\n\n if reduce_loss:\n triplet_loss = tf.math.truediv(\n tf.math.reduce_sum(triplet_loss), num_positives)\n\n return triplet_loss\n\n return loss", "def get_loss(self):\r\n\r\n if F.loss_type==\"cosine\":\r\n self.losscos = r2d*tf.acos(1-tf.losses.cosine_distance(tf.nn.l2_normalize(self.labels,1), tf.nn.l2_normalize(self.out, 1), dim=1))\r\n self.loss = tf.losses.cosine_distance(tf.nn.l2_normalize(self.labels,1), tf.nn.l2_normalize(self.out, 1), dim=1)\r\n elif F.loss_type==\"mse2d\":\r\n xl, yl, zl = tf.split(self.labels, 3, axis=1)\r\n xo, yo, zo = tf.split(self.out, 3, axis=1)\r\n thetal, thetao = tf.asin(-yl), tf.asin(-yo)\r\n phil, phio = tf.atan2(-zl, -xl), tf.atan2(-zo, -xo)\r\n self.lb = tf.concat([thetal, phil], axis=1)\r\n self.ob = tf.concat([thetao, phio], axis=1)\r\n self.loss = tf.scalar_mul(tf.constant(r2d), tf.losses.mean_squared_error(self.lb, self.ob, 2))\r\n elif F.loss_type==\"mse3d\":\r\n self.loss = tf.losses.mean_squared_error(tf.nn.l2_normalize(self.labels, 0), tf.nn.l2_normalize(self.out, 0))", "def loss_fn(self, targets, outputs, model):", "def loss_total(self, mask):\n\n def loss(y_true, y_pred):\n\n # Compute predicted image with non-hole pixels set to ground truth\n y_comp = mask * y_true + (1-mask) * y_pred\n\n # Compute the vgg features. 
\n if self.vgg_device:\n with tf.device(self.vgg_device):\n vgg_out = self.vgg(y_pred)\n vgg_gt = self.vgg(y_true)\n vgg_comp = self.vgg(y_comp)\n else:\n vgg_out = self.vgg(y_pred)\n vgg_gt = self.vgg(y_true)\n vgg_comp = self.vgg(y_comp)\n \n # Compute loss components\n l1 = self.loss_valid(mask, y_true, y_pred)\n l2 = self.loss_hole(mask, y_true, y_pred)\n l3 = self.loss_perceptual(vgg_out, vgg_gt, vgg_comp)\n l4 = self.loss_tv(mask, y_comp)\n l5 = - 0.5 * K.sum(1 + self.z_log_var -self.cl - K.square(self.z_mean)/K.exp(self.cl) - K.exp(self.z_log_var)/K.exp(self.cl))\n # Return loss function\n return l1 + 6*l2 + 0.05*l3 + 0.1*l4 +l5 \n return loss", "def triplet_loss_normals(self, normals):\n batch, T, dim = normals.size()\n similarity_list = []\n for i in range(T):\n for j in range(i + 1, T):\n similarity = torch.pow(torch.cosine_similarity(normals[:, i, :], normals[:, j, :]), 10).unsqueeze(0)\n similarity_list.append(similarity)\n sim_tensor = torch.cat(similarity_list)\n sim_tensor = torch.transpose(sim_tensor, 0, 1)\n sim_tensor = torch.sum(sim_tensor, dim=1).unsqueeze(1)\n # sim_tensor = sim_tensor.mean(-1) #in case we want average over all combinations\n zeros = torch.zeros(batch).unsqueeze(1).to(self.device)\n losses = torch.max(sim_tensor, zeros)\n return losses", "def update(self, triplet_batch):\n feed_dict = {self.anchor: triplet_batch[0],\n self.pos: triplet_batch[1],\n self.neg: triplet_batch[2]}\n _, loss = self.sess.run([self.train_step, self.loss], feed_dict=feed_dict)\n return loss", "def update(self, triplet_batch):\n feed_dict = {self.anchor: triplet_batch[0],\n self.pos: triplet_batch[1],\n self.neg: triplet_batch[2]}\n _, loss = self.sess.run([self.train_step, self.loss], feed_dict=feed_dict)\n return loss", "def update(self, triplet_batch):\n feed_dict = {self.anchor: triplet_batch[0],\n self.pos: triplet_batch[1],\n self.neg: triplet_batch[2]}\n _, loss = self.sess.run([self.train_step, self.loss], feed_dict=feed_dict)\n return loss", "def l1_loss(obs, actual):\n # (tf.Tensor, tf.Tensor, float) -> tf.Tensor\n return tf.reduce_sum(tf.abs(obs - actual) , 1)", "def _define_loss(self):\n\n cost = []\n unit_cost = []\n for nn in range(len(self.ffnet_out)):\n data_out = self.data_out_batch[nn]\n if self.filter_data:\n # this will zero out predictions where there is no data,\n # matching Robs here\n pred = tf.multiply(\n self.networks[self.ffnet_out[nn]].layers[-1].outputs,\n self.data_filter_batch[nn])\n else:\n pred = self.networks[self.ffnet_out[nn]].layers[-1].outputs\n\n nt = tf.cast(tf.shape(pred)[0], tf.float32)\n # define cost function\n if self.noise_dist == 'gaussian':\n with tf.name_scope('gaussian_loss'):\n cost.append(tf.nn.l2_loss(data_out - pred) / nt)\n unit_cost.append(tf.reduce_mean(tf.square(data_out-pred), axis=0))\n\n elif self.noise_dist == 'poisson':\n with tf.name_scope('poisson_loss'):\n\n if self.poisson_unit_norm is not None:\n # normalize based on rate * time (number of spikes)\n cost_norm = tf.multiply(self.poisson_unit_norm[nn], nt)\n else:\n cost_norm = nt\n\n cost.append(-tf.reduce_sum(tf.divide(\n tf.multiply(data_out, tf.log(self._log_min + pred)) - pred,\n cost_norm)))\n\n unit_cost.append(-tf.divide(\n tf.reduce_sum(\n tf.multiply(\n data_out, tf.log(self._log_min + pred)) - pred, axis=0),\n cost_norm))\n\n elif self.noise_dist == 'bernoulli':\n with tf.name_scope('bernoulli_loss'):\n # Check per-cell normalization with cross-entropy\n # cost_norm = tf.maximum(\n # tf.reduce_sum(data_out, axis=0), 1)\n cost.append(tf.reduce_mean(\n 
tf.nn.sigmoid_cross_entropy_with_logits(\n labels=data_out, logits=pred)))\n unit_cost.append(tf.reduce_mean(\n tf.nn.sigmoid_cross_entropy_with_logits(\n labels=data_out, logits=pred), axis=0))\n else:\n TypeError('Cost function not supported.')\n\n self.cost = tf.add_n(cost)\n self.unit_cost = unit_cost\n\n # Add regularization penalties\n reg_costs = []\n with tf.name_scope('regularization'):\n for nn in range(self.num_networks):\n reg_costs.append(self.networks[nn].define_regularization_loss())\n self.cost_reg = tf.add_n(reg_costs)\n\n self.cost_penalized = tf.add(self.cost, self.cost_reg)\n\n # save summary of cost\n # with tf.variable_scope('summaries'):\n tf.summary.scalar('cost', self.cost)\n tf.summary.scalar('cost_penalized', self.cost_penalized)\n tf.summary.scalar('reg_pen', self.cost_reg)", "def _compute_loss(self, predictions, targets, **params):\n pass", "def nt_transfer_loss(self, student_net_params, masks, teacher_net_params, x, density_level): \n\n masked_student_net_params = get_sparse_params_filtered_by_masks(student_net_params, masks)\n\n # split inputs into two collections, x1 and x2.\n x1 = x[:int(len(x)/2)]\n x2 = x[int(len(x)/2):]\n \n # student network prediction\n student_prediction = self.apply_fn(masked_student_net_params, x) \n \n # teacher network prediction\n teacher_prediction = self.apply_fn(teacher_net_params, x)\n\n # student network's NTK evaluated on x1 and x2\n student_ntk_mat = self.emp_ntk_fn(x1, x2, masked_student_net_params) \n\n # teacher network's NTK evaluated on x1 and x2\n teacher_ntk_mat = self.emp_ntk_fn(x1, x2, teacher_net_params) \n\n # compute kernel, target, and paramter l2 loss\n ker_dist, target_dist, param_squared_norm = self.kernel_dist_target_dist_l2_loss(student_ntk_mat, student_prediction, teacher_ntk_mat, teacher_prediction, masked_student_net_params)\n\n # weight these losses to get the transfer loss\n transfer_loss = self.LAMBDA_KER_DIST * ker_dist + target_dist + (self.LAMBDA_L2_REG / density_level) * param_squared_norm \n\n return transfer_loss", "def tf_l2_loss(Gt, pred,_axis):\n l2diff = tf.subtract(Gt, pred)\n l2loss = tf.reduce_sum(tf.square(l2diff), axis=_axis)\n l2loss = tf.maximum(l2loss, 1e-10)\n l2loss = tf.sqrt(l2loss) # (n_batch, n_class) -> (n_batch, 1)\n\n return l2loss", "def loss(self, T, mode):\n self.circuit.loss(T, self._remap_modes(mode))", "def loss(logits, angles):\n\treturn tf.nn.l2_loss(tf.sub(logits, angles), name='loss')", "def loss(self, **kwargs):\n pass", "def loss(self, X, y):\n pass", "def pointnet_loss(preds, targets, chamfer_lambda, physical_lambda, joint_lambda, has_anno=None, joint_idxs=None):\n\n gen_pc = preds[0]\n out_idxs = preds[1]\n coords_pred = preds[2]\n seg_out = preds[8]\n seg_out = seg_out.argmin(-1)\n bone_weights = preds[9]\n target_pc = targets[1]\n\n if joint_idxs is not None:\n bone_weights = bone_weights[joint_idxs]\n if has_anno is not None:\n coords_pred = coords_pred[has_anno]\n if joint_idxs is not None:\n coords_pred = coords_pred[:, joint_idxs]\n\n # Chamfer loss\n N = gen_pc.shape[1]\n M = target_pc.shape[1]\n\n pc1_expand = gen_pc.unsqueeze(2).repeat(1, 1, M, 1)\n pc2_expand = target_pc.unsqueeze(1).repeat(1, N, 1, 1)\n pc_diff = pc1_expand - pc2_expand\n pc_dist = (pc_diff ** 2).sum(-1)\n pc_dist = torch.sqrt(pc_dist)\n dist1, idx1 = pc_dist.min(2)\n dist2, idx2 = pc_dist.min(1)\n chamfer_loss = (dist1 + dist2).mean()\n loss = chamfer_lambda * chamfer_loss\n\n # Joint loss\n joint_loss = []\n for i in range(bone_weights.shape[0]):\n # Gen mask\n joint_weight = 
bone_weights[i]\n joint_weight = joint_weight.unsqueeze(0).expand(coords_pred.shape[0], -1)\n joint_weight = torch.gather(joint_weight, -1, out_idxs)\n joint_weight[joint_weight > 0.15] = 1 # threshold\n joint_weight[joint_weight < 1] = 0\n joint_weight_exp = joint_weight.unsqueeze(-1).expand(joint_weight.shape[0], joint_weight.shape[1], joint_weight.shape[1]).byte()\n\n # Target mask TODO: clean up masking\n part_mask = seg_out.clone()\n part_mask[part_mask != i] = -1\n part_mask[part_mask == i] = 1\n part_mask[part_mask == -1] = 0\n part_mask_exp = part_mask.byte().unsqueeze(1).expand(part_mask.shape[0], part_mask.shape[1], part_mask.shape[1])\n mask = joint_weight_exp & part_mask_exp\n part_dist = pc_dist.clone()\n part_dist[mask == 0] = 10\n\n d1, _ = part_dist.min(2)\n d2, _ = part_dist.min(1)\n\n # This is very hacky and I suspect it will cause all points to acquire\n # a gradient value.\n d1[d1 == 10] = 0\n d2[d2 == 10] = 0\n joint_loss.append((d1 + d2).mean())\n\n joint_loss = torch.tensor(joint_loss, device=target_pc.device)\n loss += (joint_lambda * joint_loss.mean())\n\n # Coord Loss Only\n coord_loss = F.mse_loss(preds[2], targets[0])\n loss += coord_loss\n\n # with torch.no_grad():\n # coord_loss = F.mse_loss(coords_pred, targets[0])\n\n # Physical loss\n p_loss = physical_loss(preds[6])\n loss += (physical_lambda * p_loss)\n\n return loss, coord_loss, joint_loss, chamfer_loss, p_loss", "def loss_function(self, x, fwd_rtn):\n px_zs = fwd_rtn[\"px_zs\"]\n qz_x = fwd_rtn[\"qz_x\"]\n px_zss = fwd_rtn[\"px_zss\"]\n qz_xs = fwd_rtn[\"qz_xs\"]\n\n kl = self.calc_kl(qz_x)\n kl_separate = self.calc_kl(qz_xs)\n ll = self.calc_ll(x, px_zs)\n ll_separate = self.calc_ll(x, px_zss)\n\n total = kl + kl_separate - ll - ll_separate\n losses = {\"loss\": total, \"kl\": kl, \"ll\": ll, \"ll_separate\": ll_separate, \"kl_separate\": kl_separate}\n\n return losses", "def compute_loss(self, inputs):\r\n outputs = self.net.compute_outputs(inputs)\r\n loss_grad = self.net.compute_loss_grad(outputs - inputs)\r\n loss = np.sum((inputs - outputs) ** 2, axis=0).mean() / 2.0\r\n return loss, loss_grad", "def do_loss(logits, labels):\n return tf.reduce_sum(tf.square(logits - labels))", "def batch_hard_triplet_loss(embeddings, pids, margin=0.2):\n dists = _compute_pairwise_distance(embeddings, embeddings)\n same_identity_mask = tf.equal(tf.expand_dims(pids, axis=1),\n tf.expand_dims(pids, axis=0))\n\n negative_mask = tf.logical_not(same_identity_mask)\n positive_mask = tf.logical_xor(same_identity_mask,\n tf.eye(tf.shape(pids)[0], dtype=tf.bool))\n\n hardest_positive = tf.reduce_max(\n dists * tf.cast(positive_mask, tf.float32), axis=1)\n hardest_negative = tf.map_fn(\n lambda x: tf.reduce_min(tf.boolean_mask(x[0], x[1])),\n (dists, negative_mask), tf.float32)\n\n losses = hardest_positive - hardest_negative\n\n if isinstance(margin, numbers.Real):\n losses = tf.maximum(margin + losses, 0.0)\n elif margin == 'soft':\n losses = tf.nn.softplus(losses)\n else:\n raise ValueError(\n 'margin can be either a float or `soft`')\n\n num_active = tf.reduce_sum(tf.cast(tf.greater(losses, 1e-5), tf.float32))\n loss = tf.reduce_mean(losses)\n return loss, hardest_positive, hardest_negative, num_active", "def loss(self, z1_rec, z3_rec):\n pass", "def loss_fn(model):\n with flax.nn.stateful(state) as new_state:\n with flax.nn.stochastic(prng_key):\n logits = model(batch['image'])\n loss = cross_entropy_loss(logits, batch['label'])\n # TODO(britefury): check if applying L2 regularization to weights but\n # *not* biases 
improves results\n weight_penalty_params = jax.tree_leaves(model.params)\n weight_l2 = sum([jnp.sum(x ** 2)\n for x in weight_penalty_params\n if x.ndim > 1])\n weight_penalty = l2_reg * 0.5 * weight_l2\n loss = loss + weight_penalty\n return loss, (new_state, logits)", "def compute_loss(y, tx, w):\n e = y-tx@w\n return 1/(2*y.shape[0])*e.transpose()@e", "def loss(A, Y):\n return A - Y", "def compute_loss(y, tx, w):\n # If a is an N-D array and b is a 1-D array, it is a sum product over the last axis of a and b.\n e = y - np.dot(tx, w)\n loss = 1 / 2 * np.mean(e**2)\n return loss", "def compute_loss(self, features, mode, params, precomputed):\n raise NotImplementedError(\"Model does not implement loss.\")", "def loss_total(self):\r\n def loss(y_true, y_pred):\r\n l2 = 1/2*K.sum(K.square(y_true-y_pred))\r\n\r\n return l2\r\n return loss", "def getLoss(self, x_test, t_test):\n x_t = Variable(x_test, requires_grad=False)\n #Feed inputes into neural network\n t_pred = self.model(x_t)\n #Now lets compute out loss\n loss = self.loss_fn(t_pred, t_test)\n return loss", "def loss(self, x, y):\n raise NotImplementedError", "def rpn_cls_loss(*args):\n y_true, y_pred = args if len(args) == 2 else args[0]\n indices = tf.where(tf.not_equal(y_true, -1))\n target = tf.gather_nd(y_true, indices)\n output = tf.gather_nd(y_pred, indices)\n lf = tf.losses.BinaryCrossentropy()\n return lf(target, output)", "def verbose_loss(self, feedback: _Feedback, extra_info) -> Dict[str, _Array]:\n hint_preds, diff_logits, gt_diffs = extra_info\n\n for inp in feedback.features.inputs:\n if inp.location in [_Location.NODE, _Location.EDGE]:\n nb_nodes = inp.data.shape[1]\n break\n\n total_loss = 0.0\n lengths = feedback.features.lengths\n\n losses = {}\n if self.decode_diffs:\n for loc in _Location:\n for i in range(len(gt_diffs)):\n is_not_done = _is_not_done_broadcast(lengths, i, gt_diffs[i][loc])\n diff_loss = (\n jnp.maximum(diff_logits[i][loc], 0) -\n diff_logits[i][loc] * gt_diffs[i][loc] +\n jnp.log1p(jnp.exp(-jnp.abs(diff_logits[i][loc]))) * is_not_done)\n losses[loc.name + '_diff_%d' % i] = jnp.mean(diff_loss)\n\n if self.decode_hints:\n for truth in feedback.features.hints:\n for i in range(truth.data.shape[0] - 1):\n assert truth.name in hint_preds[i]\n pred = hint_preds[i][truth.name]\n is_not_done = _is_not_done_broadcast(lengths, i, truth.data[i + 1])\n if truth.type_ == _Type.SCALAR:\n if self.decode_diffs:\n total_loss = jnp.mean((pred - truth.data[i + 1])**2 *\n gt_diffs[i][truth.location] * is_not_done)\n else:\n total_loss = jnp.mean((pred - truth.data[i + 1])**2 * is_not_done)\n elif truth.type_ == _Type.MASK:\n if self.decode_diffs:\n total_loss = jnp.mean(\n jnp.maximum(pred, 0) - pred * truth.data[i + 1] +\n jnp.log1p(jnp.exp(-jnp.abs(pred))) *\n gt_diffs[i][truth.location] * is_not_done)\n else:\n total_loss = jnp.mean(\n jnp.maximum(pred, 0) - pred * truth.data[i + 1] +\n jnp.log1p(jnp.exp(-jnp.abs(pred))) * is_not_done)\n elif truth.type_ == _Type.MASK_ONE:\n if self.decode_diffs:\n total_loss = jnp.mean(\n -jnp.sum(\n truth.data[i + 1] * jax.nn.log_softmax(\n pred) * is_not_done, axis=-1, keepdims=True) *\n gt_diffs[i][truth.location])\n else:\n total_loss = jnp.mean(-jnp.sum(\n truth.data[i + 1] * jax.nn.log_softmax(\n pred) * is_not_done, axis=-1))\n elif truth.type_ == _Type.CATEGORICAL:\n if self.decode_diffs:\n total_loss = jnp.mean(\n -jnp.sum(\n truth.data[i + 1] * jax.nn.log_softmax(\n pred), axis=-1, keepdims=True) *\n jnp.expand_dims(gt_diffs[i][truth.location], -1) *\n is_not_done)\n 
else:\n total_loss = jnp.mean(-jnp.sum(\n truth.data[i + 1] * jax.nn.log_softmax(pred), axis=-1) *\n is_not_done)\n elif truth.type_ == _Type.POINTER:\n if self.decode_diffs:\n total_loss = jnp.mean(-jnp.sum(\n hk.one_hot(truth.data[i + 1], nb_nodes) *\n jax.nn.log_softmax(pred),\n axis=-1) * gt_diffs[i][truth.location] * is_not_done)\n else:\n total_loss = jnp.mean(-jnp.sum(\n hk.one_hot(truth.data[i + 1], nb_nodes) *\n jax.nn.log_softmax(pred),\n axis=-1) * is_not_done)\n else:\n raise ValueError('Incorrect type')\n losses[truth.name + '_%d' % i] = total_loss\n return losses", "def compute_loss(self):\n self.prototypes = self.compute_prototypes()\n self.test_logits = self.compute_logits()\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=self.episode.test_labels, logits=self.test_logits)\n cross_entropy_loss = tf.reduce_mean(loss)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy_loss + self.weight_decay * regularization\n return loss", "def _create_loss(self):\n\n with tf.name_scope(\"loss\"):\n \n # gini=(tf.nn.l2_loss( self.score))/100000\n gini = tf.losses.softmax_cross_entropy(self.score, 0*self.score)\n \n promo_prob=tf.reduce_sum(tf.multiply(self.score, self.cohort_weight),\n axis=1)\n inc_value = tf.reduce_mean(tf.multiply(promo_prob, self.value))- self.control_value\n inc_cost = tf.reduce_mean( tf.multiply(promo_prob, self.cost)) - self.control_cost\n \n\n\n # determine loss function based on self.obj_rule\n if self.obj_rule == 'cpiv':\n self.objective = inc_cost / inc_value\n\n elif self.obj_rule == 'ivc':\n # maximize ivc\n self.objective = - inc_value / inc_cost\n\n elif self.obj_rule == 'lagrangian':\n assert self.shadow is not None, 'Need to pass in shadow value if use lagrangian as obj_rule.'\n self.objective = inc_cost - self.shadow * inc_value\n\n elif self.obj_rule == 'value':\n # maximize delta values\n self.objective = - inc_value\n\n # use only cost as objective\n elif self.obj_rule == 'cost':\n # maximize delta cost\n self.objective = - inc_cost\n\n else:\n raise Exception('Invalid obj_rule!')\n\n # regularization\n reg_loss = tf.reduce_sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n # weights = tf.trainable_variables() # all vars of your graph\n # reg_loss = tf.norm( weights,ord=1)\n\n # final loss\n self.loss = self.objective +reg_loss+.1*gini", "def build_loss(self):\n import tensorflow as tf\n\n y_1d = [tf.reduce_sum(tf.multiply(self.variables[\"y\"][i], self.variables[\"y_action\"][i]), axis=1) for i in range(len(self.variables[\"y\"]))]\n loss = np.sum([tf.nn.l2_loss(y_1d[i] - self.variables[\"y_true\"]) for i in range(len(y_1d))])\n\n l1_reg = 0\n l2_reg = 0\n\n keys = sorted(self.variables.keys())\n keys = [key for key in keys if critere_keys(key) and \"W\" in key]\n for key in keys:\n l1_reg += tf.reduce_sum(tf.abs(self.variables[key]))\n l2_reg += tf.nn.l2_loss(self.variables[key])\n\n self.loss = loss + self.alpha_reg * l1_reg + self.beta_reg * l2_reg\n\n self.train_step = tf.train.RMSPropOptimizer(self.decay_learning_rate,\n decay=0.99, momentum=0., centered=True).minimize(self.loss, global_step=self.global_step)", "def pseudo_loss(self, params, batches):\n loss = 0\n for batch in batches:\n states = batch[\"states\"]\n actions = batch[\"actions\"]\n returns = batch[\"returns\"]\n\n preds = self.predict_jax(params, states)\n\n baseline = jnp.mean(returns, axis=0)\n preds_select = jnp.take_along_axis(preds, jnp.expand_dims(actions, axis=2), axis=2).squeeze()\n 
loss += (-jnp.mean(jnp.sum(preds_select * (returns - baseline))))\n\n return loss + self.l2_regularizer(params, 0.001) # try to divide by len(batches)?", "def loss_function(agent, trajectories):\n # All ALL_CAPS variables are constants.\n\n # QUESTIOM: The trajectories already have behavior_logits, why is the need\n # to calculate the target_logits?\n # trajectories shape: list of trajectory\n # target_logits: ArgsActionLogits\n target_logits, baselines = agent.unroll(trajectories)\n\n trajectories = U.stack_namedtuple(trajectories) \n trajectories = U.namedtuple_zip(trajectories) \n\n loss_actor_critic = 0.\n if True:\n rewards = torch.tensor(trajectories.reward, dtype=torch.float32, device=device)\n print(\"trajectories.reward\", rewards) if debug else None \n print(\"trajectories.reward.shape\", rewards.shape) if debug else None\n\n # use normalize\n if False:\n scale_dim = 1\n rewards = (rewards - torch.mean(rewards, dim=scale_dim, keepdim=True)) / (torch.std(rewards, dim=scale_dim, keepdim=True) + 1e-9)\n\n print(\"trajectories.reward\", rewards) if debug else None \n print(\"trajectories.reward.shape\", rewards.shape) if debug else None\n\n lambda_loss = td_lambda_loss(baselines[0], rewards, trajectories)\n print(\"lambda_loss:\", lambda_loss) if 1 else None\n loss_actor_critic += (10. * lambda_loss)\n\n # we add the split_vtrace_pg_loss\n pg_loss = split_vtrace_pg_loss(target_logits, baselines[0], rewards, trajectories)\n print(\"pg_loss:\", pg_loss) if 1 else None\n loss_actor_critic += (1.0 * pg_loss)\n\n UPGO_WEIGHT = 1.0\n loss_upgo = UPGO_WEIGHT * split_upgo_loss(target_logits, baselines[0], trajectories)\n print(\"loss_upgo:\", loss_upgo) if debug else None\n\n # note: we want to maximize the entropy\n # so we gradient descent the -entropy\n # Original AlphaStar pseudocode is wrong\n # AlphaStar: loss_ent = entropy_loss(trajectories.behavior_logits, trajectories.masks)\n loss_ent = 3 * (- entropy_loss_for_all_arguments(target_logits, trajectories.masks))\n print(\"loss_ent:\", loss_ent) if 1 else None\n\n #loss_all = target_logits.action_type.sum()\n loss_all = loss_actor_critic + loss_ent # + loss_upgo\n\n loss_list = [lambda_loss, pg_loss, loss_upgo, loss_ent]\n\n return loss_all, loss_list", "def ss_loss_(self, batch):\n raise NotImplementedError", "def _create_loss_op(self):\n # 1.) The reconstruction loss, which forces the NN towards reconstructing more accurately the\n # given input. This function is configurable, but usually it is the Bernoulli negative log-likelihood.\n if self.cost_function == 'abs':\n reconstr_loss = tf.reduce_sum(tf.abs(self.x_decoded - self.x_in), 1)\n elif self.cost_function in ('mse', 'l2', 'square'):\n reconstr_loss = tf.reduce_sum(tf.squared_difference(self.x_in, self.x_decoded), 1)\n elif self.cost_function in ('xentropy', 'log'):\n reconstr_loss = \\\n -tf.reduce_sum(self.x_in * tf.log(1e-10 + self.x_decoded)\n + (1 - self.x_in) * tf.log(1e-10 + 1 - self.x_decoded),\n 1)\n else:\n raise ValueError(self.cost_function, \"Unknown cost function name!\")\n\n # 2.) The latent loss, which is defined as the Kullback Leibler divergence\n ## between the distribution in latent space induced by the encoder on\n # the data and some prior. This acts as a kind of regularizer.\n # This can be interpreted as the number of \"nats\" required\n # for transmitting the the latent space distribution given\n # the prior.\n latent_loss = -0.5 * tf.reduce_sum(1. 
+ self.z_log_sigma_sq\n - tf.square(self.z_mean)\n - tf.exp(self.z_log_sigma_sq), 1)\n\n self.loss_op = tf.reduce_mean(reconstr_loss + latent_loss) # average over batch\n tf.add_to_collection(\"losses\", self.loss_op)\n\n if self.learning_rate is not None:\n global_step = tf.train.get_or_create_global_step()\n self.train_op = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(\n self.loss_op,\n global_step=global_step,\n var_list=tf.get_collection(self.training_scope) if self.training_scope is not None else None)\n\n tf.add_to_collection(\"train_ops\", self.train_op)\n tf_logging.info(\"Added AdamOptimizer with learning rate: %.8f\" % self.learning_rate)\n\n tf.summary.scalar(\"latent_loss\", tf.reduce_mean(latent_loss))\n tf.summary.scalar(\"reconstruction_loss\", tf.reduce_mean(reconstr_loss))\n tf.summary.scalar(\"vae_loss\", self.loss_op)", "def vertex_loss(self, pred_vertices, gt_vertices, has_smpl):\n conf = has_smpl.float()\n loss_vertex = self.criterion_vertex(pred_vertices, gt_vertices)\n loss_vertex = (conf[:, None, None] * loss_vertex).mean()\n return loss_vertex", "def loss_fn(self, lbl, y):\n\n binlbl = self._to_device(lbl[:,0]>.5)\n # center = self._to_device(lbl[:,3]) \n offset = 5. * self._to_device(lbl[:,1:]) \n\n loss = self.criterion(y[:,:2], offset) \n loss2 = self.criterion2(y[:,2], binlbl)\n\n # loss3 = self.criterion(y[:,3], center)\n\n loss = loss + loss2\n return loss", "def compute_loss(self):\n self.test_logits = self.compute_logits()\n loss = tf.nn.sparse_softmax_cross_entropy_with_logits(\n labels=self.data.test_labels, logits=self.test_logits)\n cross_entropy_loss = tf.reduce_mean(loss)\n regularization = tf.reduce_sum(\n tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))\n loss = cross_entropy_loss + self.weight_decay * regularization\n return loss", "def compute_loss(self):\n def calc_loss(inputs, outputs):\n reconstruction_loss = tf.metrics.binary_crossentropy(\n tf_flat(inputs), tf_flat(outputs))\n reconstruction_loss *= OUT_SIZE * OUT_SIZE\n kl_loss = -0.5 * tf.reduce_sum(1.0 + self.log_sigma - tf.square(\n self.mu) - tf.exp(self.log_sigma), 1)\n return tf.reduce_mean(reconstruction_loss + kl_loss)\n return calc_loss", "def loss(self, targets, scores):\n return (2. 
* numpy.arctan(targets * scores) - 1.)**2", "def loss_fn(self, pred: Tensor, true: Tensor) -> Tensor:\n pass", "def loss(self, x, a, log_p, log_p_to_optimize, r):\n pass", "def triplet_hard_loss(\n y_true: TensorLike,\n y_pred: TensorLike,\n margin: FloatTensorLike = 1.0,\n soft: bool = False,\n distance_metric: Union[str, Callable] = \"L2\",\n) -> tf.Tensor:\n labels = tf.convert_to_tensor(y_true, name=\"labels\")\n embeddings = tf.convert_to_tensor(y_pred, name=\"embeddings\")\n\n convert_to_float32 = (\n embeddings.dtype == tf.dtypes.float16 or embeddings.dtype == tf.dtypes.bfloat16\n )\n precise_embeddings = (\n tf.cast(embeddings, tf.dtypes.float32) if convert_to_float32 else embeddings\n )\n\n # Reshape label tensor to [batch_size, 1].\n lshape = tf.shape(labels)\n labels = tf.reshape(labels, [lshape[0], 1])\n\n # Build pairwise squared distance matrix.\n if distance_metric == \"L2\":\n pdist_matrix = metric_learning.pairwise_distance(\n precise_embeddings, squared=False\n )\n\n elif distance_metric == \"squared-L2\":\n pdist_matrix = metric_learning.pairwise_distance(\n precise_embeddings, squared=True\n )\n\n elif distance_metric == \"angular\":\n pdist_matrix = metric_learning.angular_distance(precise_embeddings)\n\n else:\n pdist_matrix = distance_metric(precise_embeddings)\n\n # Build pairwise binary adjacency matrix.\n adjacency = tf.math.equal(labels, tf.transpose(labels))\n # Invert so we can select negatives only.\n adjacency_not = tf.math.logical_not(adjacency)\n\n adjacency_not = tf.cast(adjacency_not, dtype=tf.dtypes.float32)\n # hard negatives: smallest D_an.\n hard_negatives = _masked_minimum(pdist_matrix, adjacency_not)\n\n batch_size = tf.size(labels)\n\n adjacency = tf.cast(adjacency, dtype=tf.dtypes.float32)\n\n mask_positives = tf.cast(adjacency, dtype=tf.dtypes.float32) - tf.linalg.diag(\n tf.ones([batch_size])\n )\n\n # hard positives: largest D_ap.\n hard_positives = _masked_maximum(pdist_matrix, mask_positives)\n\n if soft:\n triplet_loss = tf.math.log1p(tf.math.exp(hard_positives - hard_negatives))\n else:\n triplet_loss = tf.maximum(hard_positives - hard_negatives + margin, 0.0)\n\n # Get final mean triplet loss\n triplet_loss = tf.reduce_mean(triplet_loss)\n\n if convert_to_float32:\n return tf.cast(triplet_loss, embeddings.dtype)\n else:\n return triplet_loss", "def compute_loss(\n action_probs: tf.Tensor, values: tf.Tensor, returns: tf.Tensor\n) -> tf.Tensor:\n\n advantage = returns - values\n td = tf.subtract(returns, values)\n\n # actor\n # action_log_probs = tf.math.log(action_probs)\n # actor_loss = -tf.math.reduce_sum(action_log_probs * advantage)\n action_log_probs = tf.math.log(action_probs)\n actor_loss = -tf.math.reduce_mean(action_log_probs * td)\n\n # critic\n # td = tf.subtract(returns, values)\n # critic_loss = tf.reduce_mean(tf.square(td))\n critic_loss = huber_loss(values, returns)\n\n tf.print(\"a_loss:\", actor_loss, \"c_loss:\", critic_loss)\n\n return actor_loss + critic_loss", "def my_custom_loss_func(ground_truth, predictions):\n\ttotal = len(predictions)\n\tdiff = np.abs(ground_truth - predictions)\n\ttruth_list = map(lambda x: x<40, diff)\n\ttruth_val = sum(truth_list)\n\treturn truth_val*1.0/total", "def _compute_loss(self, parameters, inputs, ground_truth):\n predictions = self.network_forward(parameters, inputs)\n loss = np.mean((ground_truth - predictions) ** 2)\n return loss", "def _compute_loss(self, parameters, inputs, ground_truth):\n predictions = self.network_forward(parameters, inputs)\n loss = np.mean((ground_truth - 
predictions) ** 2)\n return loss", "def calculate_loss(model, t, logits, labels):\n model_para = model.get_paramaters_list_reshape()\n myTF.calculate_para_dependence_loss(model_para,t)\n\n myTF.calculate_cross_entropy_loss(logits, labels)\n\n return tf.add_n(tf.get_collection('losses'), name='loss_total')", "def _compute_loss(self):\n state, action, reward, next_state, done = self.replay_buffer.sample(self.batch_size)\n\n state = torch.FloatTensor(state)\n next_state = torch.FloatTensor(next_state)\n action = torch.LongTensor(action)\n reward = torch.FloatTensor(reward)\n done = torch.FloatTensor(done)\n\n q_values = self.dqn(state)\n q_value = q_values.gather(1, action.unsqueeze(1)).squeeze(1)\n\n next_q_values = self.target_dqn(next_state)\n next_q_value = next_q_values.max(1)[0]\n target = reward + self.discount_factor * next_q_value * (1 - done)\n\n # loss = F.smooth_l1_loss(q_value, target.detach())\n loss = F.mse_loss(q_value, target.detach())\n\n return loss", "def get_loss(self, xs, y):\n \"*** YOUR CODE HERE question 4 ***\"\n return nn.SoftmaxLoss(self.run(xs), y)", "def compute_loss_lasso(y, tx, w, lambda_):\n e = y - tx.dot(w)\n\n return e.dot(e)/(2 * len(e)) + lambda_ * sum(abs(w))", "def _auxiliary_loss(self, expert_mask, router_prob):\n # density_1's shape: (dp_group, self.expert_dim)\n density_1 = self.reduce_mean(expert_mask, 1)\n # density_1_proxy's shape: (dp_group, self.expert_dim)\n density_1_proxy = self.reduce_mean2(router_prob, 1)\n loss = self.mul(density_1, density_1_proxy)\n loss = self.reduce_mean3(loss)\n loss = self.mul3(self.mul2(loss, self.expert_dim), self.expert_dim)\n return loss", "def JS_loss_fun_grad(teacher_preds, student_pred, graph):\n # if FLAGS.heat:\n # student_pred = logit2prob_heat(student_pred)\n # else:\n # student_pred = logit2prob(student_pred) \n student_pred = logit2prob(student_pred)\n loss = noisy_op.compute_loss(student_pred, teacher_preds, graph, name=\"nosiy_loss\")\n # loss.set_shape((1,))\n tf.add_to_collection('losses', loss)\n return tf.add_n(tf.get_collection('losses'), name='total_loss'), loss", "def loss(self, predicts, labels, objects_num):\n\n def cond1(num, object_num, loss, predict, label, nilboy):\n return num < object_num\n\n class_loss = tf.constant(0, tf.float32)\n object_loss = tf.constant(0, tf.float32)\n noobject_loss = tf.constant(0, tf.float32)\n coord_loss = tf.constant(0, tf.float32)\n loss = [0, 0, 0, 0]\n for i in range(self.batch_size):\n predict = predicts[i, :, :, :]\n label = labels[i, :, :]\n object_num = objects_num[i]\n nilboy = tf.ones([self.cell_size, self.cell_size, 1])\n tuple_results = tf.while_loop(cond1, self.body1,\n [\n tf.constant(0), object_num,\n [class_loss, object_loss, noobject_loss, coord_loss],\n predict, label, nilboy\n ])\n for j in range(4):\n loss[j] = loss[j] + tuple_results[2][j]\n nilboy = tuple_results[5]\n\n tf.add_to_collection('losses', (loss[0] + loss[1] + loss[2] + loss[3]) / self.batch_size)\n\n tf.summary.scalar('class_loss', loss[0] / self.batch_size)\n tf.summary.scalar('object_loss', loss[1] / self.batch_size)\n tf.summary.scalar('noobject_loss', loss[2] / self.batch_size)\n tf.summary.scalar('coord_loss', loss[3] / self.batch_size)\n tf.summary.scalar('weight_loss', tf.add_n(tf.get_collection('losses')) - (\n loss[0] + loss[1] + loss[2] + loss[3]) / self.batch_size)\n\n return tf.add_n(tf.get_collection('losses'), name='total_loss'), nilboy" ]
[ "0.7934357", "0.78243315", "0.76634", "0.76594365", "0.75780183", "0.75132746", "0.74581045", "0.7425377", "0.7383122", "0.7329623", "0.731566", "0.7292533", "0.7243651", "0.7238686", "0.7167928", "0.714028", "0.7125176", "0.7010623", "0.69959813", "0.6995111", "0.69010466", "0.6849907", "0.6660396", "0.6603207", "0.65054226", "0.6498778", "0.6443631", "0.64335525", "0.642287", "0.6390948", "0.6317659", "0.6282993", "0.62756014", "0.62610227", "0.62555796", "0.6247273", "0.6246459", "0.6242879", "0.6219352", "0.6209511", "0.6201265", "0.6200096", "0.61771244", "0.61720824", "0.6169857", "0.6169149", "0.6145157", "0.6145157", "0.6145157", "0.61136305", "0.61100966", "0.61039805", "0.61030656", "0.60966045", "0.6083494", "0.60764873", "0.6074647", "0.605518", "0.6044589", "0.6037897", "0.6034249", "0.603277", "0.6026163", "0.60185057", "0.6016546", "0.6014833", "0.6010225", "0.60068613", "0.5991926", "0.5980226", "0.5979534", "0.5969722", "0.5964137", "0.5962638", "0.5958292", "0.5958085", "0.595593", "0.5946112", "0.5936883", "0.5936036", "0.5921668", "0.5917352", "0.5917051", "0.59145784", "0.59068364", "0.58997613", "0.5886018", "0.5875157", "0.58722395", "0.58721083", "0.58568287", "0.5855652", "0.5855652", "0.58505714", "0.5848691", "0.58464044", "0.58459145", "0.58424324", "0.58343023", "0.5830938" ]
0.71021456
17
Function to rotate one vector to another, inspired by vrrotvec.m in MATLAB
def vrrotvec(a, b):
    a = normalize(a)
    b = normalize(b)
    ax = normalize(np.cross(a, b))
    # clamp the dot product at 1 so rounding error cannot break arccos
    angle = np.arccos(np.minimum(np.dot(a, b), [1]))
    if not np.any(ax):
        # a and b are (anti)parallel: build a fallback rotation axis from the
        # coordinate direction in which a has its smallest component
        absa = np.abs(a)
        mind = np.argmin(absa)
        c = np.zeros(3)
        c[mind] = 1
        ax = normalize(np.cross(a, c))
    # rotation vector in MATLAB vrrotvec form: [axis_x, axis_y, axis_z, angle]
    r = np.concatenate((ax, angle))
    return r
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def svecRotate(v, T):\n \n return svec(Rotate(smat(v), T))", "def _rot(theta, vec):\n\n rmat = scipy.array([[scipy.cos(theta), -1*scipy.sin(theta)],\n [scipy.sin(theta), scipy.cos(theta)]]) \n return scipy.dot(rmat,vec)", "def rotate_vectors(q, vec):\n rot_vec = []\n for i, v in enumerate(vec):\n rot_vec.append(q.rotate(v))\n return rot_vec", "def vec_rotate_right(x):\n return jnp.roll(x, 1)", "def rotate(vector, angle):\n return np.cos(angle) * vector[0] + np.sin(angle) * vector[1], \\\n -np.sin(angle) * vector[0] + np.cos(angle) * vector[1]", "def rotate(self, other: (float, \"Vector\")) -> \"Vector\":\n if isinstance(other, (int, float)):\n return self._rotate2D(other)\n else:\n return self._matrix_mul(other)", "def rotate(initial_vector, rotated_vector, other_vectors):\n\n init_vec_norm = normalize(initial_vector)\n rot_vec_norm = normalize(np.asarray(rotated_vector))\n middle_vec_norm = normalize(init_vec_norm + rot_vec_norm)\n first_reflector = init_vec_norm - middle_vec_norm\n second_reflector = middle_vec_norm - rot_vec_norm\n Q1 = householder(first_reflector)\n Q2 = householder(second_reflector)\n reflection_matrix = np.matmul(Q2, Q1)\n rotated_vectors = np.matmul(other_vectors, np.transpose(reflection_matrix))\n return rotated_vectors", "def rotate (vect, angle, axis):\n\n cosine = np.cos (angle)\n sine = np.sin (angle)\n\n return (vect * cosine + \\\n sine * np.cross (axis, vect) + \\\n np.dot (axis, vect) * (1 - cosine) * axis)", "def RotateVector(rotation, vector):\n return Vector(\n rotation.rot[0][0]*vector.x + rotation.rot[1][0]*vector.y + rotation.rot[2][0]*vector.z,\n rotation.rot[0][1]*vector.x + rotation.rot[1][1]*vector.y + rotation.rot[2][1]*vector.z,\n rotation.rot[0][2]*vector.x + rotation.rot[1][2]*vector.y + rotation.rot[2][2]*vector.z,\n vector.t\n )", "def rotvec2r(theta, v):\n v = arg2array(v);\n ct = cos(theta)\n st = sin(theta)\n vt = 1 - ct\n r = mat([[ct, -v[2] * st, v[1] * st], \\\n [v[2] * st, ct, -v[0] * st], \\\n [-v[1] * st, v[0] * st, ct]])\n return v * v.T * vt + r", "def zx_rotation(vector,theta):\r\n R = np.array([[np.cos(theta),0,np.sin(theta)],\r\n [0,1,0],\r\n [-np.sin(theta),0,np.cos(theta)]\r\n ])\r\n return np.dot(R,vector)", "def rotate(v: vect2d, angle: float) -> vect2d:\n vector = ((v.x * math.cos(angle) - v.y * math.sin(angle)),\n (v.x * math.sin(angle) + v.x * math.cos(angle)))\n return vector", "def build_rotation_matrix(vec_new, vec_orig):\n theta = np.arccos(np.dot(vec_new, vec_orig) / (np.linalg.norm(vec_new) * np.linalg.norm(vec_orig)))\n\n R = np.array([[np.cos(theta), -np.sin(theta)], [np.sin(theta), np.cos(theta)]])\n\n return R", "def rotvec2tr(theta, v):\n return r2t(rotvec2r(theta, v))", "def _rotate(polyreg, i=None, j=None, u=None, v=None, theta=None, R=None):\n # determine the rotation matrix based on inputs\n if R is not None:\n logger.debug(\"rotate: R=\\n{}\".format(R))\n if i is not None:\n raise ValueError(i)\n if j is not None:\n raise ValueError(j)\n if theta is not None:\n raise ValueError(theta)\n if u is not None:\n raise ValueError(u)\n if v is not None:\n raise ValueError(v)\n elif i is not None and j is not None and theta is not None:\n logger.info(\"rotate via indices and angle.\")\n if R is not None:\n raise ValueError(R)\n if u is not None:\n raise ValueError(u)\n if v is not None:\n raise ValueError(v)\n if i == j:\n raise ValueError(\"Must provide two unique basis vectors.\")\n R = givens_rotation_matrix(i, j, theta, polyreg.dim)\n elif u is not None and v is not None:\n logger.info(\"rotate via 2 
vectors.\")\n if R is not None:\n raise ValueError(R)\n if i is not None:\n raise ValueError(i)\n if j is not None:\n raise ValueError(j)\n if theta is not None:\n raise ValueError(theta)\n R = solve_rotation_ap(u, v)\n else:\n raise ValueError(\"R or (i and j and theta) or (u and v) \"\n \"must be defined.\")\n if isinstance(polyreg, Polytope):\n # Ensure that half space is normalized before rotation\n n, p = _hessian_normal(polyreg.A, polyreg.b)\n # Rotate the hyperplane normals\n polyreg.A = np.inner(n, R)\n polyreg.b = p\n else:\n # Rotate subregions\n for poly in polyreg.list_poly:\n _rotate(poly, None, None, R=R)\n # transform bbox and cheby\n if polyreg.bbox is not None:\n polyreg.bbox = (np.inner(polyreg.bbox[0].T, R).T,\n np.inner(polyreg.bbox[1].T, R).T)\n if polyreg._chebXc is not None:\n polyreg._chebXc = np.inner(polyreg._chebXc, R)\n return R", "def xy_rotation(vector,theta):\r\n R = np.array([[np.cos(theta), -np.sin(theta),0],\r\n [np.sin(theta), np.cos(theta),0],\r\n [0,0,1]\r\n ])\r\n return np.dot(R,vector)", "def rotate(q, v):\n if v.ndim == 1:\n qv = np.append(v,0)\n else:\n qv = np.hstack([v,np.zeros((len(v),1))])\n out = mult(q,qv)\n out = mult(out, inv(q))\n return out[:,:3]", "def vecRot(data, seq, euler_angles, **kwargs):\n from scipy.spatial.transform import Rotation as R\n r = R.from_euler(seq, euler_angles, **kwargs)\n return r.apply(data)", "def test_rotate_vec(self):\n\n quat = Quat.from_axis_angle_deg(Vec3(-1, -1, -1), 180.)\n vec = Vec3(1, 0, 0)\n\n rotated_vec = quat.rotate_vec(vec)\n\n self.assertAlmostEqual(-1/3.0, rotated_vec.x)\n self.assertAlmostEqual(2/3.0, rotated_vec.y)\n self.assertAlmostEqual(2/3.0, rotated_vec.z)", "def rotate_a(X,vector):\r\n\taxis_vector = (math.radians(-X)) * np.array([1,0,0])\r\n\tr = R.from_rotvec(axis_vector)\r\n\treturn list(r.apply(vector))", "def compute_rot(v):\n if v[0] >= 0:\n M = nd.eye(len(v))\n else:\n M = - nd.eye(len(v))\n for i in range(1, len(v)):\n if v[i] == 0:\n continue\n rot_minus_theta = nd.eye(len(v))\n temp = nd.dot(M, v)\n\n theta = nd.arctan(temp[i]/temp[0])\n c = nd.cos(theta)\n s = nd.sin(theta)\n\n rot_minus_theta[0,0] = c\n rot_minus_theta[i,i] = c\n rot_minus_theta[0,i] = s\n rot_minus_theta[i,0] = -s\n\n M = nd.dot(rot_minus_theta, M)\n return M", "def rotorconversion(x):\n return cf.MultiVector(layout, val_rotorconversion(x))", "def rotate_vector(vector, angle, anchor=(0, 0)):\n x = vector.x - anchor[0]\n y = vector.y - anchor[1]\n\n cos_theta = cos(angle)\n sin_theta = sin(angle)\n\n nx = x*cos_theta - y*sin_theta\n ny = x*sin_theta + y*cos_theta\n\n nx = nx + anchor[0]\n ny = ny + anchor[1]\n\n return Vector2D(nx, ny)", "def mirror_vector_vector(v1, v2):\n return subtract_vectors(v1, scale_vector(v2, 2 * dot_vectors(v1, v2)))", "def vec_rotate_left(x):\n return jnp.roll(x, -1)", "def apply_rotation_only(self, vector):\n return np.dot(self.rotation_matrix, vector)", "def rot(vec, angle, degrees=True):\r\n if degrees:\r\n angle = np.radians(angle)\r\n r = np.array([[np.cos(-angle), -np.sin(-angle)],\r\n [np.sin(-angle), np.cos(-angle)]])\r\n return r.dot(vec)", "def get_rotation_vector(R):\n v = np.array([R[1,2] - R[2,1],\n R[2,0] - R[0,1],\n R[0,1] - R[1,0]]) # eq. 
3.12 in [1], pp.66\n return v", "def vec_rotate_r2g(al, be, ga, lon, lat, urot, vrot, flag):\n\n # first get another coordinate\n if flag == 1:\n (rlon, rlat) = scalar_g2r(al, be, ga, lon, lat)\n else:\n rlon = lon\n rlat = lat\n (lon, lat) = scalar_r2g(al, be, ga, rlon, rlat)\n\n # then proceed...\n rad = mt.pi / 180\n al = al * rad\n be = be * rad\n ga = ga * rad\n\n rotate_matrix = np.zeros(shape=(3, 3))\n rotate_matrix[0, 0] = np.cos(ga) * np.cos(al) - np.sin(ga) * np.cos(be) * np.sin(al)\n rotate_matrix[0, 1] = np.cos(ga) * np.sin(al) + np.sin(ga) * np.cos(be) * np.cos(al)\n rotate_matrix[0, 2] = np.sin(ga) * np.sin(be)\n rotate_matrix[1, 0] = -np.sin(ga) * np.cos(al) - np.cos(ga) * np.cos(be) * np.sin(\n al\n )\n rotate_matrix[1, 1] = -np.sin(ga) * np.sin(al) + np.cos(ga) * np.cos(be) * np.cos(\n al\n )\n rotate_matrix[1, 2] = np.cos(ga) * np.sin(be)\n rotate_matrix[2, 0] = np.sin(be) * np.sin(al)\n rotate_matrix[2, 1] = -np.sin(be) * np.cos(al)\n rotate_matrix[2, 2] = np.cos(be)\n\n rotate_matrix = np.linalg.pinv(rotate_matrix) \n \n rlat = rlat * rad\n rlon = rlon * rad\n lat = lat * rad\n lon = lon * rad\n\n # vector in rotated Cartesian\n txg = -vrot * np.sin(rlat) * np.cos(rlon) - urot * np.sin(rlon)\n tyg = -vrot * np.sin(rlat) * np.sin(rlon) + urot * np.cos(rlon)\n tzg = vrot * np.cos(rlat)\n\n # vector in geo Cartesian\n txr = (\n rotate_matrix[0, 0] * txg\n + rotate_matrix[0, 1] * tyg\n + rotate_matrix[0, 2] * tzg\n )\n tyr = (\n rotate_matrix[1, 0] * txg\n + rotate_matrix[1, 1] * tyg\n + rotate_matrix[1, 2] * tzg\n )\n tzr = (\n rotate_matrix[2, 0] * txg\n + rotate_matrix[2, 1] * tyg\n + rotate_matrix[2, 2] * tzg\n )\n\n # vector in geo coordinate\n v = (\n -np.sin(lat) * np.cos(lon) * txr\n - np.sin(lat) * np.sin(lon) * tyr\n + np.cos(lat) * tzr\n )\n u = -np.sin(lon) * txr + np.cos(lon) * tyr\n\n u = np.array(u)\n v = np.array(v)\n\n return (u, v)", "def cpvrotate(self, other):\n return Vec2d(self.x*other.x - self.y*other.y, self.x*other.y + self.y*other.x)", "def rotate_vector ( angle, axis, old ):\n\n import numpy as np\n \n # Note that the axis vector should be normalized and we test for this\n # In general, the old vector need not be normalized, and the same goes for the result\n # although quite often in our applications they will be\n\n assert old.size == 3, 'Incorrect size of old'\n assert axis.size == 3, 'Incorrect size of axis'\n assert np.isclose(np.sum(axis**2),1.0), 'Non-unit vector {} {} {}'.format(*axis)\n\n c = np.cos ( angle )\n s = np.sin ( angle )\n proj = np.dot ( axis, old ) # The two vectors need not be perpendicular\n\n # Standard (Goldstein) rotation formula\n e = c * old + ( 1.0 - c ) * proj * axis + s * np.cross ( axis, old )\n\n return e", "def rotate(self,r):\n return r.hprod( self.hprod( r.inv() ) )", "def rotate2p(v1: vect2d, v2: vect2d, angle: float) -> vect2d:\n dx = v2.x - v1.x\n dy = v2.y - v1.y\n vector = vect2d((dx * math.cos(angle) - dy * math.sin(angle)),\n (dx * math.sin(angle) + dx * math.cos(angle)))\n vector += v1\n\n return vector", "def cpvunrotate(self, other):\n return Vec2d(self.x*other.x + self.y*other.y, self.y*other.x - self.x*other.y)", "def orient(ps, origin, v1, v2):\r\n \r\n ps = np.vstack((v1, v2, ps))\r\n ps -= origin\r\n if ps[0][1] == 0:\r\n a = 0\r\n else:\r\n a = np.arcsin(np.fabs(ps[0][1]) / np.sqrt(ps[0][1] ** 2 + ps[0][2] ** 2))\r\n if (ps[0][1] < 0 <= ps[0][2]) or (ps[0][1] > 0 > ps[0][2]):\r\n a = 2 * np.pi - a\r\n if (ps[0][1] * np.sin(a) + ps[0][2] * np.cos(a)) < 0:\r\n a = np.pi + a \r\n ps = rotate(a, ps, 
0)\r\n if ps[0][0] == 0:\r\n b = 0\r\n else:\r\n b = np.arcsin(np.fabs(ps[0][0]) / np.sqrt(ps[0][0] ** 2 + ps[0][2] ** 2))\r\n if (ps[0][0] < 0 and ps[0][2] < 0) or (ps[0][0] > 0 and ps[0][2] > 0):\r\n b = 2 * np.pi - b\r\n if (ps[0][2] * np.cos(b) - ps[0][0] * np.sin(b)) < 0:\r\n b = np.pi + b\r\n ps = rotate(b, ps, 1)\r\n if ps[1][1] == 0:\r\n c = 0\r\n else:\r\n c = np.arcsin(np.fabs(ps[1][1]) / np.sqrt(ps[1][0]**2 + ps[1][1]**2))\r\n if (ps[1][0] < 0 and ps[1][1] < 0) or (ps[1][0] > 0 and ps[1][1] > 0):\r\n c = 2 * np.pi - c\r\n if (ps[1][0] * np.cos(c) - ps[1][1] * np.sin(c)) < 0:\r\n c = np.pi + c\r\n ps = rotate(c, ps, 2)\r\n return ps[2:]", "def rot_from_axisangle(vec):\r\n angle = torch.norm(vec, 2, 2, True)\r\n axis = vec / (angle + 1e-7)\r\n\r\n ca = torch.cos(angle)\r\n sa = torch.sin(angle)\r\n C = 1 - ca\r\n\r\n x = axis[..., 0].unsqueeze(1)\r\n y = axis[..., 1].unsqueeze(1)\r\n z = axis[..., 2].unsqueeze(1)\r\n\r\n xs = x * sa\r\n ys = y * sa\r\n zs = z * sa\r\n xC = x * C\r\n yC = y * C\r\n zC = z * C\r\n xyC = x * yC\r\n yzC = y * zC\r\n zxC = z * xC\r\n\r\n rot = torch.zeros((vec.shape[0], 4, 4)).to(device=vec.device)\r\n\r\n rot[:, 0, 0] = torch.squeeze(x * xC + ca)\r\n rot[:, 0, 1] = torch.squeeze(xyC - zs)\r\n rot[:, 0, 2] = torch.squeeze(zxC + ys)\r\n rot[:, 1, 0] = torch.squeeze(xyC + zs)\r\n rot[:, 1, 1] = torch.squeeze(y * yC + ca)\r\n rot[:, 1, 2] = torch.squeeze(yzC - xs)\r\n rot[:, 2, 0] = torch.squeeze(zxC - ys)\r\n rot[:, 2, 1] = torch.squeeze(yzC + xs)\r\n rot[:, 2, 2] = torch.squeeze(z * zC + ca)\r\n rot[:, 3, 3] = 1\r\n\r\n return rot", "def vrrotvec2mat(r):\n s = np.sin(r[3])\n c = np.cos(r[3])\n t = 1 - c\n \n n = normalize(r[0:3])\n \n x = n[0]\n y = n[1]\n z = n[2]\n \n m = np.array(\n [[t*x*x + c, t*x*y - s*z, t*x*z + s*y],\n [t*x*y + s*z, t*y*y + c, t*y*z - s*x],\n [t*x*z - s*y, t*y*z + s*x, t*z*z + c]]\n )\n return m", "def test_rotate_vec_x(self):\n\n quat = Quat.from_axis_angle_deg(Vec3(1, 0, 0), 90.)\n vec = Vec3(1, 1, 1)\n\n rotated_vec = quat.rotate_vec(vec)\n\n # 90 deg around x moves y from positive to negative\n self.assertAlmostEqual(1.0, rotated_vec.x)\n self.assertAlmostEqual(-1.0, rotated_vec.y)\n self.assertAlmostEqual(1.0, rotated_vec.z)", "def rotate(vector, angle, inverse=False):\n gamma, beta, alpha = angle[0], angle[1], angle[2]\n\n # Rotation matrices around the X (gamma), Y (beta), and Z (alpha) axis\n RX = rot_axis(gamma, 0)\n RY = rot_axis(beta, 1)\n RZ = rot_axis(alpha, 2)\n\n # Composed rotation matrix with (RX, RY, RZ)\n if inverse:\n return np.dot(np.dot(np.dot(RX.T, RY.T), RZ.T), vector)\n else:\n return np.dot(np.dot(np.dot(RZ, RY), RX), vector)", "def find_rotation(a, b):\n a.shape = (3,)\n b.shape = (3,)\n\n a /= np.linalg.norm(a)\n b /= np.linalg.norm(b)\n \n v = np.cross(a, b)\n \n angle_AB = -1*vector_angle(a, b) \n \n print(angle_AB)\n s = np.linalg.norm(v) * np.sin(angle_AB)\n \n c = np.dot(a, b) * np.cos(angle_AB)\n \n # Rotation matrix, R = I + Vx + Vx^2 * (1-c)/s^2\n I = np.identity(3)\n Vx = np.array([[0, -v[2], v[1]], [v[2], 0, -v[0]], [-v[1], v[0], 0]])\n \n R = I + Vx + np.linalg.matrix_power(Vx, 2) / (1+c)\n return R", "def test_revolute(self):\n # Rotate around the z axis\n r = Joint.revolute(np.array([0, 0, 1]))\n t_mat = r(np.array([np.pi / 2]))\n rot_vec = np.dot(t_mat, np.array([1, 0, 0, 1]))[:3]\n self.assertTrue(np.allclose(\n rot_vec, np.array([0, 1, 0]), rtol=1e-5, atol=1e-5))", "def rotation_inv(R: np.array) -> np.array:\n return R.T", "def rotate_c(X,a_set,vector):\r\n\taxis_vector = 
math.radians(-X) * np.array([0,0,1])\r\n\tr = R.from_rotvec(axis_vector)\r\n\treturn list(r.apply(vector))", "def rotaxis2m(theta, vector):\n vector = vector.normalized()\n c = numpy.cos(theta)\n s = numpy.sin(theta)\n t = 1 - c\n x, y, z = vector.get_array()\n rot = numpy.zeros((3, 3))\n # 1st row\n rot[0, 0] = t * x * x + c\n rot[0, 1] = t * x * y - s * z\n rot[0, 2] = t * x * z + s * y\n # 2nd row\n rot[1, 0] = t * x * y + s * z\n rot[1, 1] = t * y * y + c\n rot[1, 2] = t * y * z - s * x\n # 3rd row\n rot[2, 0] = t * x * z - s * y\n rot[2, 1] = t * y * z + s * x\n rot[2, 2] = t * z * z + c\n return rot", "def vec_rotate_g2r(al, be, ga, lon, lat, ugeo, vgeo, flag):\n\n # first get another coordinate\n if flag == 1:\n (rlon, rlat) = scalar_g2r(al, be, ga, lon, lat)\n else:\n rlon = lon\n rlat = lat\n (lon, lat) = scalar_r2g(al, be, ga, rlon, rlat)\n\n # then proceed...\n rad = mt.pi / 180\n al = al * rad\n be = be * rad\n ga = ga * rad\n\n rotate_matrix = np.zeros(shape=(3, 3))\n rotate_matrix[0, 0] = np.cos(ga) * np.cos(al) - np.sin(ga) * np.cos(be) * np.sin(al)\n rotate_matrix[0, 1] = np.cos(ga) * np.sin(al) + np.sin(ga) * np.cos(be) * np.cos(al)\n rotate_matrix[0, 2] = np.sin(ga) * np.sin(be)\n rotate_matrix[1, 0] = -np.sin(ga) * np.cos(al) - np.cos(ga) * np.cos(be) * np.sin(\n al\n )\n rotate_matrix[1, 1] = -np.sin(ga) * np.sin(al) + np.cos(ga) * np.cos(be) * np.cos(\n al\n )\n rotate_matrix[1, 2] = np.cos(ga) * np.sin(be)\n rotate_matrix[2, 0] = np.sin(be) * np.sin(al)\n rotate_matrix[2, 1] = -np.sin(be) * np.cos(al)\n rotate_matrix[2, 2] = np.cos(be)\n\n #rotate_matrix = np.linalg.pinv(rotate_matrix) \n \n rlat = rlat * rad\n rlon = rlon * rad\n lat = lat * rad\n lon = lon * rad\n \n # vector in Cartesian \n txg = -vgeo * np.sin(lat) * np.cos(lon) - ugeo * np.sin(lon)\n tyg = -vgeo * np.sin(lat) * np.sin(lon) + ugeo * np.cos(lon)\n tzg = vgeo * np.cos(lat)\n\n # vector in rotated Cartesian\n txr = (\n rotate_matrix[0, 0] * txg\n + rotate_matrix[0, 1] * tyg\n + rotate_matrix[0, 2] * tzg\n )\n tyr = (\n rotate_matrix[1, 0] * txg\n + rotate_matrix[1, 1] * tyg\n + rotate_matrix[1, 2] * tzg\n )\n tzr = (\n rotate_matrix[2, 0] * txg\n + rotate_matrix[2, 1] * tyg\n + rotate_matrix[2, 2] * tzg\n )\n\n # vector in rotated coordinate\n v = (\n -np.sin(rlat) * np.cos(rlon) * txr\n - np.sin(rlat) * np.sin(rlon) * tyr\n + np.cos(rlat) * tzr\n )\n u = -np.sin(rlon) * txr + np.cos(rlon) * tyr\n\n u = np.array(u)\n v = np.array(v)\n\n return (u, v)", "def rotate(self, vector):\n if isinstance(vector, Quaternion):\n return self._rotate_quaternion(vector)\n q = Quaternion(vector=vector)\n a = self._rotate_quaternion(q).vector\n if isinstance(vector, list):\n l = [x for x in a]\n return l\n elif isinstance(vector, tuple):\n l = [x for x in a]\n return tuple(l)\n else:\n return a", "def yz_rotation(vector,theta):\r\n R = np.array([[1,0,0],\r\n [0, np.cos(theta),-np.sin(theta)],\r\n [0, np.sin(theta), np.cos(theta)]\r\n ])\r\n return np.dot(R,vector)", "def vec_rotate_right_n(x, n):\n return jnp.roll(x, n)", "def qrot(q, v):\n assert q.shape[-1] == 4\n assert v.shape[-1] == 3\n assert q.shape[:-1] == v.shape[:-1]\n\n qvec = q[..., 1:]\n uv = torch.cross(qvec.double(), v.double(), dim=len(q.shape) - 1)\n uuv = torch.cross(qvec.double(), uv.double(), dim=len(q.shape) - 1)\n return v + 2 * (q[..., :1] * uv + uuv)", "def rotation_to_align_a_with_b(a, b):\n norm_a = np.linalg.norm(a)\n norm_b = np.linalg.norm(b)\n if not np.allclose(a, a/norm_a):\n print('Input a vector not unit normal - normalising')\n a = 
a / norm_a\n print(a)\n if not np.allclose(b, b/norm_b):\n print('Input b vector not unit normal - normalising')\n b = b / norm_b\n print(b)\n\n v = np.cross(a,b)\n #s = np.linalg.norm(v)\n c = np.dot(a,b)\n f = 1./(1. + c)\n vmat = np.array([[ 0, -v[2], v[1]],\n [ v[2], 0, -v[0]],\n [-v[1], v[0], 0]])\n return np.eye(3,3) + vmat + f *(np.matmul(vmat,vmat))", "def rotateFunction(rotCenter, rotVect):\n rotn = (1 / rotVect.norm()) * rotVect\n\n def rot(coords):\n oldVect = vector(coords) - vector(rotCenter)\n newVect = (math.cos(rotVect.norm()) * oldVect +\n math.sin(rotVect.norm()) * rotn * oldVect +\n ((1 - math.cos(rotVect.norm())) *\n (rotn.dotProduct(oldVect)) * rotn))\n return (rotCenter[0] + newVect.x,\n rotCenter[1] + newVect.y,\n rotCenter[2] + newVect.z)\n return rot", "def rot(wx, wy, order, dist):\n for _ in range(dist//90):\n if order == \"R\":\n wx, wy = wy, -wx\n elif order == \"L\":\n wx, wy = -wy, wx\n return wx, wy", "def calculate_angles_to_rotate_vector(self, starting_vec, ending_vec, starting_angles=None, search_method=0):\n# print \"starting_vec, ending_vec\", starting_vec, ending_vec\n\n # We want to find a rotation matrix R\n # R puts starting_vec onto ending_vec\n # But R has the freedom to rotate all the other axes around ending_vec - all\n # of these are equally valid.\n\n if np.allclose(vector_length(starting_vec), 0) or np.allclose(vector_length(ending_vec), 0):\n return None\n\n #Normalize our vectors\n starting_vec = starting_vec/vector_length(starting_vec)\n ending_vec = ending_vec/vector_length(ending_vec)\n\n #Find an initial rotation matrix.\n # We'll rotate around the cross-product of start x end, staying in the plane defined by these vectors\n rotation_axis = np.cross(starting_vec, ending_vec)\n #TODO: check for too-close vectors to get a valid cross-product\n angle = np.arccos( np.dot(starting_vec, ending_vec) )\n initial_R = numpy_utils.rotation_matrix_around_vector(rotation_axis, angle)\n\n result_vec = np.dot(initial_R, column(starting_vec)).flatten()\n #Check that the matrices match, but not if all are NaN\n #if not np.any(np.isnan(result_vec) and np.isnan(ending_vec)):\n if not np.any(np.isnan(result_vec)):\n assert np.allclose( result_vec, ending_vec), \"initial rotation matrix makes the correct rotation. 
Got %s, expected %s\" % ( result_vec, ending_vec)\n\n\n def optimize(start, stop, step):\n \"\"\"Routine to optimize by brute force\"\"\"\n #Go through every angle\n rot_angle_list = np.arange(start, stop, step)\n fitness_list = []\n best_angles_list = []\n for (i, rot_angle) in enumerate(rot_angle_list):\n (fitness, best_angles) = self._angle_fitness(rot_angle, initial_R, ending_vec, starting_vec)\n fitness_list.append(fitness)\n best_angles_list.append(best_angles)\n #Find the best result\n best_index = np.argmin(fitness_list)\n best_rot_angle = rot_angle_list[best_index]\n best_angles = best_angles_list[best_index]\n return (best_rot_angle, best_angles)\n\n\n def optimize_c_code(start, stop, step):\n \"\"\"Routine to optimize by brute force\"\"\"\n #Go through every angle\n rot_angle_list = np.arange(start, stop, step)\n (best_rot_angle, best_angles) = self._angle_fitness_brute(rot_angle_list, initial_R, ending_vec, starting_vec)\n return (best_rot_angle, best_angles)\n\n args = (initial_R, ending_vec, starting_vec)\n\n if search_method:\n #--- scipy optimize ----\n\n # Get a starting point by brute force \n step = np.deg2rad(2)\n (best_rot_angle, best_angles) = optimize_c_code(-2.2*pi, pi*2.2, step)\n\n # And optimize with that\n if False:\n x0 = best_rot_angle\n res = scipy.optimize.fminbound(self._angle_fitness, 0, 2*pi, args, xtol=4e-3, disp=0, maxfun=100, full_output=0)\n best_rot_angle = res\n else:\n x0 = np.array([ best_rot_angle ])\n res = scipy.optimize.fmin(self._angle_fitness_python, x0, args, xtol=4e-3, ftol=1e-2, disp=0, maxiter=100)\n best_rot_angle = res.reshape( (1) )[0] #avoid error with 0-dimension array\n\n #Call the same function to get the best angles too\n (fitness, best_angles) = self._angle_fitness_python(best_rot_angle, *args)\n\n else:\n #--- semi-brute optimization routine ----\n #for optimize_func in [optimize, optimize_c_code]:\n step = np.deg2rad(2)\n # (best_rot_angle, best_angles) = optimize_c_code(-0.2*pi, pi*2.2, step)\n (best_rot_angle, best_angles) = optimize_c_code(-1.2*pi, pi*1.2, step)\n for x in xrange(4):\n newstep = step/10\n (best_rot_angle, best_angles) = optimize_c_code(best_rot_angle-step, best_rot_angle+step, newstep)\n step = newstep\n\n #Optimized angles\n return best_angles", "def from_rotvec(rotvec: Variable) -> Variable:\n from scipy.spatial.transform import Rotation as R\n supported = [units.deg, units.rad]\n if rotvec.unit not in supported:\n raise UnitError(f\"Rotation vector unit must be one of {supported}.\")\n r = R.from_rotvec(rotvec.values, degrees=rotvec.unit == units.deg)\n return matrices(dims=rotvec.dims, values=r.as_matrix())", "def test_rotation(self):\n quat_rotated = rowan.rotate(input1, vector_inputs)\n\n matrices = rowan.to_matrix(input1)\n matrix_rotated = np.einsum(\"ijk,ki->ij\", matrices, vector_inputs.T)\n self.assertTrue(np.allclose(matrix_rotated, quat_rotated))", "def vrotate(arr, coord_in, coord_out, coord_type_in, coord_type_out):\n # pylint: disable=too-many-return-statements\n if coord_type_in == coord_type_out:\n return arr\n\n coord_out = None if coord_out is None else asarray(coord_out)\n coord_in = None if coord_in is None else asarray(coord_in)\n\n if coord_type_in == GEODETIC_ABOVE_WGS84:\n if coord_type_out == GEODETIC_ABOVE_WGS84:\n return arr\n elif coord_type_out == GEOCENTRIC_SPHERICAL:\n return vrot_sph2geod(arr, coord_out[..., 0] - coord_in[..., 0])\n elif coord_type_out == GEOCENTRIC_CARTESIAN:\n return vrot_sph2cart(arr, coord_in[..., 0], coord_in[..., 1])\n\n elif coord_type_in == 
GEOCENTRIC_SPHERICAL:\n if coord_type_out == GEODETIC_ABOVE_WGS84:\n return vrot_sph2geod(arr, coord_out[..., 0] - coord_in[..., 0])\n elif coord_type_out == GEOCENTRIC_CARTESIAN:\n return vrot_sph2cart(arr, coord_in[..., 0], coord_in[..., 1])\n\n elif coord_type_in == GEOCENTRIC_CARTESIAN:\n if coord_type_out in SPHERICAL_COORD_TYPES:\n return vrot_cart2sph(arr, coord_out[..., 0], coord_out[..., 1])\n\n raise ValueError(\"Unsupported coordinate system type!\")", "def vector_polar(v):\n return vector_mag(v), vector_angle(v)", "def qrot(q, v):\n assert q.shape[-1] == 4\n assert v.shape[-1] == 3\n assert q.shape[:-1] == v.shape[:-1]\n\n original_shape = v.shape\n q = q.view(-1, 4)\n v = v.view(-1, 3)\n\n qvec = q[:, 1:]\n uv = torch.cross(qvec, v, dim=1)\n uuv = torch.cross(qvec, uv, dim=1)\n return (v + 2 * (q[:, :1] * uv + uuv)).view(original_shape)", "def vect_from_lspe_to_earth (self, vector, time):\n\n position_coord = self.lspe_coordinates (time)\n # The following code has been optimized:\n # position_vector = coord_to_pointing (position_coord)\n # angle = np.arccos (np.dot (self.spin_axis_lspe (time), position_vector))\n # and is therefore a one-line assignment: \n angle = np.pi * 0.5 - position_coord[0]\n rot_axis = np.array ([-np.sin (position_coord[1]),\n np.cos (position_coord[1]),\n 0])\n\n return rotate (vector, angle, rot_axis)", "def calculate_angles_to_rotate_vector(self, starting_vec, ending_vec, starting_angles=None, search_method=0):\n #Find the starting rotation matrix\n if not starting_angles is None:\n (phi, chi, omega) = starting_angles[0:3]\n starting_rot_matrix = numpy_utils.rotation_matrix(phi, chi, omega)\n #Rotate the starting vector\n starting_vec = np.dot(starting_rot_matrix, column(starting_vec)).flatten()\n\n #Find the rotation matrix that satisfies ending_vec = R . starting_vec\n\n #The cross product of q0 X q_over_a gives a rotation axis to use\n rotation_axis = np.cross(starting_vec, ending_vec)\n\n #Now we find the rotation angle about that axis that puts q0 on q_over_a\n angle = np.arccos( np.dot(starting_vec, ending_vec) / (vector_length(starting_vec)*vector_length(ending_vec)))\n\n #Make the rot. 
matrix\n R = numpy_utils.rotation_matrix_around_vector(rotation_axis, angle)\n\n if not starting_angles is None:\n #The final rotation we want is starting_rot_matrix 1st; R second.\n # So this is the resulting matrix\n R = np.dot(R, starting_rot_matrix)\n\n #The function finds some angles that work\n angles = numpy_utils.angles_from_rotation_matrix(R)\n\n #Position is always allowed\n return (angles)", "def carla_rotation_to_directional_numpy_vector(numpy_vector, carla_rotation):\n rotation_matrix = carla_rotation_to_numpy_rotation_matrix(carla_rotation)\n # directional_vector = numpy.array([1, 0, 0])\n rotated_directional_vector = rotation_matrix.dot(numpy_vector)\n return rotated_directional_vector", "def rotate(points, rot_vecs):\n theta = np.linalg.norm(rot_vecs, axis=1)[:, np.newaxis]\n with np.errstate(invalid='ignore'):\n v = rot_vecs / theta\n v = np.nan_to_num(v)\n dot = np.sum(points * v, axis=1)[:, np.newaxis]\n cos_theta = np.cos(theta)\n sin_theta = np.sin(theta)\n\n return cos_theta * points + sin_theta * np.cross(v, points) + dot * (1 - cos_theta) * v", "def rotate(points, rot_vecs):\n theta = np.linalg.norm(rot_vecs, axis=1)[:, np.newaxis]\n with np.errstate(invalid='ignore'):\n v = rot_vecs / theta\n v = np.nan_to_num(v)\n dot = np.sum(points * v, axis=1)[:, np.newaxis]\n cos_theta = np.cos(theta)\n sin_theta = np.sin(theta)\n\n return cos_theta * points + sin_theta * np.cross(v, points) + dot * (1 - cos_theta) * v", "def rotate(points, rot_vecs):\n theta = np.linalg.norm(rot_vecs, axis=1)[:, np.newaxis]\n with np.errstate(invalid='ignore'):\n v = rot_vecs / theta\n v = np.nan_to_num(v)\n dot = np.sum(points * v, axis=1)[:, np.newaxis]\n cos_theta = np.cos(theta)\n sin_theta = np.sin(theta)\n\n return cos_theta * points + sin_theta * np.cross(v, points) + dot * (1 - cos_theta) * v", "def rotate_matrix_from_vectors(vec1, vec2):\n norm = lambda vec: (vec[0]**2 + vec[1]**2 + vec[2]**2)**0.5\n nvec1 = vec1/norm(vec1)\n nvec2 = vec2/norm(vec2)\n if(norm(nvec1 - nvec2)<= 0.001):\n return np.eye(3)\n if(norm(nvec1 + nvec2)<= 0.001):\n return np.eye(3)*-1\n cross = lambda v1, v2 : np.array([v1[1]*v2[2] - v2[1]*v1[2], v1[2]*v2[0] - v1[0]*v2[2], v1[0]*v2[1] - v1[1]*v2[0]]) \n dot = lambda v1,v2: v1[0]*v2[0]+v1[1]*v2[1]+v1[2]*v2[2]\n skew_symmetric = lambda vec: np.array([[0,-vec[2],vec[1]],[vec[2],0,-vec[0]],[-vec[1],vec[0],0]])\n costh = dot(nvec1, nvec2)\n axis = cross(nvec1, nvec2)\n AXIS = skew_symmetric(axis)\n sinth = (axis[0]**2+ axis[1]**2 + axis[2]**2)**0.5\n rot = np.eye(3) + AXIS + ((1-costh)/(sinth**2))*(AXIS@AXIS)\n return rot", "def test_rotate_around_v3_x_axis(self):\n from pedemath.vec3 import rotate_around_vector_v3\n\n vec_a = Vec3(3, 4, 5)\n vec_b = Vec3(1, 0, 0)\n\n result = rotate_around_vector_v3(vec_a, math.pi, vec_b)\n expected = Vec3(3, -4, -5)\n\n self.assertAlmostEqual(result.x, expected.x)\n self.assertAlmostEqual(result.y, expected.y)\n self.assertAlmostEqual(result.z, expected.z)", "def quatPassiveRot(q, v):\n\tv_q = np.zeros((4, 1))\n\tv_q[1:] = v\n\tv_qnew = quatLeftMat(q).T @ quatRightMat(q) @ v_q\n\treturn v_qnew[1:]", "def rotate_vector(x, y, z, theta_x=None, theta_y=None, theta_z=None):\n if not np:\n raise PysimmError('pysimm.calc.rotate_vector function requires numpy')\n xt = random() * 2 * pi if theta_x is None else theta_x\n yt = random() * 2 * pi if theta_y is None else theta_y\n zt = random() * 2 * pi if theta_z is None else theta_z\n\n c = np.array((x, y, z))\n rot_mat_x = np.array([[1, 0, 0],\n [0, cos(xt), -sin(xt)],\n [0, sin(xt), 
cos(xt)]])\n rot_mat_y = np.array([[cos(yt), 0, sin(yt)],\n [0, 1, 0],\n [-sin(yt), 0, cos(yt)]])\n rot_mat_z = np.array([[cos(zt), -sin(zt), 0],\n [sin(zt), cos(zt), 0],\n [0, 0, 1]])\n\n return list(np.dot(rot_mat_z, np.dot(rot_mat_y, np.dot(rot_mat_x, c))))", "def rotation(self):\n\t\treturn self.piv.a.rotate.v", "def rotate(points, rot_vecs):\n theta = np.linalg.norm(rot_vecs, axis=1)[:, np.newaxis] #np.newaxis converts this into a column vector.\n with np.errstate(invalid='ignore'):\n v = rot_vecs / theta\n v = np.nan_to_num(v)\n \n check = (theta!=0).astype(int)\n dot = np.sum(points * v, axis=1)[:, np.newaxis]\n cos_theta = np.cos(theta)\n sin_theta = np.sin(theta)\n return (cos_theta * points) + check*(((1 - cos_theta) * v * dot) + (sin_theta * np.cross(v, points)))", "def test_from_two_vectors(self):\r\n for _ in range(20):\r\n v0 = np.random.randn(3)\r\n v1 = np.random.randn(3)\r\n v0 /= np.linalg.norm(v0)\r\n v1 /= np.linalg.norm(v1)\r\n\r\n q = from_two_vectors(v0, v1)\r\n R = to_rotation(q)\r\n\r\n zero_vec = R @ v0 - v1\r\n self.assertAlmostEqual(np.linalg.norm(zero_vec), 0.0)\r\n\r\n q_inv = from_two_vectors(v1, v0)\r\n R_inv = to_rotation(q_inv)\r\n zero_matrix = R @ R_inv - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)", "def rotate_phasor(r, r1, r2):\n return (r - r2) / (r1 - r2)", "def solve_rotation_ap(u, v):\n # TODO: Assert vectors are non-zero and non-parallel aka exterior\n # product is non-zero\n N = u.size # the number of dimensions\n uv = np.stack([u, v], axis=1) # the plane of rotation\n M = np.identity(N) # stores the rotations for rorienting reference frame\n # ensure u has positive basis0 component\n if uv[0, 0] < 0:\n M[0, 0] = -1\n M[1, 1] = -1\n uv = M.dot(uv)\n # align uv plane with the basis01 plane and u with basis0.\n for c in range(2):\n for r in range(N - 1, c, -1):\n if uv[r, c] != 0: # skip rotations when theta will be zero\n theta = np.arctan2(uv[r, c], uv[r - 1, c])\n Mk = givens_rotation_matrix(r, r - 1, theta, N)\n uv = Mk.dot(uv)\n M = Mk.dot(M)\n # rotate u onto v\n theta = 2 * np.arctan2(uv[1, 1], uv[0, 1])\n logger.debug(\n \"solve_rotation_ap: {d} degree rotation\".format(\n d=180 * theta / np.pi))\n R = givens_rotation_matrix(0, 1, theta, N)\n # perform M rotations in reverse order\n M_inverse = M.T\n R = M_inverse.dot(R.dot(M))\n return R", "def vector_angle(v):\n assert len(v) == 2\n x, y = v\n return np.arctan2(y, x)", "def Rotation(v, theta):\n\n v = np.array(v)\n if v.shape != (3,) or abs(v.dot(v) - 1.0) > 1e-8 or not np.all(np.isreal(v)):\n raise ValueError('Rotation vector v should be a 3D real unit vector.')\n\n return np.cos(theta/2) * Identity() - 1j * np.sin(theta/2) * (\n v[0] * PauliX() + v[1] * PauliY() + v[2] * PauliZ())", "def rotation(x1, z1, x2, z2):\n e1 = np.zeros(shape=(3, 3))\n e2 = np.zeros(shape=(3, 3))\n e1[0, :] = x1 / np.linalg.norm(x1)\n e1[2, :] = z1 / np.linalg.norm(z1)\n e1[1, :] = np.cross(e1[2, :], e1[0, :])\n e2[0, :] = x2 / np.linalg.norm(x2)\n e2[2, :] = z2 / np.linalg.norm(z2)\n e2[1, :] = np.cross(e2[2, :], e2[0, :])\n R = np.zeros(shape=(3, 3))\n for i in range(3):\n for j in range(3):\n R[i, j] = np.dot(e1[i, :], e2[j, :])\n R = np.transpose(R)\n return R", "def trf_rotation(_V, _ang, _P):\r\n normFact = 1./sqrt(_V[0]*_V[0] + _V[1]*_V[1] + _V[2]*_V[2]);\r\n axVect = [normFact*_V[0], normFact*_V[1], normFact*_V[2]]\r\n VxVx = axVect[0]*axVect[0]\r\n VyVy = axVect[1]*axVect[1]\r\n VzVz = axVect[2]*axVect[2]\r\n cosAng = cos(_ang)\r\n sinAng = sin(_ang)\r\n one_m_cos = 1. 
- cosAng\r\n one_m_cosVxVy = one_m_cos*axVect[0]*axVect[1]\r\n one_m_cosVxVz = one_m_cos*axVect[0]*axVect[2]\r\n one_m_cosVyVz = one_m_cos*axVect[1]*axVect[2]\r\n sinVx = sinAng*axVect[0]\r\n sinVy = sinAng*axVect[1]\r\n sinVz = sinAng*axVect[2]\r\n st0 = [VxVx + cosAng*(VyVy + VzVz), one_m_cosVxVy - sinVz, one_m_cosVxVz + sinVy]\r\n st1 = [one_m_cosVxVy + sinVz, VyVy + cosAng*(VxVx + VzVz), one_m_cosVyVz - sinVx]\r\n st2 = [one_m_cosVxVz - sinVy, one_m_cosVyVz + sinVx, VzVz + cosAng*(VxVx + VyVy)]\r\n M = [st0, st1, st2]\r\n st00 = [1. - st0[0], -st0[1], -st0[2]]\r\n st01 = [-st1[0], 1. - st1[1], -st1[2]]\r\n st02 = [-st2[0], -st2[0], 1. - st2[2]]\r\n M0 = [st00, st01, st02]\r\n V = matr_prod(M0, _P)\r\n return [M, V]", "def rotate2(x, angle, origin=(0, 0)):\n origin = np.asarray(origin)\n x = np.asarray(x) - origin\n r = rotation_matrix2(angle)\n return x.dot(r.T) + origin", "def atan2_vec(vector):\n return -np.arctan2(vector[1], vector[0])", "def getEllipsYZRotMatrix(a1, a2):\n adir = a2 - a1\n amid = a1 + 0.5 * adir\n kath = np.sqrt((adir[0] * adir[0] + adir[1] * adir[1]) / 4.0)\n octantA2 = octant(a2)\n theta = np.arctan( abs( (adir[2]/2) / kath) )\n #[1, 4, 6, 7 ] => left rotation\n #[2, 3, 5, 8 ] => right rotation\n if octantA2 in [2, 3, 5, 8]: \n theta = -theta \n print \"theta =\" , np.rad2deg(theta)\n RotY = np.matrix( [ [ np.cos(theta), 0.0, np.sin(theta) ],\n [ 0.0 , 1.0, 0.0 ],\n [ -np.sin(theta), 0.0, np.cos(theta) ]\n ]) \n \n psi = np.arctan( abs( adir[1] / adir[0] ) )\n #[2, 4, 6, 8 ] => left rotation\n #[1, 3, 5, 7 ] => right rotation\n if octantA2 in [1, 3, 5, 7]:\n psi = -psi\n print \"psi =\" , np.rad2deg(psi)\n RotZ = np.matrix( [ [ np.cos(psi), -np.sin(psi), 0.0 ],\n [ np.sin(psi), np.cos(psi), 0.0 ],\n [ 0.0 , 0.0 , 1.0 ]\n ])\n return np.asarray( RotY * RotZ )", "def test_rotate_vec_y(self):\n\n quat = Quat.from_axis_angle_deg(Vec3(0, 1, 0), 90.)\n vec = Vec3(1, 1, 1)\n\n rotated_vec = quat.rotate_vec(vec)\n\n # 90 deg around y moves z from positive to negative\n self.assertAlmostEqual(1.0, rotated_vec.x)\n self.assertAlmostEqual(1.0, rotated_vec.y)\n self.assertAlmostEqual(-1.0, rotated_vec.z)", "def quatActiveRot(q, v):\n\tv_q = np.zeros((4, 1))\n\tv_q[1:] = v\n\tv_qnew = quatLeftMat(q) @ quatRightMat(q).T @ v_q\n\treturn v_qnew[1:]", "def rotate_toward(initial_vector, final_vector, other_vectors, degrees: float = 5):\n final_vector = normalize(final_vector)\n initial_vector = normalize(initial_vector)\n cos_phi = np.dot(initial_vector, final_vector)\n theta = degrees * np.pi / 180\n cos_theta = np.cos(theta)\n phi = np.arccos(cos_phi)\n if phi < theta:\n return (rotate(initial_vector, final_vector, other_vectors), True)\n cos_phi_theta = np.cos(phi - theta)\n A = np.asarray([[cos_phi, 1], [1, cos_phi]])\n B = np.asarray([cos_phi_theta, cos_theta])\n x = np.linalg.solve(A, B)\n rotated_vector = x[0] * initial_vector + x[1] * final_vector\n return (rotate(initial_vector, rotated_vector, other_vectors), False)", "def test_rotate_around_v3_z_axis(self):\n from pedemath.vec3 import rotate_around_vector_v3\n\n vec_a = Vec3(3, 4, 5)\n vec_b = Vec3(0, 0, 1)\n\n result = rotate_around_vector_v3(vec_a, math.pi, vec_b)\n expected = Vec3(-3, -4, 5)\n\n self.assertAlmostEqual(result.x, expected.x)\n self.assertAlmostEqual(result.y, expected.y)\n self.assertAlmostEqual(result.z, expected.z)", "def rotmat(p, q):\n rot = numpy.dot(refmat(q, -p), refmat(p, -p))\n return rot", "def test_rotate_vec_z(self):\n\n quat = Quat.from_axis_angle_deg(Vec3(0, 0, 1), 90.)\n vec = Vec3(1, 1, 
1)\n\n rotated_vec = quat.rotate_vec(vec)\n\n # 90 deg around z moves x from positive to negative\n self.assertAlmostEqual(-1.0, rotated_vec.x)\n self.assertAlmostEqual(1.0, rotated_vec.y)\n self.assertAlmostEqual(1.0, rotated_vec.z)", "def orientToXYZR( a, b ):\n if allclose(a,b):\n return (0,1,0,0)\n an,bn = normalise( (a,b) )\n angle = arccos(dot(an,bn))\n x,y,z = crossProduct( a, b )[0]\n if allclose( (x,y,z), 0.0):\n y = 1.0\n return (x,y,z,angle)", "def getOrientationVect(self, a,b):\r\n return np.array(a)-np.array(b)", "def cal_rotation_matrix(nvec=None, dest=[0., 0., 1]):\n import numpy.linalg as lag\n import math\n # rotation axis\n dest = np.asarray(dest) # towards +z direction.\n\n if lag.norm(nvec) != 1.0:\n nvec = nvec / lag.norm(nvec)\n if lag.norm(dest) != 1.0:\n dest = dest / lag.norm(dest)\n\n print(nvec, dest)\n r_axis = np.cross(nvec, dest)\n angle = math.acos(np.dot(nvec, dest))\n\n return _get_rotation_matrix(r_axis, angle)", "def rotvec_to_rotmat(rot_vecs):\n l1norm = torch.norm(rot_vecs + 1e-8, p=2, dim=1)\n angle = torch.unsqueeze(l1norm, -1)\n normalized = torch.div(rot_vecs, angle)\n angle = angle * 0.5\n v_cos = torch.cos(angle)\n v_sin = torch.sin(angle)\n quat = torch.cat([v_cos, v_sin * normalized], dim=1)\n return quat_to_rotmat(quat)", "def vec_rotate_left_n(x, n):\n return jnp.roll(x, -n)", "def vec_angle_deg(v1,v2):\r\n \r\n return math.degrees(vec_angle_rad(v1,v2))", "def test_to_rotation(self):\r\n q = np.array([-1, 1, 3, 2])\r\n q = q / np.linalg.norm(q)\r\n R_gt = np.array([\r\n [-1/3., -14/15., -2/15.],\r\n [2/3., -1/3., 2/3.],\r\n [-2/3., 2/15., 11/15.]]).T\r\n R = to_rotation(q)\r\n\r\n zero_matrix = R - R_gt\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n for _ in range(20):\r\n q = np.random.randn(4)\r\n q /= np.linalg.norm(q)\r\n q_inv = quaternion_conjugate(q)\r\n\r\n R = to_rotation(q)\r\n R_inv = to_rotation(q_inv)\r\n\r\n zero_matrix = R @ R_inv - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n # orthogonal matrix\r\n zero_matrix = R @ R.T - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)", "def rotate_to_local(self,vxyz):\n return sp.mxv(self.mtxtofov,vxyz)", "def rotate(self, a):\n ca = cos(a)\n sa = sin(a)\n self.v = Matrix([\n [ca, -sa],\n [sa, ca]\n ]) @ self.v\n return self", "def unit(direction):\r\n return Vector(0, -1).rotate(direction)", "def rotate(X):\n return X", "def CombineRotation(a, b):\n # Use matrix multiplication: c = b*a.\n # We put 'b' on the left and 'a' on the right because,\n # just like when you use a matrix M to rotate a vector V,\n # you put the M on the left in the product M*V.\n # We can think of this as 'b' rotating all the 3 column vectors in 'a'.\n\n return RotationMatrix([\n [\n b.rot[0][0]*a.rot[0][0] + b.rot[1][0]*a.rot[0][1] + b.rot[2][0]*a.rot[0][2],\n b.rot[0][1]*a.rot[0][0] + b.rot[1][1]*a.rot[0][1] + b.rot[2][1]*a.rot[0][2],\n b.rot[0][2]*a.rot[0][0] + b.rot[1][2]*a.rot[0][1] + b.rot[2][2]*a.rot[0][2]\n ],\n [\n b.rot[0][0]*a.rot[1][0] + b.rot[1][0]*a.rot[1][1] + b.rot[2][0]*a.rot[1][2],\n b.rot[0][1]*a.rot[1][0] + b.rot[1][1]*a.rot[1][1] + b.rot[2][1]*a.rot[1][2],\n b.rot[0][2]*a.rot[1][0] + b.rot[1][2]*a.rot[1][1] + b.rot[2][2]*a.rot[1][2]\n ],\n [\n b.rot[0][0]*a.rot[2][0] + b.rot[1][0]*a.rot[2][1] + b.rot[2][0]*a.rot[2][2],\n b.rot[0][1]*a.rot[2][0] + b.rot[1][1]*a.rot[2][1] + b.rot[2][1]*a.rot[2][2],\n b.rot[0][2]*a.rot[2][0] + b.rot[1][2]*a.rot[2][1] + b.rot[2][2]*a.rot[2][2]\n ]\n ])", "def 
get_vtktransform_rotate_global_to_local(v1, v2, v3):\n\n\n x = np.array([1.0, 0.0, 0.0])\n y = np.array([0.0, 1.0, 0.0])\n z = np.array([0.0, 0.0, 1.0])\n\n rot_mat = np.zeros((3, 3), dtype=float)\n rot_mat[0, 0] = np.dot(v1, x)\n rot_mat[0, 1] = np.dot(v1, y)\n rot_mat[0, 2] = np.dot(v1, z)\n rot_mat[1, 0] = np.dot(v2, x)\n rot_mat[1, 1] = np.dot(v2, y)\n rot_mat[1, 2] = np.dot(v2, z)\n rot_mat[2, 0] = np.dot(v3, x)\n rot_mat[2, 1] = np.dot(v3, y)\n rot_mat[2, 2] = np.dot(v3, z)\n\n rot_mat = np.column_stack((rot_mat, np.array([0.0, 0.0, 0.0])))\n rot_mat = np.vstack((rot_mat, np.array([0.0, 0.0, 0.0, 1.0])))\n\n vtkM = vtk.vtkMatrix4x4()\n\n for i in range(4):\n for j in range(4):\n vtkM.SetElement(i, j, rot_mat[i, j])\n\n transform = vtk.vtkTransform()\n transform.PreMultiply()\n transform.SetMatrix(vtkM)\n\n return transform", "def next_rotation(q_1: Q, q_2: Q) -> Q:\n q_1.check_representations(q_2)\n\n if not math.isclose(q_1.t, q_2.t):\n raise ValueError(f\"Oops, to be a rotation, the first values must be the same: {q_1.t} != {q_2.t}\")\n\n if not math.isclose(norm_squared(q_1).t, norm_squared(q_2).t):\n raise ValueError(f\"Oops, the norm squared of these two are not equal: {norm_squared(q_1).t} != {norm_squared(q_2).t}\")\n\n next_rot = product(q_1, q_2)\n v_abs_q_1 = abs_of_vector(q_1).t\n next_vector_normalized = normalize(vector_q(next_rot), v_abs_q_1)\n next_vector_normalized.t = q_1.t\n\n return next_vector_normalized" ]
[ "0.73906195", "0.7337561", "0.72559744", "0.70578516", "0.702117", "0.6936926", "0.69332725", "0.6840454", "0.68362135", "0.6819142", "0.6817099", "0.67670673", "0.6754099", "0.6747304", "0.67367476", "0.6716166", "0.6703668", "0.6696891", "0.66868734", "0.6665173", "0.66038626", "0.6592167", "0.65872097", "0.6586955", "0.65862936", "0.6567359", "0.656555", "0.6548028", "0.6525666", "0.6516882", "0.65125716", "0.649642", "0.6484465", "0.64404535", "0.63665247", "0.6360384", "0.63561", "0.63469803", "0.63236386", "0.6297557", "0.62792486", "0.6271691", "0.6265999", "0.6259433", "0.6259075", "0.62590504", "0.6242854", "0.62357706", "0.62203294", "0.62095684", "0.61899585", "0.6184093", "0.6178095", "0.61666405", "0.61655915", "0.61638016", "0.6161364", "0.61430746", "0.6108979", "0.6108105", "0.61070347", "0.61068076", "0.61068076", "0.61068076", "0.6103586", "0.61026496", "0.6101503", "0.60976195", "0.60936916", "0.6083514", "0.6083347", "0.60701877", "0.6069838", "0.6061374", "0.6056669", "0.60564387", "0.60468245", "0.60457504", "0.6045297", "0.60399365", "0.6039307", "0.60179895", "0.6013817", "0.60094696", "0.60092866", "0.5993791", "0.59801716", "0.5960971", "0.59337264", "0.59272116", "0.59162307", "0.59134555", "0.5907801", "0.58953744", "0.58949196", "0.58895993", "0.5888597", "0.58867675", "0.58839035", "0.58736616" ]
0.7442148
0
Convert the axisangle representation to the matrix representation of the rotation
def vrrotvec2mat(r):
    s = np.sin(r[3])
    c = np.cos(r[3])
    t = 1 - c
    n = normalize(r[0:3])
    x = n[0]
    y = n[1]
    z = n[2]
    m = np.array( [[t*x*x + c, t*x*y - s*z, t*x*z + s*y],
                   [t*x*y + s*z, t*y*y + c, t*y*z - s*x],
                   [t*x*z - s*y, t*y*z + s*x, t*z*z + c]] )
    return m
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def axis2rotmat(axis):\n return quat2rotmat(axis2quat(axis))", "def rotation_matrix(angle, axis):\n about_z = rotation_about_z(angle)\n z_to_axis = z_to_vector(axis)\n axis_to_z = np.linalg.inv(z_to_axis)\n return reduce(np.dot, [z_to_axis, about_z, axis_to_z])", "def angle_to_rotation_matrix(angle: torch.Tensor) -> torch.Tensor:\n ang_rad = deg2rad(angle)\n cos_a: torch.Tensor = torch.cos(ang_rad)\n sin_a: torch.Tensor = torch.sin(ang_rad)\n return torch.stack([cos_a, sin_a, -sin_a, cos_a], dim=-1).view(*angle.shape, 2, 2)", "def rotation_matrix( axis, angle ):\n\n # Trig factors.\n ca = cos(angle)\n sa = sin(angle)\n C = 1 - ca\n\n # Depack the axis.\n x, y, z = tuple( axis )\n\n # Multiplications (to remove duplicate calculations).\n xs = x*sa\n ys = y*sa\n zs = z*sa\n xC = x*C\n yC = y*C\n zC = z*C\n xyC = x*yC\n yzC = y*zC\n zxC = z*xC\n\n # Update the rotation matrix.\n matrix \t = np.zeros( (3,3) )\n matrix[0, 0] = x*xC + ca\n matrix[0, 1] = xyC - zs\n matrix[0, 2] = zxC + ys\n matrix[1, 0] = xyC + zs\n matrix[1, 1] = y*yC + ca\n matrix[1, 2] = yzC - xs\n matrix[2, 0] = zxC - ys\n matrix[2, 1] = yzC + xs\n matrix[2, 2] = z*zC + ca\n return matrix", "def rotation(self, angle, axis):\r\n\r\n sqr_a = axis.x*axis.x\r\n sqr_b = axis.y*axis.y\r\n sqr_c = axis.z*axis.z\r\n len2 = sqr_a+sqr_b+sqr_c\r\n\r\n k2 = math.cos(angle)\r\n k1 = (1.0-k2)/len2\r\n k3 = math.sin(angle)/math.sqrt(len2)\r\n k1ab = k1*axis.x*axis.y\r\n k1ac = k1*axis.x*axis.z\r\n k1bc = k1*axis.y*axis.z\r\n k3a = k3*axis.x\r\n k3b = k3*axis.y\r\n k3c = k3*axis.z\r\n\r\n return mat4( k1*sqr_a+k2, k1ab-k3c, k1ac+k3b, 0.0,\r\n k1ab+k3c, k1*sqr_b+k2, k1bc-k3a, 0.0,\r\n k1ac-k3b, k1bc+k3a, k1*sqr_c+k2, 0.0,\r\n 0.0, 0.0, 0.0, 1.0)", "def _rot(axis, angle):\n if axis == 1:\n return Matrix([[1, 0, 0],\n [0, cos(angle), -sin(angle)],\n [0, sin(angle), cos(angle)]])\n elif axis == 2:\n return Matrix([[cos(angle), 0, sin(angle)],\n [0, 1, 0],\n [-sin(angle), 0, cos(angle)]])\n elif axis == 3:\n return Matrix([[cos(angle), -sin(angle), 0],\n [sin(angle), cos(angle), 0],\n [0, 0, 1]])", "def angle_axis_to_rotation_matrix(angle_axis):\n def _compute_rotation_matrix(angle_axis, theta2, eps=1e-6):\n # We want to be careful to only evaluate the square root if the\n # norm of the angle_axis vector is greater than zero. 
Otherwise\n # we get a division by zero.\n k_one = 1.0\n theta = torch.sqrt(theta2)\n wxyz = angle_axis / (theta + eps)\n wx, wy, wz = torch.chunk(wxyz, 3, dim=1)\n cos_theta = torch.cos(theta)\n sin_theta = torch.sin(theta)\n\n r00 = cos_theta + wx * wx * (k_one - cos_theta)\n r10 = wz * sin_theta + wx * wy * (k_one - cos_theta)\n r20 = -wy * sin_theta + wx * wz * (k_one - cos_theta)\n r01 = wx * wy * (k_one - cos_theta) - wz * sin_theta\n r11 = cos_theta + wy * wy * (k_one - cos_theta)\n r21 = wx * sin_theta + wy * wz * (k_one - cos_theta)\n r02 = wy * sin_theta + wx * wz * (k_one - cos_theta)\n r12 = -wx * sin_theta + wy * wz * (k_one - cos_theta)\n r22 = cos_theta + wz * wz * (k_one - cos_theta)\n rotation_matrix = torch.cat(\n [r00, r01, r02, r10, r11, r12, r20, r21, r22], dim=1)\n return rotation_matrix.view(-1, 3, 3)\n\n def _compute_rotation_matrix_taylor(angle_axis):\n rx, ry, rz = torch.chunk(angle_axis, 3, dim=1)\n k_one = torch.ones_like(rx)\n rotation_matrix = torch.cat(\n [k_one, -rz, ry, rz, k_one, -rx, -ry, rx, k_one], dim=1)\n return rotation_matrix.view(-1, 3, 3)\n\n # stolen from ceres/rotation.h\n\n _angle_axis = torch.unsqueeze(angle_axis, dim=1)\n theta2 = torch.matmul(_angle_axis, _angle_axis.transpose(1, 2))\n theta2 = torch.squeeze(theta2, dim=1)\n\n # compute rotation matrices\n rotation_matrix_normal = _compute_rotation_matrix(angle_axis, theta2)\n rotation_matrix_taylor = _compute_rotation_matrix_taylor(angle_axis)\n\n # create mask to handle both cases\n eps = 1e-6\n mask = (theta2 > eps).view(-1, 1, 1).to(theta2.device)\n mask_pos = (mask).type_as(theta2)\n mask_neg = (mask == False).type_as(theta2) # noqa\n\n # create output pose matrix\n batch_size = angle_axis.shape[0]\n rotation_matrix = torch.eye(4).to(angle_axis.device).type_as(angle_axis)\n rotation_matrix = rotation_matrix.view(1, 4, 4).repeat(batch_size, 1, 1)\n # fill output matrix with masked values\n rotation_matrix[..., :3, :3] = \\\n mask_pos * rotation_matrix_normal + mask_neg * rotation_matrix_taylor\n return rotation_matrix # Nx4x4", "def rotmat(axis, angle):\n mat = np.eye(3)\n if angle is None or np.isclose(angle, 0.0):\n return mat\n cang = np.cos(angle*radians)\n sang = np.sin(angle*radians)\n if axis == 1:\n mat = np.array(((1, 0, 0), (0, cang, -sang), (0, sang, cang)))\n elif axis == 2:\n mat = np.array(((cang, 0, sang), (0, 1, 0), (-sang, 0, cang)))\n else:\n mat = np.array(((cang, -sang, 0), (sang, cang, 0), (0, 0, 1)))\n return np.matrix(mat)", "def axisAnglesToRotMat(xrot, yrot, zrot):\n\n xmat = np.eye(3)\n ymat = np.eye(3)\n zmat = np.eye(3)\n\n xmat[1, 1] = np.cos(xrot)\n xmat[1, 2] = -np.sin(xrot)\n xmat[2, 1] = np.sin(xrot)\n xmat[2, 2] = np.cos(xrot)\n\n ymat[0, 0] = np.cos(yrot)\n ymat[0, 2] = np.sin(yrot)\n ymat[2, 0] = -np.sin(yrot)\n ymat[2, 2] = np.cos(yrot)\n\n zmat[0, 0] = np.cos(zrot)\n zmat[0, 1] = -np.sin(zrot)\n zmat[1, 0] = np.sin(zrot)\n zmat[1, 1] = np.cos(zrot)\n\n return concat(zmat, ymat, xmat)", "def axisAnglesToRotMat(xrot, yrot, zrot):\n\n xmat = np.eye(3)\n ymat = np.eye(3)\n zmat = np.eye(3)\n\n xmat[1, 1] = np.cos(xrot)\n xmat[1, 2] = -np.sin(xrot)\n xmat[2, 1] = np.sin(xrot)\n xmat[2, 2] = np.cos(xrot)\n\n ymat[0, 0] = np.cos(yrot)\n ymat[0, 2] = np.sin(yrot)\n ymat[2, 0] = -np.sin(yrot)\n ymat[2, 2] = np.cos(yrot)\n\n zmat[0, 0] = np.cos(zrot)\n zmat[0, 1] = -np.sin(zrot)\n zmat[1, 0] = np.sin(zrot)\n zmat[1, 1] = np.cos(zrot)\n\n return concat(zmat, ymat, xmat)", "def R_axis_angle(axis, angle):\n\n # Trig factors.\n ca = math.cos(angle)\n sa = 
math.sin(angle)\n C = 1 - ca\n\n # Depack the axis.\n x, y, z = axis\n\n # Multiplications (to remove duplicate calculations).\n xs = x * sa\n ys = y * sa\n zs = z * sa\n xC = x * C\n yC = y * C\n zC = z * C\n xyC = x * yC\n yzC = y * zC\n zxC = z * xC\n\n # Update the rotation matrix.\n matrix = np.zeros((3, 3))\n matrix[0, 0] = x * xC + ca\n matrix[0, 1] = xyC - zs\n matrix[0, 2] = zxC + ys\n matrix[1, 0] = xyC + zs\n matrix[1, 1] = y * yC + ca\n matrix[1, 2] = yzC - xs\n matrix[2, 0] = zxC - ys\n matrix[2, 1] = yzC + xs\n matrix[2, 2] = z * zC + ca\n return matrix", "def axis_angle_rm(axis=np.array([1, 0, 0]), angle=-1.57):\n c = math.cos(angle)\n s = math.sin(angle)\n t = 1 - c\n x, y, z = axis[0], axis[1], axis[2]\n rotation_matrix = np.array(\n [\n [t*x*x + c, t*x*y - z*s, t*x*z + y*s],\n [t*x*y + z*s, t*y*y + c, t*y*z - x*s],\n [t*x*z - y*s, t*y*z + x*s, t*z*z + c]\n ])\n return rotation_matrix", "def rotation_axis_matrix(phi: numbers.Real, axis: int):\n\n if axis == 0:\n return [[1, 0, 0, 0],\n [0, cos(phi), sin(phi), 0],\n [0, sin(phi), cos(phi), 0],\n [0, 0, 0, 1]]\n elif axis == 1:\n return [[cos(phi), 0, sin(phi), 0],\n [0, 1, 0, 0],\n [-sin(phi), 0, cos(phi), 0],\n [0, 0, 0, 1]]\n elif axis == 2:\n return [[cos(phi), -sin(phi), 0, 0],\n [sin(phi), cos(phi), 0, 0],\n [0, 0, 1, 0],\n [0, 0, 0, 1]]\n else:\n raise ValueError(\"only 3d space coordinates as homogeneous vectors are supported\")", "def axisangle2matrix(angle, direction, point=None):\r\n sina = math.sin(angle)\r\n cosa = math.cos(angle)\r\n direction = unit_vector(direction[:3])\r\n # rotation matrix around unit vector\r\n R = numpy.diag([cosa, cosa, cosa])\r\n R += numpy.outer(direction, direction) * (1.0 - cosa)\r\n direction *= sina\r\n R += numpy.array([[ 0.0, -direction[2], direction[1]],\r\n [ direction[2], 0.0, -direction[0]],\r\n [-direction[1], direction[0], 0.0]])\r\n M = numpy.identity(4)\r\n M[:3, :3] = R\r\n if point is not None:\r\n # rotation not around origin\r\n point = numpy.array(point[:3], dtype=numpy.float64, copy=False)\r\n M[:3, 3] = point - numpy.dot(R, point)\r\n return M", "def rotation_matrix_arbitrary_axis(angle, axis):\n axis = normalize_vector(axis)\n\n a = np.cos(angle / 2)\n b, c, d = axis * np.sin(angle / 2)\n\n e11 = np.square(a) + np.square(b) - np.square(c) - np.square(d)\n e12 = 2 * (b * c - a * d)\n e13 = 2 * (b * d + a * c)\n\n e21 = 2 * (b * c + a * d)\n e22 = np.square(a) + np.square(c) - np.square(b) - np.square(d)\n e23 = 2 * (c * d - a * b)\n\n e31 = 2 * (b * d - a * c)\n e32 = 2 * (c * d + a * b)\n e33 = np.square(a) + np.square(d) - np.square(b) - np.square(c)\n\n return np.array([[e11, e12, e13], [e21, e22, e23], [e31, e32, e33]])", "def _get_rotation_matrix(axis, theta):\n\n #import math\n axis = np.asarray(axis)\n theta = np.asarray(theta)\n axis = axis/np.sqrt(np.dot(axis, axis))\n a = np.cos(theta/2)\n b, c, d = -axis*np.sin(theta/2)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def rotation_matrix(axis,theta):\n\taxis = np.asarray(axis)\n\ttheta = np.asarray(theta)\n\tif np.all(axis==0): return np.identity(3) \n\taxis = axis/np.sqrt(np.dot(axis,axis))\n\ta = np.cos(theta/2)\n\tb, c, d = -axis*np.sin(theta/2)\n\taa, bb, cc, dd = a*a, b*b, c*c, d*d\n\tbc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n\treturn 
np.array([[aa+bb-cc-dd,2*(bc+ad),2*(bd-ac)],[2*(bc-ad),aa+cc-bb-dd,2*(cd+ab)],\n\t\t[2*(bd+ac),2*(cd-ab),aa+dd-bb-cc]])", "def rotation_matrix_3x3_axis(angle, axis):\n assert axis.lower() in ['x','y','z']\n assert -180.0 <= angle <= 180.0\n angle_r = angle * (np.pi / 180.0)\n sa = np.sin(angle_r)\n ca = np.cos(angle_r)\n\n if axis == 'x':\n R = np.array([ [1, 0, 0],\n [0, ca, -sa],\n [0, sa, ca],\n ])\n elif axis == 'y':\n R = np.array([ [ca, 0, sa],\n [0, 1, 0],\n [-sa, 0, ca],\n ])\n elif axis == 'z':\n R = np.array([ [ca, -sa, 0],\n [sa, ca, 0],\n [0, 0, 1],\n ])\n return R", "def rotation(axis, angle):\n axis = np.asarray(axis)\n try:\n angle = angle[:,None]\n except:\n pass\n return np.hstack([np.asarray(axis)*np.sin(angle/2.),np.cos(angle/2.)])", "def get_3drotation_matrix(axis, angle):\n angle = angle #*-1\n norm = np.linalg.norm(np.array(axis))\n if norm > 0:\n axis /= norm\n ax, ay, az = axis[0], axis[1], axis[2]\n cos, sin = np.cos(angle), np.sin(angle)\n rotmat = np.array([[cos + ax * ax * (1 - cos), ax * ay * (1 - cos) - az * sin, ax * az * (1 - cos) + ay * sin],\n [ay * ax * (1 - cos) + az * sin, cos + ay * ay * (1 - cos), ay * az * (1 - cos) - ax * sin],\n [az * ax * (1 - cos) - ay * sin, az * ay * (1 - cos) + ax * sin, cos + az * az * (1 - cos)]])\n return rotmat", "def rot_angle2rot_mat(angle):\n rot_mat = np.asarray([\n [np.cos(angle), -np.sin(angle)],\n [np.sin(angle), np.cos(angle)]\n ])\n return rot_mat", "def rotation_matrix(axis, theta):\n\taxis = np.asarray(axis)\n\taxis = axis / np.sqrt(np.dot(axis, axis))\n\ta = np.cos(theta / 2.0)\n\tb, c, d = -axis * np.sin(theta / 2.0)\n\taa, bb, cc, dd = a * a, b * b, c * c, d * d\n\tbc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n\treturn np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n\t\t\t\t\t [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n\t\t\t\t\t [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def rotateEuler(self,axis, angle):\n if(axis == 'Z'):\n return np.array([[cos(angle), -sin(angle),0,0],[sin(angle), cos(angle),0,0],[0,0,1,0],[0,0,0,1]])\n if(axis == 'Y'):\n return np.array([[cos(angle),0,sin(angle),0],[0,1,0,0],[-sin(angle),0,cos(angle),0],[0,0,0,1]])\n if(axis == 'X'):\n return np.array([[1,0,0,0],[0,cos(angle), -sin(angle),0],[0,sin(angle), cos(angle),0],[0,0,0,1]])", "def rotation_matrix(angle) -> np.array:\n return np.array([\n [np.cos(angle), np.sin(angle)],\n [-np.sin(angle), np.cos(angle)]])", "def get_rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis / math.sqrt(np.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = -axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def get_rotation_matrix(axis, theta):\n\n axis = np.array(list(axis))\n axis = axis / np.linalg.norm(axis)\n axis *= -np.sin(theta/2.0)\n a = np.cos(theta/2.0)\n b, c, d = tuple(axis.tolist())\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def from_axisangle(self, axis: np.ndarray, angle: float) -> np.ndarray:\n axis /= np.linalg.norm(axis)\n K = skew(axis)\n return np.identity(3) + np.sin(angle)*K + (1-np.cos(angle))*K@K", 
"def rotation_matrix(self):\n self._normalise()\n product_matrix = np.dot(self._q_matrix(), self._q_bar_matrix().conj().transpose())\n return product_matrix[1:][:,1:]", "def axis_angle(cls, axis: Union[tuple, Vector], angle: Number):\n if isinstance(axis, (list, tuple)):\n assert(len(axis) == 3)\n axis = Vector(*axis)\n\n assert(isinstance(axis, Vector))\n\n K = Matrix(3, 3)\n\n axis.normalize()\n\n x = axis[0, 0]\n y = axis[1, 0]\n z = axis[2, 0]\n\n K[0, 1] = -z\n K[0, 2] = y\n K[1, 2] = -x\n\n K[1, 0] = z\n K[2, 0] = -y\n K[2, 1] = x\n\n c = np.cos(angle)\n s = np.sin(angle)\n\n I = Matrix.identity(3)\n\n rot = I + (s * I + (1 - c) * K) * K\n\n return cls(rot)", "def _rotation_matrix(self, axis, angle):\n axis = axis/np.linalg.norm(axis)\n axis_squared = np.square(axis)\n cos_angle = np.cos(angle)\n sin_angle = np.sin(angle)\n rot_matrix_row_one = np.array([cos_angle+axis_squared[0]*(1-cos_angle),\n axis[0]*axis[1]*(1-cos_angle) - axis[2]*sin_angle,\n axis[0]*axis[2]*(1-cos_angle)+axis[1]*sin_angle])\n\n rot_matrix_row_two = np.array([axis[1]*axis[0]*(1-cos_angle)+axis[2]*sin_angle,\n cos_angle+axis_squared[1]*(1-cos_angle),\n axis[1]*axis[2]*(1-cos_angle) - axis[0]*sin_angle])\n\n rot_matrix_row_three = np.array([axis[2]*axis[0]*(1-cos_angle)-axis[1]*sin_angle,\n axis[2]*axis[1]*(1-cos_angle)+axis[0]*sin_angle,\n cos_angle+axis_squared[2]*(1-cos_angle)])\n\n rotation_matrix = np.array([rot_matrix_row_one, rot_matrix_row_two, rot_matrix_row_three])\n return rotation_matrix", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:,0], angle[:,1], angle[:,2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach()*0\n ones = zeros.detach()+1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).reshape(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).reshape(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).reshape(B, 3, 3)\n\n rotMat = xmat @ ymat @ zmat\n return rotMat", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:,0], angle[:,1], angle[:,2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach()*0\n ones = zeros.detach()+1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).reshape(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).reshape(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).reshape(B, 3, 3)\n\n rotMat = xmat @ ymat @ zmat\n return rotMat", "def rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis / np.sqrt(np.dot(axis, axis))\n a = np.cos(theta / 2.0)\n b, c, d = -axis * np.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def fromAngleAxis(cls,angle,axis):\n from axis_angle import AxisAngle\n return AxisAngle(angle,axis).asQuat()", "def rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis / math.sqrt(np.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = 
-axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis / math.sqrt(np.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = -axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis / math.sqrt(np.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = -axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def axangle2rotmat(axangles):\r\n\r\n if type(axangles) is not np.ndarray:\r\n raise ValueError('Rodrigues only works on numpy arrays')\r\n \r\n # store original shape\r\n shape = axangles.shape\r\n assert shape[-1] % 3 == 0, \"inputs are not axis angles\"\r\n axangles = axangles.reshape((-1, 3))\r\n\r\n rotmats = []\r\n for i in range(axangles.shape[0]):\r\n rotmat, _ = cv2.Rodrigues(axangles[i])\r\n rotmats.append(rotmat)\r\n\r\n # restore original shape\r\n new_shape = shape[:-1] + (shape[-1]//3*9,)\r\n return np.array(rotmats).reshape(new_shape)", "def rotation_matrix(axis, theta): \n \n import numpy as np\n import math\n\n axis = np.asarray(axis)\n theta = np.asarray(theta)\n axis = axis/math.sqrt(np.dot(axis, axis))\n a = math.cos(theta/2)\n b, c, d = -axis*math.sin(theta/2)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis/math.sqrt(np.dot(axis, axis))\n a = math.cos(theta/2.0)\n b, c, d = -axis*math.sin(theta/2.0)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def rotation_matrix2(angle):\n c, s = cos(angle), sin(angle)\n return np.array([[c, -s], [s, c]])", "def vrrotvec2mat(ax_ang):\n\n #file_dir = os.path.dirname(os.path.realpath(__file__))\n #path_dir2 = file_dir + '/../geometry/'\n #sys.path.append(path_dir2)\n\n if ax_ang.ndim == 1:\n if np.size(ax_ang) == 5:\n ax_ang = np.reshape(ax_ang, (5, 1))\n msz = 1\n elif np.size(ax_ang) == 4:\n ax_ang = np.reshape(np.hstack((ax_ang, np.array([1]))), (5, 1))\n msz = 1\n else:\n raise Exception('Wrong Input Type')\n elif ax_ang.ndim == 2:\n if np.shape(ax_ang)[0] == 5:\n msz = np.shape(ax_ang)[1]\n elif np.shape(ax_ang)[1] == 5:\n ax_ang = ax_ang.transpose()\n msz = np.shape(ax_ang)[1]\n else:\n raise Exception('Wrong Input Type')\n else:\n raise Exception('Wrong Input Type')\n\n direction = ax_ang[0:3, :]\n angle = ax_ang[3, :]\n\n d = np.array(direction, 
dtype=np.float64)\n d /= np.linalg.norm(d, axis=0)\n x = d[0, :]\n y = d[1, :]\n z = d[2, :]\n c = np.cos(angle)\n s = np.sin(angle)\n tc = 1 - c\n\n mt11 = tc*x*x + c\n mt12 = tc*x*y - s*z\n mt13 = tc*x*z + s*y\n\n mt21 = tc*x*y + s*z\n mt22 = tc*y*y + c\n mt23 = tc*y*z - s*x\n\n mt31 = tc*x*z - s*y\n mt32 = tc*y*z + s*x\n mt33 = tc*z*z + c\n\n mtx = np.column_stack((mt11, mt12, mt13, mt21, mt22, mt23, mt31, mt32, mt33))\n\n inds1 = np.where(ax_ang[4, :] == -1)\n mtx[inds1, :] = -mtx[inds1, :]\n\n if msz == 1:\n mtx = mtx.reshape(3, 3)\n else:\n mtx = mtx.reshape(msz, 3, 3)\n\n return mtx", "def homog_rot_mtx(angle_rads: float, axis: str) -> numpy.array:\n cosang = numpy.cos(angle_rads)\n sinang = numpy.sin(angle_rads)\n\n if \"z\" == axis:\n return numpy.array(\n (\n (cosang, -sinang, 0, 0),\n (sinang, cosang, 0, 0),\n (0, 0, 1, 0),\n (0, 0, 0, 1),\n ),\n dtype=numpy.float64,\n )\n elif \"y\" == axis:\n return numpy.array(\n (\n (cosang, 0, sinang, 0),\n (0, 1, 0, 0),\n (-sinang, 0, cosang, 0),\n (0, 0, 0, 1),\n ),\n dtype=numpy.float64,\n )\n else:\n return numpy.array(\n (\n (1, 0, 0, 0),\n (0, cosang, -sinang, 0),\n (0, sinang, cosang, 0),\n (0, 0, 0, 1),\n ),\n dtype=numpy.float64,\n )", "def rotation_matrix_xyz(axis, angle, angle_dim):\n assert angle_dim is \"deg\" or angle_dim is \"rad\"\n assert axis is \"x\" or axis is \"y\" or axis is \"z\"\n x = 0\n y = 0\n z = 0\n\n if angle_dim is \"deg\":\n a = np.deg2rad(angle)\n else:\n a = angle\n\n if axis is \"x\":\n x = 1\n y = 0\n z = 0\n if axis is \"y\":\n x = 0\n y = 1\n z = 0\n if axis is \"z\":\n x = 0\n y = 0\n z = 1\n\n s = np.sin(a)\n c = np.cos(a)\n rotation_matrix = np.array([[c + x ** 2 * (1 - c), x * y * (1 - c) - z * s, x * z * (1 - c) + y * s],\n [y * x * (1 - c) + z * s, c + y ** 2 * (1 - c), y * z * (1 - c) - x * s],\n [z * x * (1 - c) - y * s, z * y * (1 - c) + x * s, c + z ** 2 * (1 - c)]])\n\n return rotation_matrix", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:,0], angle[:,1], angle[:,2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach()*0\n ones = zeros.detach()+1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).reshape(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).reshape(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).reshape(B, 3, 3)\n\n rotMat = torch.matmul(torch.matmul(xmat, ymat), zmat)\n return rotMat", "def transformation_matrix(self):\n t = np.array([[0.0], [0.0], [0.0]])\n Rt = np.hstack([self.rotation_matrix, t])\n return np.vstack([Rt, np.array([0.0, 0.0, 0.0, 1.0])])", "def rotateEuler(axis, angle):\n if(axis == 'Z'):\n return np.array([[cos(angle), -sin(angle),0,0],[sin(angle), cos(angle),0,0],[0,0,1,0],[0,0,0,1]])\n if(axis == 'Y'):\n return np.array([[cos(angle),0,sin(angle),0],[0,1,0,0],[-sin(angle),0,cos(angle),0],[0,0,0,1]])\n if(axis == 'X'):\n return np.array([[1,0,0,0],[0,cos(angle), -sin(angle),0],[0,sin(angle), cos(angle),0],[0,0,0,1]])", "def rotation_matrix(theta, axis=None):\n if axis is None:\n axis = [0, 0, 1]\n axis = np.asarray(axis)\n axis = axis/math.sqrt(np.dot(axis, axis))\n a = math.cos(theta/2.0)\n b, c, d = -axis*math.sin(theta/2.0)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), 
aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def rotate_matrix(angle):\n c = np.cos(angle)\n s = np.sin(angle)\n return np.array([[c, s],\n [-s, c]])", "def rotation_matrix(self):\n return self.affine_matrix[0:3][:, 0:3]", "def to_axisangle(self) -> Tuple[np.ndarray, float]:\n angle = np.arccos((self.A.trace()-1)/2)\n axis = np.zeros(3)\n if angle!=0:\n axis = np.array([self.A[2, 1]-self.A[1, 2], self.A[0, 2]-self.A[2, 0], self.A[1, 0]-self.A[0, 1]])/(2*np.sin(angle))\n return axis, angle", "def so2mat(angle):\n return torch.Tensor(R.from_rotvec(angle).as_dcm())", "def rotation_matrix(axis, theta):\n axis = np.asarray(axis)\n axis = axis / math.sqrt(np.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = -axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n\n R = np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])\n\n T = np.identity(4)\n T[:3, :3] = R\n return T", "def euler2mat(angle):\n B = angle.size(0)\n x, y, z = angle[:, 0], angle[:, 1], angle[:, 2]\n\n cosz = torch.cos(z)\n sinz = torch.sin(z)\n\n zeros = z.detach() * 0\n ones = zeros.detach() + 1\n zmat = torch.stack([cosz, -sinz, zeros,\n sinz, cosz, zeros,\n zeros, zeros, ones], dim=1).view(B, 3, 3)\n\n cosy = torch.cos(y)\n siny = torch.sin(y)\n\n ymat = torch.stack([cosy, zeros, siny,\n zeros, ones, zeros,\n -siny, zeros, cosy], dim=1).view(B, 3, 3)\n\n cosx = torch.cos(x)\n sinx = torch.sin(x)\n\n xmat = torch.stack([ones, zeros, zeros,\n zeros, cosx, -sinx,\n zeros, sinx, cosx], dim=1).view(B, 3, 3)\n\n # rotMat = xmat.bmm(ymat).bmm(zmat)\n # changed to match opencv and conversion euler->mat/mat->euler\n rotMat = torch.bmm(zmat, torch.bmm(ymat, xmat))\n\n return rotMat", "def m2rotaxis(m):\n eps = 1e-5\n\n # Check for singularities a la http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToAngle/\n if (\n abs(m[0, 1] - m[1, 0]) < eps\n and abs(m[0, 2] - m[2, 0]) < eps\n and abs(m[1, 2] - m[2, 1]) < eps\n ):\n # Singularity encountered. 
Check if its 0 or 180 deg\n if (\n abs(m[0, 1] + m[1, 0]) < eps\n and abs(m[0, 2] + m[2, 0]) < eps\n and abs(m[1, 2] + m[2, 1]) < eps\n and abs(m[0, 0] + m[1, 1] + m[2, 2] - 3) < eps\n ):\n angle = 0\n else:\n angle = numpy.pi\n else:\n # Angle always between 0 and pi\n # Sense of rotation is defined by axis orientation\n t = 0.5 * (numpy.trace(m) - 1)\n t = max(-1, t)\n t = min(1, t)\n angle = numpy.arccos(t)\n\n if angle < 1e-15:\n # Angle is 0\n return 0.0, Vector(1, 0, 0)\n elif angle < numpy.pi:\n # Angle is smaller than pi\n x = m[2, 1] - m[1, 2]\n y = m[0, 2] - m[2, 0]\n z = m[1, 0] - m[0, 1]\n axis = Vector(x, y, z)\n axis.normalize()\n return angle, axis\n else:\n # Angle is pi - special case!\n m00 = m[0, 0]\n m11 = m[1, 1]\n m22 = m[2, 2]\n if m00 > m11 and m00 > m22:\n x = numpy.sqrt(m00 - m11 - m22 + 0.5)\n y = m[0, 1] / (2 * x)\n z = m[0, 2] / (2 * x)\n elif m11 > m00 and m11 > m22:\n y = numpy.sqrt(m11 - m00 - m22 + 0.5)\n x = m[0, 1] / (2 * y)\n z = m[1, 2] / (2 * y)\n else:\n z = numpy.sqrt(m22 - m00 - m11 + 0.5)\n x = m[0, 2] / (2 * z)\n y = m[1, 2] / (2 * z)\n axis = Vector(x, y, z)\n axis.normalize()\n return numpy.pi, axis", "def rotation(theta, axis):\n axis = np.asarray(axis)\n axis = axis/math.sqrt(np.dot(axis, axis))\n a = math.cos(theta/2.0)\n b, c, d = -axis*math.sin(theta/2.0)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def rotation_matrix_to_angle_axis(rotation_matrix):\n # todo add check that matrix is a valid rotation matrix\n quaternion = rotation_matrix_to_quaternion(rotation_matrix)\n return quaternion_to_angle_axis(quaternion)", "def _rotation_matrix(theta):\n c, s = np.cos(theta), np.sin(theta)\n return np.array(((c, -s), (s, c)))", "def angleAxis2rot3D(axis, theta):\n if len(axis) is not 3:\n raise ValueError('Number of axis element must be 3!')\n axis = axis.astype(float)\n axis /= np.linalg.norm(axis)\n a = axis[0]\n b = axis[1]\n c = axis[2]\n cosTheta = np.cos(theta)\n bracket = 1 - cosTheta\n aBracket = a * bracket\n bBracket = b * bracket\n cBracket = c * bracket\n sinTheta = np.sin(theta)\n aSinTheta = a * sinTheta\n bSinTheta = b * sinTheta\n cSinTheta = c * sinTheta\n rot3D = np.array([[a*aBracket+cosTheta, a*bBracket-cSinTheta, a*cBracket+bSinTheta],\n [b*aBracket+cSinTheta, b*bBracket+cosTheta, b*cBracket-aSinTheta],\n [c*aBracket-bSinTheta, c*bBracket+aSinTheta, c*cBracket+cosTheta]])\n return rot3D", "def rotate_matrix(axis, theta):\n if np.abs(axis).sum() < 1e-6 or np.abs(theta) < 1e-6:\n return np.eye(3)\n axis = np.asarray(axis)\n axis = axis / math.sqrt(np.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = -axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return np.array(\n [\n [aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc],\n ]\n )", "def _from_axis_angle(cls, axis, angle):\n mag_sq = np.dot(axis, axis)\n if mag_sq == 0.0:\n raise ZeroDivisionError(\"Provided rotation axis has no length\")\n # Ensure axis is in unit vector form\n if (abs(1.0 - mag_sq) > 1e-12):\n axis = axis / sqrt(mag_sq)\n theta = angle / 2.0\n r = cos(theta)\n i = axis * sin(theta)\n\n return cls(r, i[0], i[1], i[2])", "def angle_and_axis(basis):\n q = 
matrix.col(basis.orientation).normalize()\n return q.unit_quaternion_as_axis_and_angle(deg=True)", "def matrix(self):\n return self._rotation", "def _get_rotation_matrix(transform):\n # caution: UE4 is using left-hand ortation order\n roll = np.deg2rad(-transform.rotation.roll)\n pitch = np.deg2rad(-transform.rotation.pitch)\n yaw = np.deg2rad(transform.rotation.yaw)\n sr, cr = np.sin(roll), np.cos(roll)\n sp, cp = np.sin(pitch), np.cos(pitch)\n sy, cy = np.sin(yaw), np.cos(yaw)\n rotation_matrix = np.array([[cy * cp, -sy * sr + cy * sp * sr, cy * sp * cr + sy * sr],\n [sy * cp, cy * sp * sr + cy * sr, -cy * sr + sy * sp * cr],\n [-sp, cp * sr, cp * cr]])\n return rotation_matrix", "def rotation_matrix_2d(angle):\n psi = Angle(angle).rad\n return np.array([[cos(psi), -sin(psi)],\n [sin(psi), cos(psi)]])", "def Pivot(rotation, axis, angle):\n # Check for an invalid coordinate axis.\n if axis not in [0, 1, 2]:\n raise Error('Invalid axis {}. Must be [0, 1, 2].'.format(axis))\n\n radians = math.radians(angle)\n c = math.cos(radians)\n s = math.sin(radians)\n\n # We need to maintain the \"right-hand\" rule, no matter which\n # axis was selected. That means we pick (i, j, k) axis order\n # such that the following vector cross product is satisfied:\n # i x j = k\n i = (axis + 1) % 3\n j = (axis + 2) % 3\n k = axis\n\n rot = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]\n\n rot[i][i] = c*rotation.rot[i][i] - s*rotation.rot[i][j]\n rot[i][j] = s*rotation.rot[i][i] + c*rotation.rot[i][j]\n rot[i][k] = rotation.rot[i][k]\n\n rot[j][i] = c*rotation.rot[j][i] - s*rotation.rot[j][j]\n rot[j][j] = s*rotation.rot[j][i] + c*rotation.rot[j][j]\n rot[j][k] = rotation.rot[j][k]\n\n rot[k][i] = c*rotation.rot[k][i] - s*rotation.rot[k][j]\n rot[k][j] = s*rotation.rot[k][i] + c*rotation.rot[k][j]\n rot[k][k] = rotation.rot[k][k]\n\n return RotationMatrix(rot)", "def rotation_around_axis(self,axis,angle,**kwargs):\n xyz = self.get('x,y,z',**kwargs)\n\n # get the data\n ct,st = np.cos(angle),np.sin(angle)\n ux,uy,uz = axis\n\n # get the center of the molecule\n xyz0 = np.mean(xyz,0)\n\n # definition of the rotation matrix\n # see https://en.wikipedia.org/wiki/Rotation_matrix\n rot_mat = np.array([\n [ct + ux**2*(1-ct), ux*uy*(1-ct) - uz*st, ux*uz*(1-ct) + uy*st],\n [uy*ux*(1-ct) + uz*st, ct + uy**2*(1-ct), uy*uz*(1-ct) - ux*st],\n [uz*ux*(1-ct) - uy*st, uz*uy*(1-ct) + ux*st, ct + uz**2*(1-ct) ]])\n\n # apply the rotation\n xyz = np.dot(rot_mat,(xyz-xyz0).T).T + xyz0\n self.update('x,y,z',xyz,**kwargs)\n\n return xyz0", "def rotation_matrix3(axis, theta):\n R = np.eye(3)\n c = math.cos(theta)\n s = math.sin(theta)\n a1 = (axis + 1) % 3\n a2 = (axis + 2) % 3\n R[a1, a1] = c\n R[a1, a2] = -s\n R[a2, a1] = s\n R[a2, a2] = c\n return np.matrix(R)", "def rotation_mat(self) -> np.ndarray:\n rot = np.zeros((3, 3))\n\n txx = 2 * self.x * self.x\n tyy = 2 * self.y * self.y\n tzz = 2 * self.z * self.z\n twx = 2 * self.w * self.x\n twy = 2 * self.w * self.y\n twz = 2 * self.w * self.z\n txy = 2 * self.x * self.y\n txz = 2 * self.x * self.z\n tyz = 2 * self.y * self.z\n\n rot[0, 0] = 1. - tyy - tzz\n rot[0, 1] = txy - twz\n rot[0, 2] = txz + twy\n rot[1, 0] = txy + twz\n rot[1, 1] = 1. - txx - tzz\n rot[1, 2] = tyz - twx\n rot[2, 0] = txz - twy\n rot[2, 1] = tyz + twx\n rot[2, 2] = 1. 
- txx - tyy\n\n return rot", "def py_rotation_from_matrix(matrix):\n return np.float32(quat2angle_axis(mat2quat(matrix)))", "def eulerAnglesToRotationMatrix(theta):\n\n R_x = np.array([[1, 0, 0 ],\n [0, np.cos(theta[0]), -np.sin(theta[0]) ],\n [0, np.sin(theta[0]), np.cos(theta[0]) ]\n ])\n R_y = np.array([[np.cos(theta[1]), 0, np.sin(theta[1]) ],\n [0, 1, 0 ],\n [-np.sin(theta[1]), 0, np.cos(theta[1]) ]\n ])\n R_z = np.array([[np.cos(theta[2]), -np.sin(theta[2]), 0],\n [np.sin(theta[2]), np.cos(theta[2]), 0],\n [0, 0, 1]\n ])\n R = np.dot(R_z, np.dot( R_y, R_x ))\n return R", "def rotate(mat,angle):\n return np.dot(Mueller.rotator(angle), np.dot(mat, Mueller.rotator(-angle)))", "def rotate(self, angle, axis):\r\n R=self.rotation(angle, axis)\r\n self.mlist = (self*R).mlist\r\n return self", "def test_conversions_matrix_axis_angle_continuous():\n for angle in np.arange(3.1, 3.2, 0.01):\n a = np.array([1.0, 0.0, 0.0, angle])\n R = pr.matrix_from_axis_angle(a)\n pr.assert_rotation_matrix(R)\n\n a2 = pr.axis_angle_from_matrix(R)\n pr.assert_axis_angle_equal(a, a2)\n\n R2 = pr.matrix_from_axis_angle(a2)\n assert_array_almost_equal(R, R2)\n pr.assert_rotation_matrix(R2)", "def quaternion_to_angle_axis(quaternion):\n ha = np.arccos(quaternion[0])\n theta = 2 * ha\n if theta < np.finfo(float).eps:\n theta = 0\n axis = np.array([1, 0, 0])\n else:\n axis = quaternion[[1, 2, 3]] / np.sin(ha)\n return theta, axis", "def construct_euler_rodriguez_matrix(self, axis, theta):\n axis = numpy.asarray(axis)\n axis = axis / math.sqrt(numpy.dot(axis, axis))\n a = math.cos(theta / 2.0)\n b, c, d = -axis * math.sin(theta / 2.0)\n aa, bb, cc, dd = a * a, b * b, c * c, d * d\n bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d\n return numpy.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],\n [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],\n [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])", "def rotation_matrix(rotate):\n tx, ty, tz = rotate\n Rx = np.array([[1, 0, 0], [0, np.cos(tx), -np.sin(tx)], [0, np.sin(tx), np.cos(tx)]])\n Ry = np.array([[np.cos(ty), 0, -np.sin(ty)], [0, 1, 0], [np.sin(ty), 0, np.cos(ty)]])\n Rz = np.array([[np.cos(tz), -np.sin(tz), 0], [np.sin(tz), np.cos(tz), 0], [0, 0, 1]])\n return np.dot(Rx, np.dot(Ry, Rz))", "def rotdMat(angle, axis=0):\n if axis == 2:\n return np.array([[cosd(angle), -sind(angle), 0],\n [sind(angle), cosd(angle), 0], [0, 0, 1]])\n elif axis == 1:\n return np.array([[cosd(angle), 0, -sind(angle)],\n [0, 1, 0], [sind(angle), 0, cosd(angle)]])\n else:\n return np.array([[1, 0, 0], [0, cosd(angle), -sind(angle)],\n [0, sind(angle), cosd(angle)]])", "def to_axang(self) -> Tuple[np.ndarray, float]:\n return self.to_axisangle()", "def _rotationMatrix(self, n_dim, theta):\n i = np.identity(n_dim)\n c, s = np.cos(theta)*i, np.sin(theta)*i\n rotation = np.bmat([[c, s], [-s, c]])\n return rotation", "def _cubelet_rotation_matrix(self, cubelet_meta_info, qpos_array):\n euler_angles = qpos_array[cubelet_meta_info[\"euler_qpos\"]]\n return rotation.euler2mat(euler_angles)", "def rot2mat(rotation: np.ndarray) -> np.ndarray:\n rotation_radians = ndarray_to_rotation(rotation)\n pitch = np.deg2rad(rotation_radians.pitch)\n roll = np.deg2rad(rotation_radians.roll)\n yaw = np.deg2rad(rotation_radians.yaw)\n return transforms3d.euler.euler2mat(roll, pitch, yaw).T", "def rotate(mat,angle):\n return np.dot(Jones.rotator(angle), np.dot(mat, Jones.rotator(-angle)))", "def generate_rotation_matrix(x_angle, y_angle, z_angle):\n return np.array([\n [1, 0, 0],\n 
[0, np.cos(x_angle), -np.sin(x_angle)],\n [0, np.sin(x_angle), np.cos(x_angle)],\n ]).dot([\n [np.cos(y_angle), 0, np.sin(y_angle)],\n [0, 1, 0],\n [-np.sin(y_angle), 0, np.cos(y_angle)],\n ]).dot([\n [np.cos(z_angle), -np.sin(z_angle), 0],\n [np.sin(z_angle), np.cos(z_angle), 0],\n [0, 0, 1],\n ]).tolist()", "def _euler_angles_to_rotation_matrix(theta):\n R_x = np.array([[1, 0, 0],\n [0, math.cos(theta[0]), -math.sin(theta[0])],\n [0, math.sin(theta[0]), math.cos(theta[0])]\n ])\n\n R_y = np.array([[math.cos(theta[1]), 0, math.sin(theta[1])],\n [0, 1, 0],\n [-math.sin(theta[1]), 0, math.cos(theta[1])]\n ])\n\n R_z = np.array([[math.cos(theta[2]), -math.sin(theta[2]), 0],\n [math.sin(theta[2]), math.cos(theta[2]), 0],\n [0, 0, 1]\n ])\n\n R = np.dot(R_z, np.dot(R_y, R_x))\n\n return R", "def rotateMatrix_by_axis(axis, theta):\n axis = np.asarray(axis)\n theta = np.asarray(theta)\n axis = unitVec(axis)\n a = cos(theta / 2)\n b, c, d = -axis * sin(theta / 2)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.array([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac)],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab)],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc]])", "def eulerAnglesToRotationMatrix(self, theta):\n\n R_x = np.array([[1, 0, 0],\n [0, math.cos(theta[0]), -math.sin(theta[0])],\n [0, math.sin(theta[0]), math.cos(theta[0])]\n ])\n\n R_y = np.array([[math.cos(theta[1]), 0, math.sin(theta[1])],\n [0, 1, 0],\n [-math.sin(theta[1]), 0, math.cos(theta[1])]\n ])\n\n R_z = np.array([[math.cos(theta[2]), -math.sin(theta[2]), 0],\n [math.sin(theta[2]), math.cos(theta[2]), 0],\n [0, 0, 1]\n ])\n\n R = np.dot(R_z, np.dot(R_y, R_x))\n\n return R", "def quaternion2AngleAxis(quaternion):\n HA = np.arccos(quaternion[0])\n theta = 2 * HA\n if theta < np.finfo(float).eps:\n theta = 0\n axis = np.array([1, 0, 0])\n else:\n axis = quaternion[[1, 2, 3]] / np.sin(HA)\n return theta, axis", "def axis_angle_matrix3(unit, theta):\n x, y, z = unit\n c = math.cos(theta)\n s = math.sin(theta)\n C = 1 - c\n return np.matrix([\n [x * x * C + c, x * y * C - z * s, x * z * C + y * s],\n [y * x * C + z * s, y * y * C + c, y * z * C - x * s],\n [z * x * C - y * s, z * y * C + x * s, z * z * C + c],\n ])", "def rotation_matrix(yaw, pitch) -> TransformationMatrixType:\n return rotation_matrix_yx(math.radians(yaw + 180), math.radians(pitch))", "def angle_axis_to_quaternion(angle_axis: torch.Tensor) -> torch.Tensor:\n if not torch.is_tensor(angle_axis):\n raise TypeError(\"Input type is not a torch.Tensor. Got {}\".format(\n type(angle_axis)))\n\n if not angle_axis.shape[-1] == 3:\n raise ValueError(\"Input must be a tensor of shape Nx3 or 3. 
Got {}\"\n .format(angle_axis.shape))\n # unpack input and compute conversion\n a0: torch.Tensor = angle_axis[..., 0:1]\n a1: torch.Tensor = angle_axis[..., 1:2]\n a2: torch.Tensor = angle_axis[..., 2:3]\n theta_squared: torch.Tensor = a0 * a0 + a1 * a1 + a2 * a2\n\n theta: torch.Tensor = torch.sqrt(theta_squared)\n half_theta: torch.Tensor = theta * 0.5\n\n mask: torch.Tensor = theta_squared > 0.0\n ones: torch.Tensor = torch.ones_like(half_theta)\n\n k_neg: torch.Tensor = 0.5 * ones\n k_pos: torch.Tensor = torch.sin(half_theta) / theta\n k: torch.Tensor = torch.where(mask, k_pos, k_neg)\n w: torch.Tensor = torch.where(mask, torch.cos(half_theta), ones)\n\n quaternion: torch.Tensor = torch.zeros_like(angle_axis)\n quaternion[..., 0:1] += a0 * k\n quaternion[..., 1:2] += a1 * k\n quaternion[..., 2:3] += a2 * k\n return torch.cat([w, quaternion], dim=-1)", "def rot_mat2rot_angle(rot_mat):\n return np.arctan2(rot_mat[1, 0], rot_mat[0, 0])", "def get_rotation(self) -> np.array:\n axis = self.get_arms()[1]\n force = [self.d_x, self.d_y] # \"Force applied on the arm\"\n o_m = [self.target.x_obj - axis.x_obj, self.target.y_obj - axis.y_obj]\n torque = o_m[0]*force[1] - o_m[1] * force[0] # OM vectorial F\n if torque == 1: # Anti clockwise rotation\n rotation = np.array([[0, -1], [1, 0]])\n if torque == -1: # Clockwise rotation\n rotation = np.array([[0, 1], [-1, 0]])\n if torque == 0: # No rotation\n rotation = np.array([[0, 0], [0, 0]])\n return rotation", "def rotation_matrix_cp(axis, theta):\n axis = np.asarray(axis)\n theta = np.asarray(theta)\n axis = axis/math.sqrt(np.dot(axis, axis))\n a = math.cos(theta/2.0)\n b, c, d = -axis*math.sin(theta/2.0)\n aa, bb, cc, dd = a*a, b*b, c*c, d*d\n bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d\n return np.matrix([[aa+bb-cc-dd, 2*(bc+ad), 2*(bd-ac), 0.],\n [2*(bc-ad), aa+cc-bb-dd, 2*(cd+ab), 0.],\n [2*(bd+ac), 2*(cd-ab), aa+dd-bb-cc, 0.],\n [0., 0., 0., 1.]])", "def _rotate_about_origin(self, angle, axis):\n matrix = rotation_matrix(angle, axis)\n self._normal = matrix.dot(self._normal)\n self._position = matrix.dot(self._position)", "def rotation_matrix(phi):\n return np.asmatrix([\n [np.cos(phi), -np.sin(phi), 0],\n [np.sin(phi), np.cos(phi), 0],\n [0, 0, 1]\n ])", "def axang2quat(ax_ang):\n\n if ax_ang.ndim == 1:\n if np.size(ax_ang) == 5:\n ax_ang = np.reshape(ax_ang, (5, 1))\n msz = 1\n elif np.size(ax_ang) == 4:\n ax_ang = np.reshape(np.hstack((ax_ang, np.array([1]))), (5, 1))\n msz = 1\n else:\n raise Exception('Wrong Input Type')\n elif ax_ang.ndim == 2:\n if np.shape(ax_ang)[0] == 5:\n msz = np.shape(ax_ang)[1]\n elif np.shape(ax_ang)[1] == 5:\n ax_ang = ax_ang.transpose()\n msz = np.shape(ax_ang)[1]\n else:\n raise Exception('Wrong Input Type')\n else:\n raise Exception('Wrong Input Type')\n\n direction = ax_ang[0:3, :]\n angle = ax_ang[3, :]\n\n d = np.array(direction, dtype=np.float64)\n d /= np.linalg.norm(d, axis=0)\n x = d[0, :]\n y = d[1, :]\n z = d[2, :]\n q0 = np.cos(angle/2)\n s = np.sin(angle/2)\n\n q1 = x*s\n q2 = y*s\n q3 = z*s\n\n qtype = 0*q3;\n inds1 = np.where(ax_ang[4, :] == -1); qtype[inds1] = -1;\n inds2 = np.where(ax_ang[4, :] == 1); qtype[inds2] = 1;\n\n return quat.Quaternion(q0, q1, q2, q3, qtype)", "def test_to_rotation(self):\r\n q = np.array([-1, 1, 3, 2])\r\n q = q / np.linalg.norm(q)\r\n R_gt = np.array([\r\n [-1/3., -14/15., -2/15.],\r\n [2/3., -1/3., 2/3.],\r\n [-2/3., 2/15., 11/15.]]).T\r\n R = to_rotation(q)\r\n\r\n zero_matrix = R - R_gt\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 
0.0)\r\n\r\n for _ in range(20):\r\n q = np.random.randn(4)\r\n q /= np.linalg.norm(q)\r\n q_inv = quaternion_conjugate(q)\r\n\r\n R = to_rotation(q)\r\n R_inv = to_rotation(q_inv)\r\n\r\n zero_matrix = R @ R_inv - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)\r\n\r\n # orthogonal matrix\r\n zero_matrix = R @ R.T - np.identity(3)\r\n self.assertAlmostEqual(np.linalg.norm(zero_matrix), 0.0)", "def angle_axis_to_rot3d(axis, theta):\n if isinstance(axis, string_types):\n axis = axis.lower()\n if axis == 'x':\n axis = np.array([1., 0., 0.])\n elif axis == 'y':\n axis = np.array([0., 1., 0.])\n elif axis == 'z':\n axis = np.array([0., 0., 1.])\n else:\n raise ValueError(\"Axis should be 'x', 'y', 'z' or a 3D vector.\")\n elif len(axis) != 3:\n raise ValueError(\"Axis should be 'x', 'y', 'z' or a 3D vector.\")\n axis = axis.astype(float)\n axis /= np.linalg.norm(axis)\n a = axis[0]\n b = axis[1]\n c = axis[2]\n cos_theta = np.cos(theta)\n bracket = 1 - cos_theta\n a_bracket = a * bracket\n b_bracket = b * bracket\n c_bracket = c * bracket\n sin_theta = np.sin(theta)\n a_sin_theta = a * sin_theta\n b_sin_theta = b * sin_theta\n c_sin_theta = c * sin_theta\n rot3d = np.array(\n [[a * a_bracket + cos_theta, a * b_bracket - c_sin_theta, a * c_bracket + b_sin_theta],\n [b * a_bracket + c_sin_theta, b * b_bracket + cos_theta, b * c_bracket - a_sin_theta],\n [c * a_bracket - b_sin_theta, c * b_bracket + a_sin_theta, c * c_bracket + cos_theta]])\n return rot3d", "def rotate(self, axis, theta):\n v = Vector3(self) # ensure vector\n k = Vector3(axis.uv())\n return type(self)(\n cosd(theta) * v\n + sind(theta) * k.cross(v)\n + (1 - cosd(theta)) * k * (k.dot(v))\n )", "def quaternion_to_angle_axis(quaternion: torch.Tensor) -> torch.Tensor:\n if not torch.is_tensor(quaternion):\n raise TypeError(\"Input type is not a torch.Tensor. Got {}\".format(\n type(quaternion)))\n\n if not quaternion.shape[-1] == 4:\n raise ValueError(\"Input must be a tensor of shape Nx4 or 4. Got {}\"\n .format(quaternion.shape))\n # unpack input and compute conversion\n q1: torch.Tensor = quaternion[..., 1]\n q2: torch.Tensor = quaternion[..., 2]\n q3: torch.Tensor = quaternion[..., 3]\n sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3\n\n sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta)\n cos_theta: torch.Tensor = quaternion[..., 0]\n two_theta: torch.Tensor = 2.0 * torch.where(\n cos_theta < 0.0,\n torch.atan2(-sin_theta, -cos_theta),\n torch.atan2(sin_theta, cos_theta))\n\n k_pos: torch.Tensor = two_theta / sin_theta\n k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta)\n k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg)\n\n angle_axis: torch.Tensor = torch.zeros_like(quaternion)[..., :3]\n angle_axis[..., 0] += q1 * k\n angle_axis[..., 1] += q2 * k\n angle_axis[..., 2] += q3 * k\n return angle_axis" ]
[ "0.81078196", "0.78859186", "0.74368423", "0.7374491", "0.7312507", "0.7310099", "0.7273846", "0.7225561", "0.71868175", "0.71868175", "0.70643413", "0.70557725", "0.70138526", "0.7002238", "0.6987983", "0.6980551", "0.69750994", "0.6967742", "0.69532573", "0.693683", "0.69265157", "0.6916285", "0.69101995", "0.6898823", "0.68858194", "0.6855879", "0.68455595", "0.6795108", "0.67902833", "0.678924", "0.67883575", "0.67883575", "0.67879033", "0.67862207", "0.6784926", "0.6784926", "0.6784926", "0.67847586", "0.6781819", "0.6779478", "0.6766697", "0.6763952", "0.67571217", "0.67553174", "0.6750495", "0.6743776", "0.6727128", "0.6709489", "0.66866904", "0.6682864", "0.6679887", "0.6636408", "0.6627751", "0.66131246", "0.6596633", "0.6563699", "0.6529953", "0.6509539", "0.65014935", "0.6473154", "0.64720404", "0.6449556", "0.6448447", "0.6440168", "0.6401755", "0.64010924", "0.63954896", "0.63945967", "0.6384845", "0.6381444", "0.6377023", "0.63752574", "0.63701206", "0.6362999", "0.6362834", "0.6350111", "0.6346118", "0.6337946", "0.6330967", "0.632867", "0.6324008", "0.6322284", "0.6316864", "0.62933844", "0.62874067", "0.6280739", "0.62769717", "0.6271926", "0.62709135", "0.62533206", "0.62469906", "0.624311", "0.62273335", "0.6223563", "0.62186", "0.62161565", "0.6177931", "0.616782", "0.61644995", "0.6164224", "0.61635375" ]
0.0
-1
Sort the buses reversed by their period, having tagged them with their position in the sequence, which is their c value. >>> list(prep_input(EXAMPLE_BUSES)) [(59, 4), (31, 6), (19, 7), (13, 1), (7, 0)]
def prep_input(buses): return sorted([(bus, offset) for offset, bus in enumerate(buses) if bus], reverse=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def process_bc_freqs(bc_freqs):\r\n\r\n bcs_list = []\r\n for curr_key in bc_freqs.keys():\r\n bcs_list.append((curr_key, int(bc_freqs[curr_key])))\r\n\r\n bcs_list = sorted(bcs_list, key=itemgetter(1), reverse=True)\r\n\r\n sorted_bcs = []\r\n for curr_bc in bcs_list:\r\n sorted_bcs.append(\"%s\\t%d\" % (curr_bc[0], curr_bc[1]))\r\n\r\n return sorted_bcs", "def bsort(seq, cmp):\n sorted = False # assume the seq is not sorted to start with\n while not sorted:\n sorted = True # assume it's already sorted correctly\n for index, value in enumerate(seq): # for every element in seq\n if index > 0: # past the first..\n if not cmp(seq[index-1], value): # if this element is out of order\n sorted = False # then the list is not sorted yet\n seq[index-1], seq[index] = seq[index], seq[index-1] # and swap it", "def calculate_finishing_order(x):\n\t# Creates a list of keys which are sorted by their values\n\n\treturn [sailor_names for sailor_names,sailorValues in sorted(x.items(), key=lambda y: y[1], reverse=True)]", "def sort_auto(self):\n key = lambda buz1, buz2: buz1 if buz1.trip_duration <= buz2.trip_duration else buz2\n self.autobuze.sort(key=key)", "def _sort_manber_myers(self, suffix_pos: List) -> List:\n bucket = self._create_bucket(suffix_pos)\n for _, v in sorted(bucket.items()):\n if self.debug: print(f\"_sort_manber_myers function: bucket value: {v}\") \n if len(v) > 1:\n # recursive call for next stage\n self.stage *= 2\n self._sort_manber_myers(v)\n else:\n # otherwise add starting position of suffix to result\n self.suffixes.append(v[0]) \n if self.debug: print(f\"_sort_manber_myers function: suffixes: {self.suffixes}\\n\") \n return self.suffixes", "def _reversed_insts_to_header(self, bb):\n rinsts = []\n for bb in self._bbs_to_header(bb):\n rinsts.extend(reversed(bb.insts))\n return rinsts", "def sort_by_bfile(self, bfiles, events_by_b):\n if len(bfiles) > 1:\n print \"Warning: Several .b files detected. 
Using the first in list:\"\n print bfiles[0]\n with open(bfiles[0], 'r') as bf:\n for line in bf:\n data = line.split()\n try:\n jobid = data[0]\n impb = float(data[1])\n if impb >= self._bmin and impb <= self._bmax:\n events_by_b.append(self.outputname(jobid))\n except ValueError:\n continue", "def bordasOf(self, bundle):\n\t\treturn sorted([self.borda[item] for item in bundle], reverse=True)", "def comb_sort(data):\n shrink_factor = 1.3\n gap = len(data)\n swapped = True\n i = 0\n\n while gap > 1 or swapped:\n # Update the gap value for a next comb\n gap = int(float(gap) / shrink_factor)\n\n swapped = False\n i = 0\n\n while gap + i < len(data):\n if data[i] > data[i + gap]:\n # Swap values\n data[i], data[i + gap] = data[i + gap], data[i]\n swapped = True\n i += 1\n\n return data", "def getRevCodonSeqs(self):\r\n compDict = {'A': 't', 'T': 'a', 'G': 'c', 'C': 'g'} # nuc compliments for reverse strand\r\n revPep = [] # list to hold the temporary reverse peptides before incorporation into the complete list\r\n for seq in self.allPepSeqs:\r\n revSeq = seq[::-1] # reverses the strand to be prepped for nt compliments\r\n for nuc in compDict:\r\n revSeq = revSeq.replace(nuc, compDict[nuc]) # replaces nt's with their compliments\r\n revSeq = revSeq.upper()\r\n revPep.append(revSeq)\r\n for i in revPep:\r\n self.allPepSeqs.append(i) # adds the reverse strand peptide to the list of possible peptide seqs\r\n return", "def sort_currency_list_if_changed(self):\r\n currency_list = self.gox.wallet.keys()\r\n if len(currency_list) == len(self.sorted_currency_list):\r\n return\r\n\r\n # now we will bring base and quote currency to the front and sort the\r\n # the rest of the list of names by acount balance in descending order\r\n if self.gox.curr_base in currency_list:\r\n currency_list.remove(self.gox.curr_base)\r\n if self.gox.curr_quote in currency_list:\r\n currency_list.remove(self.gox.curr_quote)\r\n currency_list.sort(key=lambda name: -self.gox.wallet[name])\r\n currency_list.insert(0, self.gox.curr_quote)\r\n currency_list.insert(0, self.gox.curr_base)\r\n self.sorted_currency_list = currency_list", "def buble_sort(l):\r\n for i in range(len(l)):\r\n for j in range(i+1, len(l)):\r\n if (l[j-1]>l[j]):\r\n l[j-1], l[j] = l[j], l[j-1]", "def bubbleSort(list):", "def bubbleSort(sequence):\n n = len(sequence)\n # Perform n-1 bubble operations on the sequence\n for i in range(n - 1):\n # Bubble the largest item to the end.\n for j in range(n - i - 1):\n if sequence[j] > sequence[j+1]:\n sequence[j], sequence[j+1] = sequence[j+1], sequence[j]", "def sorter(sequence):\n def bubble_sort(a):\n \"\"\"\n This function sort the list\n \"\"\"\n for i in reversed(range(len(a))):\n for j in range(1, i + 1):\n if a[j-1] > a[j]:\n a[j], a[j-1] = a[j-1], a[j]\n return a\n\n listed_seq = list(sequence)\n for number in listed_seq:\n if not isinstance(number, int):\n raise ValueError(\"Can't find max, wrong data format\")\n return bubble_sort(listed_seq)[-1]", "def reversesort(self):\n ...", "def sorter(sequence):\n def bubble_sort(a):\n \"\"\"\n This function sort the list\n \"\"\"\n for i in reversed(range(len(a))):\n for j in range(1, i + 1):\n if a[j-1] > a[j]:\n a[j], a[j-1] = a[j-1], a[j]\n return a\n\n listed_seq = list(sequence)\n for number in listed_seq:\n if not isinstance(number, int):\n raise ValueError(\"Can't find max, wrong data format\")\n return bubble_sort(listed_seq)[0]", "def BubbleSort(ulist):\n done = 0 #This variable is used to break the loop when sorting is done\n while not done:\n done = 
1\n for i in range(len(ulist) - 1):\n if ulist[i] > ulist[i+1]:\n ulist[i], ulist[i+1] = ulist[i+1], ulist[i]\n done = 0", "def sort_fasta_by_abundance(fasta_lines, fasta_out_f):\r\n seq_index = {}\r\n count = 0\r\n for seq_id, seq in parse_fasta(fasta_lines):\r\n count += 1\r\n try:\r\n seq_index[seq].append(seq_id)\r\n except KeyError:\r\n seq_index[seq] = [seq_id]\r\n\r\n seqs = []\r\n for k, v in seq_index.items():\r\n seqs.append((len(v), k, v))\r\n del seq_index[k]\r\n seqs.sort()\r\n for count, seq, seq_ids in seqs[::-1]:\r\n for seq_id in seq_ids:\r\n fasta_out_f.write('>%s\\n%s\\n' % (seq_id, seq))", "def burbuja(lista:list):\n vector = lista\n for i in range(0, len(vector)-1):\n for j in range(0, len(vector)-1):\n if vector[j] > vector[j+1]:\n tmp = vector[j+1]\n vector[j+1] = vector[j]\n vector[j] = tmp\n return vector", "def order_ideal(self, gens):", "def order(inputspectra):\n \n if np.all(np.diff(inputspectra.x_values) <= 0):\n inputspectra.x_values = inputspectra.x_values[::-1]\n inputspectra.spectra = inputspectra.spectra[:,::-1]\n return inputspectra", "def test_sort_reversed():\n reverse_sorted_data = [3, 2, 1]\n sorted_data = bubble_sort(reverse_sorted_data)\n assert sorted_data == [1, 2, 3]", "def test_bd_cycles_ascending(fprime_test_api):\n length = 60\n count_pred = predicates.greater_than(length - 1)\n results = fprime_test_api.await_telemetry_count(\n count_pred, \"blockDrv.BD_Cycles\", timeout=length\n )\n last = None\n reordered = False\n ascending = True\n for result in results:\n if last is not None:\n last_time = last.get_time()\n result_time = result.get_time()\n if result_time - last_time > 1.5:\n msg = \"FSW didn't send an update between {} and {}\".format(\n last_time.to_readable(), result_time.to_readable()\n )\n fprime_test_api.log(msg)\n elif result_time < last_time:\n msg = \"There is potential reorder error between {} and {}\".format(\n last_time, result_time\n )\n fprime_test_api.log(msg)\n reordered = True\n\n if not result.get_val() > last.get_val():\n msg = \"Not all updates ascended: First ({}) Second ({})\".format(\n last.get_val(), result.get_val()\n )\n fprime_test_api.log(msg)\n ascending = False\n\n last = result\n\n case = True\n case &= fprime_test_api.test_assert(\n ascending, \"Expected all updates to ascend.\", True\n )\n case &= fprime_test_api.test_assert(\n not reordered, \"Expected no updates to be dropped.\", True\n )\n fprime_test_api.predicate_assert(\n count_pred,\n len(results) - 1,\n \"Expected >= {} updates\".format(length - 1),\n True,\n )\n fprime_test_api.assert_telemetry_count(0, \"rateGroup1Comp.RgCycleSlips\")\n assert case, \"Expected all checks to pass (ascending, reordering). 
See log.\"", "def sort_barcodes(barcode_list):\n return sorted(barcode_list, key=alphanum_key)", "def bubble_sort_modificado(a):\n N = len(a)\n n = 0\n b = False\n while (n != N) and (not b):\n k = N - 1\n b = True\n while k != n:\n if a[k-1] > a[k]:\n b = False\n a[k-1], a[k] = a[k], a[k-1]\n k -= 1\n n += 1", "def bubble_sort(dataset):\n\t# start with array length and decrement each time \n\tarrayLen = len(dataset)\n\tbubbleIndex = len(dataset) - 1\n\twhile bubbleIndex != 0:\n\t\tarrayIndex = 0\n\t\twhile arrayIndex < arrayLen - 1:\n\t\t\tthisVal = dataset[arrayIndex]\n\t\t\tnextVal = dataset[arrayIndex + 1]\n\t\t\tif thisVal > nextVal:\n\t\t\t\tdataset[arrayIndex + 1] = thisVal\n\t\t\t\tdataset[arrayIndex] = nextVal\n\t\t\tarrayIndex += 1\n\t\tprint \"Current State:\", dataset\n\t\tbubbleIndex -= 1", "def bubble_sort(input_list):\n \n length = len(input_list)\n \n for i in range(length):\n for j in range(length-i-1):\n if input_list[j] > input_list[j+1]:\n input_list[j], input_list[j+1] = input_list[j+1], input_list[j]\n \n return input_list", "def order_vep_by_csq(annotation_list):\n for ann in annotation_list:\n ann['major_consequence'] = worst_csq_from_csq(ann['Consequence'])\n return sorted(annotation_list, key=(lambda ann:csq_order_dict[ann['major_consequence']]))", "def sort_descending(list_in):\n return list(map(str, list_in)).sort(key=lambda f: int(''.join(filter(str.isdigit, f))))", "def cp_to_bt(cp):\n if isinstance(cp, ExchangeForSpecies):\n return [cp.played_card_index] + cp.loi\n else:\n return None", "def sortByDate(inlist):\n\n seq = []\n for i, each in enumerate(inlist):\n # Lightly parse each flight (just reads the preamble)\n # Putting the last 3 returns of MISlightly into the _ junk var\n flight, _, _, _ = parseMISlightly(each)\n seq.append(flight.takeoff)\n\n # Sort by takeoff time (flight.takeoff is a datetime obj!)\n newseq = np.argsort(seq)\n\n return newseq", "def sequence_elements(self):\n seq_model = self.opt_model.seq_model\n self.elements.sort(key=lambda e:\n seq_model.ifcs.index(e.reference_interface()))", "def bubble_sort(a):\n for i in reversed(range(len(a))):\n for j in range(1, i + 1):\n if a[j-1] > a[j]:\n a[j], a[j-1] = a[j-1], a[j]\n return a", "def bubble_sort(a):\n for i in reversed(range(len(a))):\n for j in range(1, i + 1):\n if a[j-1] > a[j]:\n a[j], a[j-1] = a[j-1], a[j]\n return a", "def get_all_peptides(nuc_seq):\n # TODO - Refactor to use a generator function (in start order)\n # rather than making a list and sorting?\n answer = []\n full_len = len(nuc_seq)\n if options.strand != \"reverse\":\n for frame in range(0, 3):\n for offset, n, t in break_up_frame(nuc_seq[frame:]):\n start = frame + offset # zero based\n answer.append((start, start + len(n), +1, n, t))\n if options.strand != \"forward\":\n rc = reverse_complement(nuc_seq)\n for frame in range(0, 3):\n for offset, n, t in break_up_frame(rc[frame:]):\n start = full_len - frame - offset # zero based\n answer.append((start - len(n), start, -1, n, t))\n answer.sort()\n return answer", "def bubble_sort(items):\n for i in range(len(items)):\n for j in range(len(items)-1-i):\n if items[j] > items[j+1]:\n items[j], items[j+1] = items[j+1], items[j] # Swap!", "def test_reversed_version_sorting(self):\n assert natsort(['1', '5', '10', '50'], reverse=True) == ['50', '10', '5', '1']", "def test_hookimpls_can_be_sorted_by_the_order():\n # given\n hooks = [\n HookImpl(\"what\", \"when\", None, [], 1),\n HookImpl(\"what\", \"when\", None, [], 10),\n HookImpl(\"what\", \"when\", None, [], 5),\n 
HookImpl(\"what\", \"when\", None, [], 2),\n HookImpl(\"what\", \"when\", None, [], 30),\n HookImpl(\"what\", \"when\", None, [], 8),\n HookImpl(\"what\", \"when\", None, [], 7),\n ]\n\n # when\n sorted_hooks = sorted(hooks)\n\n # then\n assert sorted_hooks == [\n HookImpl(\"what\", \"when\", None, [], 1),\n HookImpl(\"what\", \"when\", None, [], 2),\n HookImpl(\"what\", \"when\", None, [], 5),\n HookImpl(\"what\", \"when\", None, [], 7),\n HookImpl(\"what\", \"when\", None, [], 8),\n HookImpl(\"what\", \"when\", None, [], 10),\n HookImpl(\"what\", \"when\", None, [], 30),\n ]", "def _GetEpiOrder(self):\n self.epi_series.sort()\n for series in self.epi_series:\n self.GetEpiAcqTimes(series)\n self.AssignEpiNames()", "def test_sort_outputs_0a6a357e(self):\n outputs = bip69.get_outputs_from_rpc_json(self.tx_json_0a6a357e)\n bip69_outputs = bip69.sort_outputs(outputs)\n self.assertEqual(bip69_outputs[0], (('76a9144a5fba237213a062f6f57978f79'\n '6390bdcf8d01588ac'), 400057456))\n self.assertEqual(bip69_outputs[1], (('76a9145be32612930b8323add2212a4ec'\n '03c1562084f8488ac'), 40000000000))", "def bifurcation_diagram(args, Bpbmin, Bpbmax, ylim=(-1, 0.6)):\n\n xs = []\n Bpb_list = np.linspace(Bpbmin, Bpbmax, 100)\n Iext, G, Ein, Eex, eps, a, b, A, Bpb, Bbp, vsl = args\n\n sol, t = calcODE(args, -1.5, -1.5, 0.5, 0.5, 0.5, 0.5, ts=4000, nt=2 ** 25)\n sol = sol[-len(sol) // 2:, :]\n t = t[-len(t) // 2:]\n\n x0 = sol[0, :]\n n = np.array(ode(x0, t[0], *args))\n q, _ = np.linalg.qr(n[:, None], mode='complete')\n\n periods = []\n for Bpb in Bpb_list:\n args = (Iext, G, Ein, Eex, eps, a, b, A, Bpb, Bbp, vsl)\n sol, t = calcODE(args, *sol[-1, :], ts=1000, nt=2 ** 15)\n sol = sol[-len(sol) // 2:, :]\n t = t[-len(t) // 2:]\n\n for i in range(len(sol) - 1):\n x1 = sol[i]\n x2 = sol[i + 1]\n if np.sign(n @ (x2 - x0)) != np.sign(n @ (x1 - x0)):\n c1 = dist(x1, x0, n)\n c2 = dist(x2, x0, n)\n alpha = c2 / (c1 + c2)\n x_new = x1 + alpha * (x2 - x1)\n x = (x_new - x0).dot(q)\n xs.append((Bpb, x[0], x[1], x[2], x[3], x[4], x[5]))\n # if np.linalg.norm(x_new - x0) < 1e-2 and period is None:\n period = t[i] - periods[-1][-1] if len(periods) else 0\n periods.append((Bpb, period, np.linalg.norm(x_new - x0), t[i]))\n\n plt.figure(figsize=(15, 10))\n plt.scatter([i[0] for i in xs], [i[2] for i in xs], s=10)\n plt.xlabel('$B_{pb}$')\n\n # plt.ylim(ylim)\n plt.show()\n\n periods = [i for i in periods if i[1] > 0]\n\n return periods, xs", "def bubble_sort(data):\n the_list = list(data)\n\n length = len(the_list)\n \"\"\" Iterate from the start to the back,\n then go to back - 1 \"\"\"\n while length >= 1:\n innerloop(the_list)\n length -= 1\n\n return the_list", "def sort_descending(self):\n # sort_descending_sitem = self.locator_finder_by_idx(self.sort_descending_id)\n # sort_descending_sitem = sort_descending_sitem.find_element_by_xpath(\"./..\")\n # sort_descending_sitem.click()\n # time.sleep(2)\n \n if self.current_package_version() == semver.VersionInfo.parse(\"3.8.0\"):\n sort_by_descending = '//*[@id=\"collectionsDropdown\"]/ul[3]/li[4]/a/label/i'\n sort_descending_sitem = self.locator_finder_by_xpath(sort_by_descending)\n else:\n sort_descending_sitem = self.locator_finder_by_xpath(self.sort_descending_id)\n sort_descending_sitem.click()\n time.sleep(2)", "def bogosort(to_sort):\n # Be sure to sort the list at each pass in the while loop to make it extra\n # inefficient!\n while sorted(to_sort) != to_sort:\n shuffle(to_sort)", "def test_sort_reversed():\n assert bubble_sort([5, 4, 3, 2, 1]) == [1, 2, 3, 4, 5]", 
"def _sort_compounds(self):\n self.sorted_molecules = sorted(self.values(), key=operator.attrgetter('criterion'))", "def cocktail_sort(num_list):\n\n # Setting variables\n start_index = 0\n end_index = len(num_list) - 1\n swapped = True\n\n while swapped:\n\n # Pass moves up\n swapped = False\n for i in range(start_index, end_index, 1):\n # Exchanges items\n if num_list[i] > num_list[i + 1]:\n temp = num_list[i]\n num_list[i] = num_list[i + 1]\n num_list[i + 1] = temp\n swapped = True\n end_index -= 1\n\n # Pass moves down\n swapped = False\n for i in range(end_index, start_index, -1):\n # Exchanges items\n if num_list[i] < num_list[i - 1]:\n temp = num_list[i]\n num_list[i] = num_list[i - 1]\n num_list[i - 1] = temp\n swapped = True\n start_index += 1", "def sort(self):\n self.chain_list.sort()\n for chain in self.chain_list:\n chain.sort()", "def test_components_dff_sorting():\n \n ic = IC(\"Mixed\", {\"in\": 1}, {\"out\": 1})\n nand1 = Nand()\n dff1 = DFF()\n nand2 = Nand()\n ic.wire(Connection(root, \"in\", 0), Connection(nand1, \"a\", 0))\n ic.wire(Connection(root, \"in\", 0), Connection(nand1, \"b\", 0))\n ic.wire(Connection(nand1, \"out\", 0), Connection(dff1, \"in_\", 0))\n ic.wire(Connection(dff1, \"out\", 0), Connection(nand2, \"a\", 0))\n ic.wire(Connection(dff1, \"out\", 0), Connection(nand2, \"b\", 0))\n ic.wire(Connection(nand2, \"out\", 0), Connection(root, \"out\", 0))\n\n # Note: relative order of the Nands doesn't really matter here\n assert ic.flatten().sorted_components() == [nand2, nand1, dff1]", "def process(container_: any):\n\n while True:\n total_swaps = 0\n for idx in range(0, len(container_)-1, 1):\n if container_[idx] > container_[idx+1]:\n container_[idx + 1], container_[idx] = container_[idx], container_[idx + 1]\n total_swaps += 1\n if total_swaps == 0:\n break\n return container_", "def sort(self):\n self.notes.sort()", "def _sort_by_duration(self) -> None:\n total_samples = len(self.paths)\n if total_samples == 0:\n return\n samples = zip(self.paths, self.durations, self.transcriptions)\n sorted_samples = sorted(samples, key=lambda sample: sample[1])\n self.paths, self.durations, self.transcriptions = [\n list(c) for c in zip(*sorted_samples)\n ]\n assert (\n total_samples\n == len(self.paths)\n == len(self.durations)\n == len(self.transcriptions)\n ), \"_sort_by_duration len mis-match\"", "def order_qubits(fermionic_term):\n coeff = fermionic_term.coeff\n pauli_op = fermionic_term.op\n qbits = fermionic_term.qbits\n\n ind_c = pauli_op.index(\"c\")\n qbits_C = qbits[:ind_c]\n qbits_c = qbits[ind_c:]\n new_qbits = []\n \n for qbits_op in [qbits_C, qbits_c]:\n qbits_temp = qbits_op[:]\n ordered = False\n while not ordered:\n ind = 0\n while ind < len(qbits_temp) - 1 and qbits_temp[ind] <= qbits_temp[ind + 1]:\n if qbits_temp[ind] == qbits_temp[ind + 1]:\n return \n ind += 1\n if ind < len(qbits_temp) - 1:\n ind += 1\n new_ind = 0\n while qbits_temp[new_ind] < qbits_temp[ind]:\n new_ind += 1\n elt_not_in_order = qbits_temp.pop(ind)\n qbits_temp.insert(new_ind, elt_not_in_order)\n coeff *= (-1)**(ind - new_ind)\n else:\n ordered = True\n new_qbits += qbits_temp\n return Term(coefficient=coeff, pauli_op=pauli_op, qbits=new_qbits)", "def sort_output_desc_asc(word_counts):\n\n # sort by item (-item[1] refers to reverse list of second item)\n sorted_items = sorted(word_counts.items(), key=lambda item: (-item[1], item[0]))\n \n for key, value in sorted_items:\n print \"{} {}\".format(value, key)", "def to_bplist(self):\n ts_type = self.ts_types['bplist']\n try:\n 
dt_obj = duparser.parse(self.timestamp)\n if hasattr(dt_obj.tzinfo, '_offset'):\n dt_tz = dt_obj.tzinfo._offset.total_seconds()\n dt_obj = duparser.parse(self.timestamp, ignoretz=True)\n else:\n dt_tz = 0\n self.out_bplist = str(int((dt_obj - self.epoch_2001).total_seconds()) - int(dt_tz))\n ts_output = str(\"{}\\t\\t{}\".format(ts_type, self.out_bplist))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_bplist = ts_output = False\n return self.out_bplist, ts_output", "def bubble_sort(items):\n out = items.copy() # in place protection on items\n for i in range(len(out)):\n for j in range(len(out)-1-i):\n if out[j] > out[j+1]:\n out[j], out[j+1] = out[j+1], out[j] # Swap!\n\n return out", "def sort_rings(ring_list, om_pickle_file):\n basic_output_on.dprint(\"\\nSorting closed rings...\",'nr')\n bdry_ring = max(ring_list, key=lambda rg: rg.maxR)\n outside_point = bdry_ring.center + 2*bdry_ring.maxR # is outside all rings\n\n sorted_closed_ring_indices = ['core']\n sorted_closed_ring_indices += \\\n sorted([rl_ind for rl_ind, r in enumerate(ring_list) if r.isClosed()],\n key=lambda idx: ring_list[idx].maxR)\n\n closed_pairs = [ClosedPair(ring_list,\n outside_point,\n sorted_closed_ring_indices[k-1],\n sorted_closed_ring_indices[k])\n for k in range(1, len(sorted_closed_ring_indices))]\n\n # Find the lines to the boundary and the path given\n if not use_alternative_sorting_method:\n center = ring_list[0].center\n d = 1.5 * bdry_ring.maxR\n pts = [center - d + d*1j, center - d - d*1j,\n center + d - d*1j, center + d + d*1j]\n rectangle_containing_bdry = \\\n Path(*[Line(pts[i], pts[(i+1) % 4]) for i in range(4)])\n for r in ring_list:\n if not r.isClosed():\n r.findLines2Bdry(rectangle_containing_bdry)\n\n # figure out which open (incomplete) rings live between which closed rings\n basic_output_on.dprint(\n \"Done, closed rings sorted.\\nNow determining which open rings \"\n \"lie between which closed pairs of rings...\", 'nr'\n )\n start_time = current_time()\n unlocated_open_ring_indices = \\\n set(i for i, r in enumerate(ring_list) if not r.isClosed())\n\n for cp in closed_pairs:\n cp.contents = [r_idx for r_idx in unlocated_open_ring_indices\n if cp.contains(r_idx)]\n unlocated_open_ring_indices -= set(cp.contents)\n\n # there should not be any unlocated open ring indices\n # in case there are, this is likely caused by intersections\n if unlocated_open_ring_indices:\n debug_unlocated_rings_and_raise_error(\n unlocated_open_ring_indices, ring_list, closed_pairs)\n\n basic_output_on.dprint(\n \"\\rFinished locating open rings. 
Total time elapsed: %s\"\n \"\" % format_time(current_time()-start_time))\n\n# ###DEBUG ONLY TEST slideshow (of which rings are put in which closed ring pairs)\n# basic_output_on.dprint(\"creating slideshow of which rings are located between which closed ring pairs...\",'nr')\n# from os import path as os_path\n# from options4rings import output_directory\n# from andysSVGpathTools import svgSlideShow\n# save_dir = os_path.join(output_directory,'debug','slideshow_closed_pair_inclusions')\n# pathcolortuplelist = []\n# paths = [ring.path for ring in ring_list]\n# for cp in closed_pairs:\n# colors = ['yellow']*len(paths)\n# if cp.inner_index !='core':\n# colors[cp.inner_index] = 'red'\n# colors[cp.outer_index] = 'green'\n# for i in cp.contents:\n# colors[i] = 'blue'\n# pathcolortuplelist.append((paths,colors))\n# svgSlideShow(pathcolortuplelist,save_directory=save_dir,clear_directory=True,suppressOutput=not basic_output_on.b)\n# ###End of DEBUG ONLY TEST slideshow (of which rings are put in which closed ring pairs)\n\n # sort the open rings inside each pair of closed rings\n start_time = current_time()\n \n ordering_matrices_pickle_extant = False\n if look_for_user_sort_input:\n try:\n ordering_matrices = pickle.load(open(om_pickle_file, \"rb\"))\n ordering_matrices_pickle_extant = True\n except:\n from warnings import warn\n warn(\"No ordering matrices pickle file found.\");sleep(1)\n\n class RingIndexCmp:\n def __init__(self, outer_closed_path):\n self.boundary = outer_closed_path\n\n if opt.use_alternative_sorting_method:\n def __call__(self, idx1, idx2):\n return ring1_isoutside_ring2_cmp_alt(\n ring_list, idx1, idx2, boundary=self.boundary\n )\n else:\n def __call__(self, idx1, idx2):\n return ring1_isoutside_ring2_cmp(\n ring_list[idx1], ring_list[idx2], outside_point,\n self.boundary\n )\n\n basic_output_on.dprint(\"Sorting open rings inside each cp...\")\n start_time_cp_sorting = current_time()\n et = 0\n cp_oms = []\n flag_count = 0\n num_seg_pairs2check = sum([sum([len(ring_list[i].path)*(len(ring_list[j].path)-1)/2 for (i,j) in combinations(cp.contents,2)]) for cp in closed_pairs])\n num_seg_pairs_checked = 0\n cyclic_dependencies = []\n for k, cp in enumerate(closed_pairs):\n if not len(cp.contents):\n if not ordering_matrices_pickle_extant:\n cp_oms.append([])\n continue\n if ordering_matrices_pickle_extant:\n om = ordering_matrices[k]\n #THIS BLOCK IS REPLACED BELOW (DELETE BLOCK)...\n# for i in len(om):\n# for j in len(om):\n# if isnan(om[i,j]):\n# om[i,j] = ask_user_to_sort(i,j,ring_list,make_svg=True,ask_later=False)\n# om[j,i] = -om[i,j] #...THIS BLOCK IS REPLACED BELOW (DELETE BLOCK)\n tmp_time = current_time()\n for i,j in transpose(where(isnan(om))):\n if i<j:\n om[i,j] = ask_user_to_sort(cp.contents[i], \n cp.contents[j],\n ring_list,make_svg=True, \n ask_later=False)\n om[j,i] = -om[i,j]\n start_time_cp_sorting -= current_time() - tmp_time \n else:\n om = createOrderingMatrix(cp.contents, RingIndexCmp(cp.outer.path))\n cp_oms.append(om)\n try:\n assert not any(flattenList(isnan(om)))\n except AssertionError:\n flag_count += 1\n pass\n num_seg_pairs_checked += sum(\n len(ring_list[i].path) * (len(ring_list[j].path) - 1) / 2\n for i, j in combinations(cp.contents, 2)\n )\n\n try: # lazy fix for test cases where num_seg_pairs2check==0\n percent_complete = num_seg_pairs_checked/num_seg_pairs2check\n except ZeroDivisionError:\n percent_complete = k/len(closed_pairs)\n pass\n\n if not flag_count:\n psorting, cp_cyclic_dependencies = \\\n topo_sorted(cp.contents, 
RingIndexCmp(cp.outer.path), ordering_matrix=om)\n if cp_cyclic_dependencies:\n cyclic_dependencies.append(cp_cyclic_dependencies)\n\n cp.contents = [cp.contents[index] for index in flattenList(psorting)]\n cp.contents_psorting = psorting\n et_tmp = current_time() - start_time_cp_sorting\n \n if et_tmp > et + 3:\n et = et_tmp\n etr = (1-percent_complete)*et/percent_complete\n basic_output_on.dprint(\"%s percent complete. Time Elapsed = %s | ETR = %s\"%(int(percent_complete*100),format_time(et),format_time(etr)))\n\n if cyclic_dependencies:\n deps_string = '\\n'.join(map(str, cyclic_dependencies))\n message = f\"The following cyclic dependencies were found:\\n{deps_string}\"\n message += \"\\nPlease see the following debug files for visuals:\\n\"\n for i, cp_cyclic_dependencies in enumerate(cyclic_dependencies):\n for k, v in cp_cyclic_dependencies.items():\n paths, path_colors = [], ''\n paths.append(ring_list[k].path)\n paths.extend([ring_list[vk].path for vk in v])\n path_colors += 'r' + 'b' * len(v)\n fp = os.path.join(opt.output_directory_debug,\n f'cyclic_dependency_{i}_{k}.svg')\n disvg(paths, path_colors, filename=fp)\n message += f'{fp}\\n'\n raise ValueError(message)\n\n #Output problem cases for manual sorting\n from options4rings import output_directory\n from os import path as os_path\n from andysmod import output2file\n manual_sort_csvfile = os_path.join(output_directory,\"interactive_sorting\",ring_list[0].svgname,\"manual_comparisons.csv\")\n str_out = ''\n if flag_count:\n pickle.dump(cp_oms, open(om_pickle_file, \"wb\"))\n output2file(str_out,filename=manual_sort_csvfile,mode='w')\n for k,om in enumerate(cp_oms):\n cp = closed_pairs[k]\n problem_pairs = [(cp.contents[i],cp.contents[j]) for i,j in transpose(where(isnan(om))) if i<j]\n problem_pairs = sorted(problem_pairs,key=itemgetter(0))\n for (idx_i,idx_j) in problem_pairs:\n str_out+='%s,%s,\\n'%(idx_i,idx_j)\n output2file(str_out,filename=manual_sort_csvfile,mode='a')\n\n raise Exception(\"There are %s rings pairs that need to be manually sorted. Please set 'look_for_user_sort_input=True' and run this svg again. Note: When you run again, there will be an interactive interface to help you sort, but it may be easier to manually enter the needed comparisons in\\n%s\"%(flag_count,manual_sort_csvfile))\n basic_output_on.dprint(\"Done with inner ring sorting (in %s). 
Finished with %s error flags.\"%(format_time(current_time()-start_time),flag_count))\n\n # Note: sort_lvl info records the number of other rings in the same \n # sort level, so in the future I can output psort_index values as 3.0, 3.1, etc\n ring_sorting = [cp.contents+[cp.outer_index] for cp in closed_pairs]\n ring_sorting = flattenList(ring_sorting)\n sort_lvl_info = []\n# for cp in closed_pairs:\n# for sort_lvl in cp.contents_psorting:\n# sort_lvl_info += [len(sort_lvl)]*len(sort_lvl)\n# sort_lvl_info += [1] # for outer ring in cp\n return ring_sorting, sort_lvl_info", "def ot2bieos_ts(ts_tag_sequence):\n n_tags = len(ts_tag_sequence)\n new_ts_sequence = []\n prev_pos = '$$$'\n for i in range(n_tags):\n cur_ts_tag = ts_tag_sequence[i]\n if cur_ts_tag == 'O':\n new_ts_sequence.append('O')\n cur_pos = 'O'\n else:\n cur_pos, cur_sentiment = cur_ts_tag.split('-')\n # cur_pos is T\n if cur_pos != prev_pos:\n # prev_pos is O and new_cur_pos can only be B or S\n if i == n_tags - 1:\n new_ts_sequence.append('S-%s' % cur_sentiment)\n else:\n next_ts_tag = ts_tag_sequence[i + 1]\n if next_ts_tag == 'O':\n new_ts_sequence.append('S-%s' % cur_sentiment)\n else:\n new_ts_sequence.append('B-%s' % cur_sentiment)\n else:\n # prev_pos is T and new_cur_pos can only be I or E\n if i == n_tags - 1:\n new_ts_sequence.append('E-%s' % cur_sentiment)\n else:\n next_ts_tag = ts_tag_sequence[i + 1]\n if next_ts_tag == 'O':\n new_ts_sequence.append('E-%s' % cur_sentiment)\n else:\n new_ts_sequence.append('I-%s' % cur_sentiment)\n prev_pos = cur_pos\n return new_ts_sequence", "def de_zigzag(zigzag_blocks):\n blocks = []\n zigzag_arr = []\n last_DC = 0\n for zb in zigzag_blocks:\n for s, v in zb:\n if s == _ZRL:\n zigzag_arr.extend([0] * 15)\n elif s == _EOB:\n zigzag_arr.extend([0] * (64 - len(zigzag_arr)))\n blocks.append(zigzag(zigzag_arr))\n zigzag_arr.clear()\n elif s == _DC:\n last_DC += v\n zigzag_arr.append(last_DC)\n else: # AC\n zigzag_arr.extend([0] * s)\n zigzag_arr.append(v)\n return blocks", "def bubble_sort(items):\n for i in range(len(items)):\n for j in range(len(items) - 1 - i):\n if items[j] > items[j + 1]:\n items[j], items[j + 1] = items[j + 1], items[j]\n return items", "def bubble_sort(items):\n # TODO: Repeat until all items are in sorted order\n # TODO: Swap adjacent items that are out of order\n for x in range(len(items)-1):\n if items[x]>items[x+1]:\n temp = items[x]\n items[x] = items[x+1]\n items[x+1] = temp", "def sort_and_aggregate_periods(budget_model):\n # Calculate total number of months.\n periods = budget_model[\"periods\"] \n total_number_of_months = len(periods) \n budget_model[\"total_number_of_months\"] = total_number_of_months\n\n # Get the reference to the total revenue in the budget model.\n total_revenue = budget_model[\"total_revenue\"]\n\n # Initialize variables used to calculate greatest increase in revenue.\n greatest_increase_revenue = 0\n greatest_increase_name = \"\"\n\n # Initialize variables used to calculate greatest decrease in revenue.\n greatest_decrease_revenue = 0\n greatest_decrease_name = \"\"\n\n # Retrieve sort keys for budget model and sort them into chronological order.\n period_keys = periods.keys()\n period_key_list = list(period_keys)\n period_key_list.sort()\n\n # Initialize previous revenue.\n # There is no revenue change for the first period.\n previous_revenue = periods[period_key_list[0]][\"revenue\"]\n total_revenue_change = 0\n\n # Calculate aggregations by processing periods in chronological order.\n for period_key in period_key_list:\n 
period = periods[period_key]\n total_revenue = total_revenue + period[\"revenue\"]\n\n budget_model[\"total_revenue\"] = total_revenue\n\n revenue = period[\"revenue\"]\n revenue_change = revenue - previous_revenue\n total_revenue_change = total_revenue_change + revenue_change\n \n if revenue_change > greatest_increase_revenue:\n greatest_increase_revenue = revenue_change\n greatest_increase_name = period[\"name\"]\n\n if revenue_change < greatest_decrease_revenue:\n greatest_decrease_revenue = revenue_change\n greatest_decrease_name = period[\"name\"]\n\n previous_revenue = revenue\n\n # Write aggregations to the budget model.\n budget_model[\"greatest_increase\"] = {\"name\": greatest_increase_name, \"revenue\": greatest_increase_revenue}\n budget_model[\"greatest_decrease\"] = {\"name\": greatest_decrease_name, \"revenue\": greatest_decrease_revenue}\n budget_model[\"average_revenue_change\"] = round(total_revenue_change / total_number_of_months, 0)", "def reverse_elements(seq):\n\n new_seq = []\n\n i = -1\n\n while i >= -len(seq):\n new_seq.append(seq[i])\n i -= 1\n\n return format_seq(seq, new_seq)", "def gnome_sort(items):\n i = 0\n n = len(items)\n while i < n:\n if i and items[i] < items[i-1]:\n items[i], items[i-1] = items[i-1], items[i]\n i -= 1\n else:\n i += 1\n return items", "def _sort(self):\n self.population.sort()\n self.population.reverse()", "def sort_prices(list_of_tuples):\n list_of_tuples.sort(key = get_price, reverse = True)\n return list_of_tuples", "def _descending_values(d):\n d_tuples = list([(ky, float(val)) for ky, val in d.items()])\n d_tuples.sort(key=lambda t: t[1], reverse=True)\n return OrderedDict(d_tuples)", "def bieos2ot(tag_sequence):\n new_sequence = []\n for t in tag_sequence:\n assert t == 'B' or t == 'I' or t == 'E' or t == 'O' or t == 'S'\n if t == 'O':\n new_sequence.append(t)\n else:\n new_sequence.append('T')\n assert len(new_sequence) == len(tag_sequence)\n return new_sequence", "def solve_buses(prepared_buses):\n T, c = functools.reduce(combine_signals, prepared_buses)\n return T - c", "def bank_sorter(bank):\n keys = np.argsort(bank[:,-1])\n for j in range(len(bank)):\n # sort the paramterized vectors\n sorted_bank[j,:] = bank[keys[j],:]\n\n # call decoder and get symbol\n symbol_bank.append(decode(sorted_bank[j,0:3]))\n final_bank = [name for name in symbol_bank if (name in results_bank)]\n symbol_bank_select = symbol_bank[0:50]\n return \"all tested = \" + str(symbol_bank_select), \"matches = \" + str(final_bank)", "def toposort(data):\n\n\n # Ignore self dependencies.\n for k, v in data.items():\n v.discard(k)\n # Find all items that don't depend on anything.\n extra_items_in_deps = \\\n reduce(set.union, data.itervalues()) - set(data.iterkeys())\n # Add empty dependences where needed\n data.update({item:set() for item in extra_items_in_deps})\n while True:\n ordered = set(item for item, dep in data.iteritems() if not dep)\n if not ordered:\n break\n yield ordered\n data = {item: (dep - ordered)\n for item, dep in data.iteritems()\n if item not in ordered}\n assert not data, \\\n \"Cyclic dependencies exist among these items:\\n{}\".format(\n '\\n'.join(repr(x) for x in data.iteritems()))", "def comp():\n from bbst import Bst\n return Bst([10, 6, 4, 8, 7, 9, 13, 11, 14, 12, 15])", "def bubble_sort(arr:Sequence[List]) -> Sequence[List]:\n n = len(arr)\n for i in range(n-1):\n for j in range(n-i-1):\n if arr[j] > arr[j+1]:\n arr[j], arr[j+1] = arr[j+1], arr[j]\n return arr", "def test_reverse_sort_lines(self):\n before_b = \"\"\"\\\n a\n 
d\n e\n z\n x\n \"\"\"\n after_b = \"\"\"\\\n z\n x\n e\n d\n a\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"1.0\", \"5.1\"),\n after_sel=(\"1.0\", \"5.1\"),\n command_name=\"reverse-sort-lines\",\n )", "def sort_012(input_list):\n # Initial two pointers to beginning and end\n # Start another pointer to traverse\n # if list[curr] == 0, then beg++, travel++\n # if list[curr] == 2, then swap with end, and end--, curr++\n # if list[curr] == 1, curr++\n \n # if list[end] == 2, then end-- (to optimize swaps)\n \n beg = curr = 0\n end = len(input_list) - 1\n while curr <= end:\n if input_list[curr] == 0:\n input_list[curr], input_list[beg] = \\\n input_list[beg], input_list[curr]\n beg += 1\n curr += 1\n elif input_list[curr] == 2:\n input_list[curr], input_list[end] = \\\n input_list[end], input_list[curr]\n end -= 1\n elif input_list[curr] == 1:\n curr += 1\n \n return input_list", "def sort_2(l):\n l.reverse()", "def toposort(prereqs_d):\r\n\r\n# all1 = set(prereqs_d.keys())\r\n# all2 = set()\r\n# for x, y in prereqs_d.items():\r\n# all2.update(y)\r\n# print all1.difference(all2)\r\n\r\n seq = []\r\n done = set()\r\n postreqs_d = {}\r\n for x, prereqs in prereqs_d.items():\r\n for prereq in prereqs:\r\n postreqs_d.setdefault(prereq, set()).add(x)\r\n next = set([k for k in prereqs_d if not prereqs_d[k]])\r\n while next:\r\n bases = next\r\n next = set()\r\n for x in bases:\r\n done.add(x)\r\n seq.append(x)\r\n for x in bases:\r\n for postreq in postreqs_d.get(x, []):\r\n if not prereqs_d[postreq].difference(done):\r\n next.add(postreq)\r\n if len(prereqs_d) != len(seq):\r\n raise Exception(\"Cannot sort topologically: there might be cycles, \"\r\n \"prereqs_d does not have a key for each element or \"\r\n \"some orderings contain invalid elements.\")\r\n return seq", "def rearrange_digits(input_list):\n x = 0\n y = 0\n \n sorted_item = merge_sort(input_list)[::-1]\n \n for i in range(0, len(sorted_item), 2):\n x = x * 10 + sorted_item[i]\n \n for j in range(1, len(sorted_item), 2):\n y = y * 10 + sorted_item[j]\n \n return [x, y]", "def get_bus_from_comp_formula(cf: str):\r\n bus_found = []\r\n if \"(\" in cf or \")\" in cf:\r\n possible_bus = re.findall(r'\\((.*?)\\)', cf)\r\n for pbu in possible_bus:\r\n if \"(\" in pbu or \")\" in pbu:\r\n # print(pbu) # always good to check\r\n continue\r\n pbuc = Composition(pbu)\r\n bu_found = find_bu_by_comp(pbuc)\r\n if bu_found is None:\r\n pbuc = get_no_hydrogen_composition(pbuc)\r\n bu_found = find_bu_by_comp(pbuc)\r\n if bu_found is None:\r\n continue\r\n bus_found.append(bu_found)\r\n else:\r\n pbuc = Composition(cf)\r\n bu_found = find_bu_by_comp(pbuc)\r\n if bu_found is None:\r\n pbuc = get_no_hydrogen_composition(pbuc)\r\n bu_found = find_bu_by_comp(pbuc)\r\n if bu_found is None:\r\n return []\r\n bus_found.append(bu_found)\r\n return sorted(set([b.buid for b in bus_found]))", "def bubble_sort(items):\n for num in range(len(items)-1,0,-1):\n for j in range(num):\n if items[j]>items[j+1]:\n temp = items[j]\n items[j] = items[j+1]\n items[j+1] = temp\n return items", "def cocktailsort(arr):\n left, right = 0, len(arr) - 1\n while left < right:\n for i in range(left, right):\n if arr[i] > arr[i + 1]:\n swap(arr, i, i + 1)\n right -= 1\n for i in range(right, left, -1):\n if arr[i] < arr[i - 1]:\n swap(arr, i, i - 1)\n left += 1", "def PreprocessBWT(bwt):\r\n asc_bwt = list(bwt)\r\n asc_bwt.sort()\r\n\r\n starts = dict()\r\n for idx, char in enumerate(asc_bwt):\r\n if char in starts:\r\n continue\r\n 
starts[char] = idx\r\n\r\n occ_count_before = defaultdict(dict)\r\n for idx in range(1, len(bwt)+1):\r\n occ_count_before[idx] = copy.copy(occ_count_before[idx - 1])\r\n char = bwt[idx - 1]\r\n occ_count_before[idx].setdefault(char, 0)\r\n occ_count_before[idx][char] += 1\r\n\r\n return starts, occ_count_before", "def get_beers_ordered_by_ibu():\n response = requests.get(\n \"https://sandbox-api.brewerydb.com/v2/\"\n + \"beers/?order=ibu&sort=DESC&withBreweries=Y\"\n + \"&key=cb1ce0c7f124fd5dd98f2a57d19120c4\",\n )\n full_beer_data = response.json()[\"data\"]\n reduced_beer_data = []\n for beer in full_beer_data:\n new_beer = {}\n new_beer[\"name\"] = beer[\"nameDisplay\"]\n new_beer[\"brewery\"] = beer[\"breweries\"][0][\"nameShortDisplay\"]\n new_beer[\"ibu\"] = beer[\"ibu\"]\n reduced_beer_data.append(new_beer)\n return reduced_beer_data", "def bubble_sort(list):\n for i in range(1, len(list) - 1):\n for j in range(len(list) - 1, i-1, -1):\n if list[j - 1] > list[j]:\n x = list[j]\n list[j] = list[j - 1]\n list[j - 1] = x\n return list", "def sort_012(input_list):\n i = 0\n next_0 = 0\n next_2 = len(input_list) - 1\n\n while i <= next_2:\n if input_list[i] == 0:\n input_list[i] = input_list[next_0]\n input_list[next_0] = 0\n next_0 += 1\n i += 1\n elif input_list[i] == 2:\n input_list[i] = input_list[next_2]\n input_list[next_2] = 2\n next_2 -= 1\n else:\n i += 1\n\n return input_list", "def bubble_sort(data_list_or_tuple):\n data_list = list(data_list_or_tuple)\n for count, _ in enumerate(data_list, 1):\n for x in range(len(data_list)-count):\n if data_list[x] > data_list[x+1]:\n data_list[x], data_list[x+1] = data_list[x+1], data_list[x]\n return data_list", "def order_chromosomal_contigs(chr_blast_output):\n ordered_chr_contigs = []\n current_contig = \"null\"\n current_contig_direction = 0\n current_contig_hits = 0\n\n with open(chr_blast_output) as blast_matches:\n for hit in blast_matches:\n hit_data = hit.rstrip(\"\\n\").split(\"\\t\")\n core_gene_dir = int(hit_data[0].split(\"|\")[1])\n if float(hit_data[2]) >= 90.0:\n new_contig = hit_data[1]\n new_contig_direction = core_gene_dir*np.sign(int(hit_data[9])-int(hit_data[8]))\n \n if new_contig == current_contig and new_contig_direction == current_contig_direction:\n current_contig_hits += 1\n else: \n contig_tuple = (current_contig, current_contig_direction, current_contig_hits)\n ordered_chr_contigs.append(contig_tuple)\n current_contig = new_contig\n current_contig_direction = new_contig_direction\n current_contig_hits = 1\n\n contig_tuple = (current_contig, current_contig_direction, current_contig_hits)\n ordered_chr_contigs.append(contig_tuple)\n ordered_chr_contigs.pop(0)\n\n #If hits to a contig are not contiguous, keep only the longest run \n chr_contig_dict = {} #stores the longest run for each contig\n remove_list = [] #stores the shorter runs for deletion\n n = -1\n for entry in ordered_chr_contigs:\n n += 1\n contig = entry[0]\n hits = entry[2]\n if contig not in chr_contig_dict:\n chr_contig_dict[contig] = (n, entry)\n elif hits > chr_contig_dict[contig][1][2]:\n remove_list.append(chr_contig_dict[contig])\n chr_contig_dict[contig] = (n, entry)\n else:\n remove_list.append((n, entry))\n\n #The first contig will usually also be the last - both should be kept \n for item in remove_list:\n \n if int(item[0]) == 0 or int(item[0]) == len(ordered_chr_contigs)-1:\n remove_list.remove(item)\n \n remove_list.sort(reverse = True)\n for item in remove_list:\n position = item[0]\n ordered_chr_contigs.pop(position)\n \n return 
ordered_chr_contigs", "def test_sort_inputs_0a6a357e(self):\n inputs = bip69.get_inputs_from_rpc_json(self.tx_json_0a6a357e)\n bip69_inputs = bip69.sort_inputs(inputs)\n self.assertEqual(bip69_inputs[0],\n (('0e53ec5dfb2cb8a71fec32dc9a634a35b7e24799295ddd52782'\n '17822e0b31f57'), 0))\n self.assertEqual(bip69_inputs[10],\n (('7d037ceb2ee0dc03e82f17be7935d238b35d1deabf953a892a4'\n '507bfbeeb3ba4'), 1))", "def dmc_order(self):\n return sorted(self.lookup_table, key=lambda clr: int(clr.id) if clr.id.isdigit() else 0)", "def test_bubblesort_sorts_list():\n from bubblesort import bubblesort\n unsorted_list = [6, 4, 7, 9, 0, 2]\n assert bubblesort(unsorted_list) == [0, 2, 4, 6, 7, 9]", "def normalized(self):\n revnums = self.sorted()\n revnums.reverse()\n ret = []\n while revnums:\n s = e = revnums.pop()\n while revnums and revnums[-1] in (e, e+1):\n e = revnums.pop()\n ret.append((s, e))\n return ret", "def sort_points_by_Y(list_of_points):\n sorted_y = sorted(list_of_points, key= lambda pt: pt.getY())\n sorted_y.reverse()\n return sorted_y", "def _toposort_with_ordered_mech_tuples(self, data):\n result = []\n for dependency_set in toposort(data):\n d_iter = iter(dependency_set)\n result.extend(sorted(dependency_set, key=lambda item : next(d_iter).mechanism.name))\n return result", "def insertBids(previous_bids, received_bids):\n\n new_bids = []\n\n while len(previous_bids) > 0 and len(received_bids) > 0:\n bid = None\n if Decimal(previous_bids[0][0]) > Decimal(received_bids[0][0]):\n bid = previous_bids.pop(0)\n elif Decimal(previous_bids[0][0]) < Decimal(received_bids[0][0]):\n bid = received_bids.pop(0)\n else:\n previous_bids.pop(0)\n bid = received_bids.pop(0)\n \n if Decimal(bid[1]) > Decimal(0):\n new_bids.append(bid)\n\n if len(previous_bids) > 0:\n new_bids.extend(previous_bids)\n elif len(received_bids) > 0:\n new_bids.extend(received_bids)\n\n return new_bids", "def sort_012(input_list):\r\n \r\n # Positions 0's to the start of the array and 2's to the end.\r\n # All remaining are 1's.\r\n\r\n next_0_index = 0\r\n next_2_index = len(input_list) - 1\r\n\r\n i = 0\r\n \r\n # Traverse the array once.\r\n # Last index to be checked is next_2_index because it is \r\n # one position before the last placed 2.\r\n while i < next_2_index + 1:\r\n\r\n element = input_list[i]\r\n\r\n if element == 0:\r\n\r\n # Position at next_0_index and put that element in this 0's place\r\n input_list[i], input_list[next_0_index] = input_list[next_0_index], input_list[i]\r\n\r\n # next_0_index is now taken by a 0, next index is to its right\r\n next_0_index += 1\r\n\r\n # Increment to check next element. All from i and before are 0's\r\n i += 1\r\n\r\n if element == 2:\r\n \r\n # Position at next_2_index and put that element in this 2's place\r\n input_list[i], input_list[next_2_index] = input_list[next_2_index], input_list[i]\r\n \r\n # next_2_index is now taken by a 2, next index is to its left\r\n next_2_index -= 1\r\n \r\n if element == 1:\r\n\r\n # Leave as is and proceed to next. Might be replaced by a 0 later\r\n i += 1\r\n\r\n return input_list", "def _process_data(data):\n for array in data:\n # Check if time is inverted. 
If so, reverse array while keeping the time/data structure.\n if array and len(array) > 2 and array[0] > array[2]:\n buff_1 = array[::2][::-1]\n buff_2 = array[1::2][::-1]\n array[::2] = buff_1\n array[1::2] = buff_2\n return data", "def bubble_sort(a_list):\n for item in reversed(range(len(a_list))):\n for i in range(item):\n if a_list[i] > a_list[i + 1]:\n a_list[i], a_list[i + 1] = a_list[i + 1], a_list[i]\n return a_list", "def ordered_accounts(filtered_accounts: List[Account]) -> List[Account]:\n return sorted(\n filtered_accounts, key=lambda x: (x.IntervalEnd, -x.PK), reverse=True\n )", "def comp2(numb):\n\tc1 = comp1(numb)\n\tc22 = []\n\tif int(numb) < 0:\n\t\tlsb = 0\n\t\twhile lsb == 0:\n\t\t\tlsb = int(not c1.pop())\n\t\t\tc22.append(lsb)\n\t\tc1.reverse()\n\t\tc22 = c22 + c1\n\t\tc22.reverse()\n\telse:\n\t\tc22 = c1\n\treturn c22" ]
[ "0.5366377", "0.5326399", "0.532053", "0.5228897", "0.5217887", "0.5176579", "0.5169902", "0.51473904", "0.513209", "0.5101623", "0.50995326", "0.5060153", "0.50267196", "0.5010616", "0.5007883", "0.4982695", "0.49244604", "0.49026328", "0.49025616", "0.4894752", "0.48676687", "0.48411286", "0.48391026", "0.48261857", "0.48255587", "0.47989455", "0.47885942", "0.475366", "0.47492263", "0.47466934", "0.47340056", "0.47191724", "0.47162145", "0.47073627", "0.47073627", "0.47039387", "0.46990138", "0.46718588", "0.46662822", "0.46617368", "0.46596542", "0.46465814", "0.4644603", "0.4640533", "0.46302775", "0.462917", "0.46258542", "0.46178344", "0.46164185", "0.4613784", "0.46066323", "0.4599602", "0.4593657", "0.45818081", "0.45770276", "0.4573783", "0.45736456", "0.45718858", "0.45706564", "0.45605463", "0.45592758", "0.45568943", "0.4553857", "0.45464927", "0.45412764", "0.45337424", "0.4528645", "0.4526961", "0.4526685", "0.4524056", "0.4524043", "0.4512408", "0.45067662", "0.4505925", "0.44896832", "0.44886416", "0.44876164", "0.44860828", "0.44812065", "0.44804293", "0.44777414", "0.44719562", "0.44707322", "0.44700804", "0.44682825", "0.44676265", "0.4466636", "0.44657683", "0.44653752", "0.44646794", "0.44639018", "0.44600627", "0.44599417", "0.44533595", "0.4452538", "0.44482872", "0.44474754", "0.44458863", "0.44441682", "0.44420627" ]
0.6246531
0
period of combined signal is lcm of the periods of its components >>> lcm(3, 9) 9 >>> lcm(4, 9) 36
def lcm(x, y): return x*y//gcd(x,y)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lcm(a, b):\n\treturn a * b // gcm(a, b)", "def lcm(a, b):\r\n return a * b / fr.gcd(a, b)", "def lcm(a, b):\r\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n return a * b // gcd(a, b)", "def lcm(a, b):\n\n\treturn (a * b)/gcd(a, b)", "def lcm(a, b):\n return (a * b) // gcd(a, b)", "def lcm(a, b):\n return (a * b) // gcd(a, b)", "def lcm(self, a, b):\n raise NotImplementedError", "def lcm(a, b):\n return a * b / gcd(a, b)", "def lcm(a, b):\n return a * b / gcd(a, b)", "def lcm(a, b):\n return a * b / gcd(a, b)", "def lcm(num1, num2):\n return num1 * num2 // fractions.gcd(num1,num2)", "def lcm(*args):\r\n\treturn functools.reduce(lambda x, y: x * y / gcd(x, y), args)", "def lcm(numbers):\r\n if len(numbers) == 2: \r\n num0 = numbers[0] \r\n num1 = numbers[1] \r\n return num0 * num1 / gcd(num0, num1) \r\n else: \r\n for i in range(len(numbers)): \r\n return lcm([numbers[0], lcm(numbers[1:])])", "def lcm(a, b):\n if not (a or b):\n return 0\n else:\n a = abs(a)\n b = abs(b)\n return a*b/gcd(a,b)", "def lcm(a, b):\n return abs(a*b) / gcd(a, b) if a and b else 0", "def lcm(n1, n2):\n \n if n1 > 0 and n2 > 0:\n gcd = gcd_ea(n1,n2)\n lcm = (n1 * n2) / gcd\n\n return lcm", "def lcm(*numbers): \n def lcm(a, b):\n return (a * b) // gcd(a, b)\n return reduce(lcm, numbers, 1)", "def lcm(numbers):\r\n if len(numbers) == 2:\r\n num0 = numbers[0]\r\n num1 = numbers[1]\r\n return num0 * num1 / gcd(num0, num1)\r\n else:\r\n for i in range(len(numbers)):\r\n return lcm([numbers[0], lcm(numbers[1:])])", "def lcm(a: int, b: int) -> int:\n return (a * b) // gcd(a, b)", "def lcm(a,b):\n if a==0:\n return b\n if b==0:\n return a\n c=gcd(a,b)\n lcm=a*b/c\n return lcm", "def lcm(a: int, b: int) -> int:\n return a * b // gcd(a, b)", "def lcm(a: int, b: int) -> int:\n return a * b // gcd(a, b)", "def lcm(multiple_list):\n res = 1\n for n in multiple_list:\n if res % n != 0:\n res *= n//fractions.gcd(res,n)\n return res", "def lcm(L):\n lcm, M = 1, []\n for i in L:\n for j in M:\n if i % j == 0:\n i //= j\n while i > 1:\n lcm *= low_prime(i)\n M.append(low_prime(i))\n i //= low_prime(i)\n return lcm", "def lcm_for_two(a, b):\n\t\n\treturn a // gcd_for_two(a, b) * b", "def lcm(a, b):\n\n if a == b == 0:\n return 0\n\n return (a * b) // gcd(a, b)", "def lcm(*numbers):\n def lcm(a, b):\n return (a * b) // gcd(a, b)\n return reduce(lcm, numbers, 1)", "def lcm(a: int, b: int):\n return (a * b) // euclid(a, b)", "def lcm(\n numbers: List[int]\n) -> int:\n current_product = 1\n current_gcd = 1\n for num in numbers:\n current_gcd = gcd(current_gcd, num)\n current_product *= num\n return current_product // current_gcd", "def lcm(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_lcm(F, G, lev, dom))", "def lcm(*numbers):\n def lcm(a, b):\n return (a * b) // gcd(a, b)\n\n return reduce(lcm, numbers, 1)", "def lcm(self, a, b):\n return a*b", "def lcms(argg: range) -> int:\n l = 1\n for arg in argg:\n l = lcm(l, arg)\n return l", "def calculate_lcm(a, b):\n return a * b / calculate_gcd(a, b)", "def four_num_lcm(*numbers):\n\n return reduce(calculate_lcm,numbers)", "def lcm2(a, b):\n return a * b / gcd(a, b)", "def lcm(x,y):\n #Initialize counter & 
condition\n counter = 1\n condition = False\n #While loop iterates until LCM condition is satisfied\n while condition == False :\n counter = counter + 1\n condition = (counter % x == 0) and (counter % y == 0)\n return counter", "def lcm(a, b):\n if not isinstance(a, int):\n a = int(a)\n if not isinstance(b, int):\n b = int(b)\n return abs(a*b) / gcd(a, b)", "def lcm(x, y):\n\n # choose the greater number\n if x > y:\n greater = x\n else:\n greater = y\n\n while True:\n if (greater % x == 0) and (greater % y == 0):\n lcm = greater\n break\n greater += 1\n\n return lcm", "def lcm(x, y):\r\n\r\n # choose the greater number\r\n if x > y:\r\n greater = x\r\n else:\r\n greater = y\r\n\r\n while(True):\r\n if((greater % x == 0) and (greater % y == 0)):\r\n lcm = greater\r\n break\r\n\r\n greater += 1\r\n\r\n return lcm", "def lcm(x, y):\n\n # choose the greater number\n if x > y:\n greater = x\n else:\n greater = y\n\n while (True):\n if ((greater % x == 0) and (greater % y == 0)):\n lcm = greater\n break\n greater += 1\n\n return lcm", "def lcm(*nums):\n\t\n\treturn reduce(lcm_for_two, nums)", "def lcm(x, y):\n lcm = (x*y)//gcd(x,y)\n return(lcm)", "def lcm(x, y):\n\n # choose the greater number\n if x > y:\n greater = x\n else:\n greater = y\n\n while(True):\n if((greater % x == 0) and (greater % y == 0)):\n lcm = greater\n break\n greater += 1\n\n return lcm", "def lcm(self, other) -> \"GCD\":\n common_factors: Dict[Basic, int] = {}\n size = 0\n other_factors = deepcopy(other.factors)\n\n for expr1, power1 in self.factors.items():\n # TODO: Add special method for lcm of integers\n if expr1 in other.factors:\n power2 = other.factors[expr1]\n\n size += max(power1, power2)\n common_factors[expr1] = max(power1, power2)\n del other_factors[expr1]\n else:\n size += power1\n common_factors[expr1] = power1\n\n for expr2, power2 in other_factors.items():\n size += other.factors[expr2]\n common_factors[expr2] = power2\n\n return GCD(size, 1, common_factors)", "def calculate_lcm(num1, num2):\n\n lcm = (num1 * num2) // gcd(num1, num2)\n return lcm", "def lcm3(a, b, c):\n return lcm(lcm(a, b), c)", "def tst_functn():\n a=15\n b=20\n Least_common_factor=lcm(a,b)\n print(\"LCM is: \")\n print(Least_common_factor)\n\n a=0\n b=51\n Least_common_factor=lcm(a,b)\n print(\"LCM is: \")\n print(Least_common_factor)\n\n a=12\n b=0\n Least_common_factor=lcm(a,b)\n print(\"LCM is: \")\n print(Least_common_factor)", "def find_lcm(num_1, num_2):\n max_num = num_1 if num_1 > num_2 else num_2\n lcm = max_num\n while True:\n if ((lcm % num_1 == 0) and (lcm % num_2 == 0)):\n break\n lcm += max_num\n return lcm", "def lcm(*values):\n\tvalues = set([abs(int(v)) for v in values])\n\tif values and 0 not in values:\n\t\tn = n0 = max(values)\n\t\tvalues.remove(n)\n\t\twhile any( n % m for m in values ):\n\t\t\tn += n0\n\t\treturn n\n\treturn 0", "def lcm(num1, num2):\n\n if num1 > num2:\n bigger = num1\n else:\n bigger = num2\n while True:\n if bigger % num1 == 0 and bigger % num2 == 0:\n return bigger\n bigger += 1", "def main(n=20):\n return functools.reduce(lcm, range(1, 20))", "def _lcm_f(a, b):\n return int((a * b) / _gcd_f(a, b))", "def lcmm(*args): \r\n return reduce(lcm, args)", "def lcmm(*args): \n return reduce(lcm, args)", "def lcmm(*args): \n return reduce(lcm, args)", "def lcmm(*args): \n return reduce(lcm, args)", "def lcmm(listlcm):\n return reduce(lcm, listlcm)", "def lcmm(*args): \n return functools.reduce(lcm, args) # maybe remove need for reduce() later, since discouraged in python3.", "def main():\n num_1 = 
12\n num_2 = 76\n print(find_lcm(num_1, num_2))", "def lcmm(*args):\n return reduce(lcm, args)", "def multiple(a, b):\n from fractions import gcd\n def lcm(x,y):\n \treturn (x*y)//gcd(x,y)\n #return lcm(a,b)\n \n def gcd(x,y):\n if y > x:\n x, y = y, x\n while y != 0:\n x, y = y, x % y\n return x\n return (a*b) // gcd(a,b)", "def lcmm(*args):\n\treturn reduce(lcm, args)", "def lcmList(numList):\n return reduce(lcm, numList)", "def lcm(x: int, y: int) -> int:\n assert isinstance(x, int) and isinstance(y, int) and x > 0 and y > 0\n return int(x * y / gcd(x, y))", "def combine_signals(longer,shorter):\n T_0, c_0 = longer\n T_1, c_1 = shorter\n\n # Period is the lcm of the provided periods\n T_result = lcm(T_0,T_1)\n \n # Determine phase by searching soutions of longer that fall between the\n # start position and start + T_result\n for i in range(T_0-c_0,T_result+c_0,T_0):\n v0 = (i + c_0) % T_0\n v1 = (i + c_1) % T_1\n if not( v0 or v1):\n return (T_result,T_result-i)", "def lcm(*nums):\n factors_list = [factors(i) for i in nums]\n all_factors = set(chain(*factors_list))\n factor_counts = [{n: fl.count(n) for n in set(fl)} for fl in factors_list]\n result = 1\n for factor in all_factors:\n max_occurences = max([fc.get(factor, 0) for fc in factor_counts])\n result *= factor ** max_occurences\n return result", "def ppcm_denominateurs(self):\n\t\tl = []\n\t\tn = 1\n\t\tif self.__valide:\n\t\t\tfor m in self.liste_decroissante():\n\t\t\t\t\"\"\" les denominateurs sont positifs \"\"\"\n\t\t\t\te = m.get_coefficient().get_denom().valeur()\n\t\t\t\tif not (e in l):\n\t\t\t\t\tl.append(e)\n\t\t\t\tn *= e\n\t\treturn n / pgcd_liste(l)", "def lcm(self, other):\n if not isinstance(other, Factorization):\n raise NotImplementedError(\"can't take lcm of factorization and non-factorization\")\n\n if len(self) and len(other):\n try:\n # first get the two factorizations to have the same\n # universe\n U = Sequence([self[0][0], other[0][0]]).universe()\n self = self.base_change(U)\n other = other.base_change(U)\n except TypeError:\n raise TypeError(\"Cannot take the lcm of %s and %s because they cannot be coerced into a common universe\"%(self,other))\n\n if self.is_commutative() and other.is_commutative():\n d1 = dict(self)\n d2 = dict(other)\n s = {}\n for a in set(d1).union(set(d2)):\n s[a] = max(d1.get(a,0),d2.get(a,0))\n return Factorization(list(s.iteritems()))\n else:\n raise NotImplementedError(\"lcm is not implemented for non-commutative factorizations\")", "def part2(lines, full):\n\n # 836024966345345\n buses = {\n offset: int(v) for offset, v in enumerate(lines[1].split(\",\")) if v.isnumeric()\n } # key is offset, value is bus ID\n\n lcm = {}\n vals = list(buses.items())\n # Compute LCM (Least Common Multiple's) for each bus\n # E.g. for buses 7, 13, 59\n # For buses 7 and 13 the LCM is 7 * 13 = 91\n # For buses 7, 13 and 59 the LCM is 7 * 13 * 59 = 5369\n lcm[0] = vals[0]\n # ugly code incoming.\n for idx, item in enumerate(vals[1:-1], 1):\n offset, busid = item\n # Store the offset for the next bus\n lcm[idx] = vals[idx + 1][0], busid * lcm[idx - 1][1]\n\n # Start by finding t at which the first 2 buses meet (this is before their LCM)\n t = buses_intersect_at(vals[0][1], vals[1][1], vals[1][0] - vals[0][0])\n\n # Now for each bus..\n for idx, lcmtup in list(lcm.items())[1:]:\n offset, lcm_for_buses = lcmtup\n\n # .. 
keep incrementing time with the LCM (of this and the past buses) until they align\n busid = buses[offset]\n while ((t + offset) % busid) != 0:\n t += lcm_for_buses\n return t", "def period(self):\n from sage.arith.all import gcd\n\n g = 0\n\n for component in self.strongly_connected_components():\n levels = dict((s, None) for s in component)\n vertices_in_scc = levels # considers level as a set\n s = component[0]\n levels[s] = 0\n this_level = [s]\n l = 1\n while this_level:\n next_level = []\n for u in this_level:\n # we have levels[u] == l-1\n for v in self.neighbor_out_iterator(u):\n # ignore edges leaving the component\n if v not in vertices_in_scc:\n continue\n level_v = levels[v]\n if level_v is not None: # Non-Tree Edge\n g = gcd(g, l - level_v)\n if g == 1:\n return 1\n else: # Tree Edge\n next_level.append(v)\n levels[v] = l\n this_level = next_level\n l += 1\n\n return g", "def least_common_multiple_func(self, other_denominator) -> int:\n least_common_mult = self.denominator_b\n while (least_common_mult % self.denominator_b + least_common_mult % other_denominator) != 0:\n least_common_mult += 1\n return least_common_mult", "def period(self) -> int:", "def lcmu(a, b):\n return (abs(a)*abs(b))//gcdi(a, b)", "def lowest_common_multiple(a, b):\n # 两个数字相乘后除以最大公约数 = 两个数字的最小公倍数\n return a * b // gcd(a, b)", "def order(self):\n return reduce(lcm,[1]+[len(cycle) for cycle in self.cyclic_form])", "def get_clock_divisor(self):\n return self.o.read_register(self.dev_id, CLOCK_DIVISOR)", "def mod(dividends, divisor):\n\n output = np.zeros(len(dividends))\n\n for i in tqdm(range(len(dividends))): \n output[i] = dividends[i]\n done=False\n while (not done):\n if output[i] >= divisor:\n output[i] -= divisor\n elif output[i] < 0.:\n output[i] += divisor\n else:\n done=True\n\n return output", "def least_common_multiple(number1, number2):\n return number1 * number2 // math.gcd(number1, number2)", "def getPeriod(self,coefficients):\n return max([prod(coefficients[i]) for i in range(4)])", "def kl_period(triple):\n # fmt: off\n L1toC2 = (\n 16 * triple.a2 * (1 - triple.e2**2)**(3 / 2.0) / triple.m3 *\n (triple.a2 / triple.a1)**2 * sqrt(triple.m1 * triple.a1) / (2 * np.pi)\n )\n # fmt: on\n\n return L1toC2 * kl_period_norm(triple.Hhatquad, triple.Th) / 15", "def greatest_common_divisor(a: int, b: int) -> int:\n#[SOLUTION]\n while b:\n a, b = b, a % b\n return a", "def gcd(a,b):\r\n while b:\r\n a, b = b, a % b\r\n return a", "def xgcd(self, a):\n s, old_s = [0], [1]\n t, old_t = [1], [0]\n r, old_r = self.coef, a\n\n while sum(r) != 0:\n quotient = self.poly_round_div(old_r, r)\n old_r, r = r, [sum(x) % self.mod for x in\n itertools.zip_longest(old_r, [x * (-1) for x in self._mul_coef(quotient, r)], fillvalue=0)]\n old_s, s = s, [sum(x) % self.mod for x in\n itertools.zip_longest(old_s, [x * (-1) for x in self._mul_coef(quotient, s)], fillvalue=0)]\n old_t, t = t, [sum(x) % self.mod for x in\n itertools.zip_longest(old_t, [x * (-1) for x in self._mul_coef(quotient, t)], fillvalue=0)]\n while len(r) and r[-1] == 0:\n r.pop()\n return old_r, old_s, old_t\n # # old_r[0]이 1이 아닌경우는 old_r[0]으로 나눠야 한다.\n # old_s_inv = mul_inverse_mod(old_r[0], self.mod)\n # # result = [ x % 3 for x in self.poly_mul2(old_s, [old_s_inv])]\n # result = [x % self.mod for x in self._mul_coef(old_s, [old_s_inv])]\n # return result + ([0] * (len(self.irr_coef) - len(result)))", "def gcd(a, b):\r\n while b: \r\n a, b = b, a % b\r\n return a", "def gcm(a, b):\n\twhile b:\n\t\ta, b = b, a % b\n\treturn a", "def _get_m(self, ks: 
List[int]) -> int:\n\n base = 1\n for c in ks:\n base = base * c // gcd(base, c)\n return base", "def divider_ref(dividend, divisor):\n rom_size = 2**8\n rom = [0 for _ in range(rom_size)]\n rom = [0] + [int(round(((2**16)-1)/float(ii)))\n for ii in range(1, rom_size)]\n rom = tuple(rom)\n divisor_reciprocal = rom[divisor]\n if dividend < 0:\n dividend_d1 = -dividend\n else:\n dividend_d1 = dividend\n mult = (dividend_d1 * divisor_reciprocal)\n mult_s = mult/(2**16)\n if dividend < 0:\n mult_s = -mult_s\n round_ = int((mult/(2**15)) % 2)\n if round_ == 1:\n if dividend >= 0:\n mult_s = mult_s + 1\n else:\n mult_s = int(mult_s - 1)\n return int(mult_s)", "def mcd(a, b):\n while(b != 0):\n a,b = b,a%b\n return a", "def gcd(integer_m, integer_n):\n while integer_m%integer_n != 0:\n oldm = integer_m\n oldn = integer_n\n\n integer_m = oldn\n integer_n = oldm%oldn\n return integer_n", "def gcd(a, b):\n while b:\n a, b = b, a % b\n return a" ]
[ "0.71418875", "0.69032246", "0.6865901", "0.6818754", "0.6818754", "0.6818754", "0.6818754", "0.6818754", "0.6818754", "0.6818754", "0.6818754", "0.6818754", "0.67949474", "0.67771405", "0.67771405", "0.6775764", "0.67721605", "0.67721605", "0.67721605", "0.665062", "0.66015226", "0.659187", "0.6576261", "0.6552049", "0.6542266", "0.6528849", "0.6527448", "0.65259486", "0.65132046", "0.64966893", "0.64966893", "0.64869934", "0.6484255", "0.64636976", "0.6460933", "0.64479756", "0.6446136", "0.6445394", "0.64318186", "0.6427428", "0.64238966", "0.6412323", "0.63886046", "0.63622475", "0.63493246", "0.6323778", "0.6310872", "0.6306663", "0.6298664", "0.62838125", "0.627514", "0.6274833", "0.6266796", "0.62641335", "0.62051094", "0.61770594", "0.6161821", "0.6144745", "0.6118971", "0.6118142", "0.60619634", "0.6006923", "0.6004392", "0.5998988", "0.5998988", "0.5998988", "0.59923714", "0.5957029", "0.59271085", "0.58953714", "0.58864707", "0.58606094", "0.5853981", "0.57501835", "0.569879", "0.5624477", "0.5622464", "0.54723763", "0.5421924", "0.54181117", "0.535142", "0.534732", "0.52369595", "0.52044374", "0.5175772", "0.51712483", "0.5162084", "0.51576066", "0.5146476", "0.5114604", "0.51039094", "0.5091304", "0.5083102", "0.5047475", "0.50136995", "0.50105935", "0.50059026", "0.50051063", "0.50036824", "0.49942973" ]
0.6627561
20
>>> combine_signals((5,2), (3,1))
(15, 7)
>>> combine_signals((3,1), (2,0))
(6, 4)
>>> combine_signals((13,1),(12,0))
(156, 144)
def combine_signals(longer, shorter):
    T_0, c_0 = longer
    T_1, c_1 = shorter

    # Period is the lcm of the provided periods
    T_result = lcm(T_0, T_1)

    # Determine phase by searching solutions of longer that fall between the
    # start position and start + T_result
    for i in range(T_0 - c_0, T_result + c_0, T_0):
        v0 = (i + c_0) % T_0
        v1 = (i + c_1) % T_1
        if not (v0 or v1):
            return (T_result, T_result - i)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def combine_signals(series1: pd.Series, series2: pd.Series) -> pd.Series:\n return ((np.sign(series1) == np.sign(series2)) * series1).astype(int, copy=False)", "def solve_buses(prepared_buses):\n T, c = functools.reduce(combine_signals, prepared_buses)\n return T - c", "def pick_signals(processor, source = 'input'):\n\n if source == 'input':\n bin_edges = processor.input_parameters['bin_edges']\n raw_signal = processor.input_signal\n elif source == 'output':\n bin_edges = processor.output_parameters['bin_edges']\n raw_signal = processor.output_signal\n else:\n raise ValueError('Unknown value for the data source')\n t = np.zeros(len(raw_signal)*4)\n bins = np.zeros(len(raw_signal)*4)\n signal = np.zeros(len(raw_signal)*4)\n value = 1.\n\n for i, edges in enumerate(bin_edges):\n t[4*i] = edges[0]\n t[4*i+1] = edges[0]\n t[4*i+2] = edges[1]\n t[4*i+3] = edges[1]\n bins[4*i] = 0.\n bins[4*i+1] = value\n bins[4*i+2] = value\n bins[4*i+3] = 0.\n signal[4*i] = 0.\n signal[4*i+1] = raw_signal[i]\n signal[4*i+2] = raw_signal[i]\n signal[4*i+3] = 0.\n value *= -1\n\n z = t * c\n return (t, z, bins, signal)", "def caculate_signals(self):\n\t\traise NotImplementedError(\"Should implement calculate_signals()\")", "def test_merge_signals_with_duplicate_attributes(self):\n blk = MergeStreams()\n signal_1 = Signal({\"A\": 1})\n signal_2 = Signal({\"A\": 2})\n merged_signal = blk._merge_signals(signal_1, signal_2)\n self.assertDictEqual(merged_signal.to_dict(), signal_2.to_dict())", "def _merge_and_reduce(self, signals):\n\n if self.s_filter:\n\n signals = clean(signals,\n standardize=self.standardize,\n low_pass=self.low_pass,\n high_pass=self.high_pass,\n t_r=self.tr)\n \n return signals", "def tuple_merge(tuples):\n\n\t# Add your code here\n\treturn", "def calculate_signals(self):\n raise NotImplementedError(\"Should implement calculate_signals()\")", "def add_signals(self, signals):\n\n self.signals = {**self.signals, **signals} # merge the two", "def extend_signals(signals, length=None, samplerate=None):\n if length is None:\n return signals\n if samplerate is not None:\n length = round(samplerate * length)\n\n def extend(signal):\n padding = length - signal.shape[-1]\n if padding < 1:\n return signal.copy()\n padding = np.zeros(signal.shape[:-1] + (padding,))\n padded = np.concatenate([signal, padding], axis=-1)\n return padded\n\n return _apply_to_signals(extend, signals)", "def mix_signals(n_samples, M, version=None, duration=4):\n time = np.linspace(0, duration, n_samples) # list of time index \n \n s1 = np.sin(2 * time) # sinusoidal\n s2 = np.sign(np.sin(3 * time)) # square signal\n s3 = signal.sawtooth(2 * np.pi * time) # saw tooth signal\n s4 = np.sin(4 * time) # different sinusoidal\n \n zero_row = np.zeros(n_samples)\n X = np.c_[s1, zero_row, s3, s4, s2].T # version 'none'\n \n if version == 'test':\n X = np.c_[s1, s2, s3].T\n if version == 0:\n X = np.c_[s1, s3, s4, s2].T\n if version == 1:\n X = np.c_[zero_row, s1, zero_row, s3, zero_row, zero_row, s4, s2].T\n \n \" Finding A and Y \" \n N = len(X)\n A = np.random.randn(M,N) # Random mixing matrix\n Y = np.dot(X.T, A.T) # Measurement matrix\n return Y.T, A, X", "def combiner(x):\n return x", "def test_add_signals():\n x = np.linspace(390, 410, 200)\n doublet = [(399, 1), (401, 1)]\n y = add_signals(x, doublet, 1)\n X = np.array([x for x, _ in ADD_SIGNALS_DATASET])\n Y = np.array([y / 2 for _, y in ADD_SIGNALS_DATASET]) # scale to match\n print(y)\n print(Y)\n assert np.array_equal(x, X)\n assert np.array_equal(y, Y)", "def 
synchronise_signals(in_signal_1, in_signal_2, time_interval = -1, fs = 100):\n\n # signal segmentation\n in_signal_1 = in_signal_1[:time_interval*fs]\n in_signal_2 = in_signal_2[:time_interval*fs]\n\n #in_signal_2 = in_signal_2 - gravitational_filter(in_signal_2, fs)\n in_signal_1 = in_signal_1 * (-1)\n\n #in_signal_1[time_array[0] * fs:time_array[1] * fs] = in_signal_1[time_array[0] * fs:time_array[1] * fs] + 200\n #in_signal_2[time_array[4] * fs:time_array[5] * fs] = in_signal_2[time_array[4] * fs:time_array[5] * fs] + 200\n #in_signal_1[time_array[2] * fs:time_array[3] * fs] = in_signal_1[time_array[2] * fs:time_array[3] * fs] + 200\n #in_signal_2[time_array[6] * fs:time_array[7] * fs] = in_signal_2[time_array[6] * fs:time_array[7] * fs] + 200\n\n\n # signal normalisation\n mean_1, std_1, mean_2, std_2 = [np.mean(in_signal_1), np.std(in_signal_1), np.mean(in_signal_2),\n np.std(in_signal_2)]\n signal_1 = in_signal_1 - mean_1\n signal_1 /= std_1\n signal_2 = in_signal_2 - mean_2\n signal_2 /= std_2\n\n\n # zero padding signals so that they are of same length, this facilitates the calculation because\n # then the delay between both signals can be directly calculated\n # zero padding only if needed\n #if (len(signal_1) != len(signal_2)):\n\n # check which signal has to be zero padded\n # if (len(signal_1) < len(signal_2)):\n\n # pad first signal\n # signal_1 = np.append(signal_1, np.zeros(len(signal_2) - len(signal_1)))\n\n # else:\n\n # pad second signal\n # signal_2 = np.append(signal_2, np.zeros(len(signal_1) - len(signal_2)))\n\n\n N = len(signal_1) + len(signal_2) - 1\n # Calculate the cross-correlation between the two signals.\n #correlation = np.correlate(signal_1, signal_2, 'full')\n f1 = fft(signal_1, N)\n f2 = np.conj(fft(signal_2, N))\n correlation = np.real(ifft(f1 * f2))\n #correlation = fftshift(cc)\n\n\n # calculate tau / shift between both signals\n #tau = int(np.argmax(correlation) - (len(correlation)) / 2)\n tau = np.argmax(correlation)\n print(tau)\n if tau > len(correlation) // 2:\n tau = np.argmax(correlation) - len(correlation)\n print(tau)\n\n # crop signals to original length (removing zero padding)\n #signal_1 = signal_1[:len(in_signal_1)]\n #signal_2 = signal_2[:len(in_signal_2)]\n\n\n # check which signal has to be sliced\n if (tau < 0):\n # tau negative --> second signal lags\n signal_2 = signal_2[np.abs(tau):]\n\n elif (tau > 0):\n # tau positive ---> firs signal lags\n signal_1 = signal_1[np.abs(tau):]\n\n\n # revert signals to orignal scale\n result_signal_1 = signal_1 * std_1 + mean_1\n result_signal_2 = signal_2 * std_2 + mean_2\n\n return tau, result_signal_1, result_signal_2", "def CombineUElements(S,indexes):\n x = set.intersection( *[ set.union(*z) for z in S ] )\n x.difference_update(indexes)\n return(tuple(x))", "def input_signal(self, signal: np.ndarray):\n # Algorithm 3.4 (2)\n signal = self.__check_signal(signal)\n # keep track of number of signals input so far\n self.num_signal += 1\n # Store the input signal?\n self.sigs.append(signal)\n\n # Algorithm 3.4 (1)\n # if number of nodes is smaller than 2, set the input signal as a node.\n if len(self.nodes) < 2:\n self.__add_node(signal)\n return\n\n # Algorithm 3.4 (3)\n winner, dists = self.__find_nearest_nodes(2, signal)\n sim_thresholds = self.__calculate_similarity_thresholds(winner)\n # signal is a new pattern \n if dists[0] > sim_thresholds[0] or dists[1] > sim_thresholds[1]:\n self.__add_node(signal)\n else:\n # Algorithm 3.4 (4)\n self.__increment_edge_ages(winner[0])\n # Algorithm 3.4 (5)\n 
need_add_edge, need_combine = self.__need_add_edge(winner)\n if need_add_edge:\n # print(\"add edge\")\n # Algorithm 3.4 (5)(a)\n self.__add_edge(winner)\n else:\n # Algorithm 3.4 (5)(b)\n self.__remove_edge_from_adjacent_mat(winner)\n # Algorithm 3.4 (5)(a) need to combine subclasses\n if need_combine:\n self.__combine_subclass(winner)\n # Algorithm 3.4 (6) checked, maybe fixed problem N\n self.__update_density(winner[0])\n # Algorithm 3.4 (7) is embedded in func __update_density()\n # Algorithm 3.4 (8) (a)\n self.__update_winner(winner[0], signal)\n # Algorithm 3.4 (8) (b)\n self.__update_adjacent_nodes(winner[0], signal)\n\n # Algorithm 3.4 (9)\n self.__remove_old_edges()\n\n # Algorithm 3.4 (10)\n if self.num_signal % self.iteration_threshold == 0 and self.num_signal > 1:\n # print(self.won)\n # update self.N based on self.won\n for i in range(len(self.won)):\n if self.won[i]:\n self.N[i] += 1\n for i in range(len(self.won)):\n self.won[i] = False\n print(\"Input signal amount:\", self.num_signal, \"nodes amount:\", len(self.nodes))\n self.__separate_subclass()\n self.__delete_noise_nodes()\n self.total_loop += 1 # ?\n # algo 3.4(11)?\n self.__classify()\n # plot\n # threading.Thread(self.plot_NN())\n \n # clear signals\n self.sigs.clear()", "def signal_to_training( # pylint: disable=too-many-locals\n self,\n signal: Union[Dict, List[Dict]]\n ) -> Tuple[np.ndarray, Tuple[np.ndarray, ...], np.ndarray, Dict[str, Any]]:\n dict_list = list(signal) if isinstance(signal, list) else list((signal, ))\n\n # Initialize the return values\n time_length = len(dict_list[0]['signal']['time']['data']) # type: ignore\n length = int(time_length / 2)\n signals = np.zeros((0, time_length))\n result_r = np.zeros((0, length))\n result_b = np.zeros((0, length))\n result_h = np.zeros((0, length))\n result_m = np.zeros((0, length))\n result_p = np.zeros((0, length))\n answer = np.zeros((0, length))\n config = {\n 'SNR': [],\n 'count': [],\n 'frequencies': [],\n 'amplitudes': [],\n 'minamplitude': [],\n 'mindist': []\n } # type: Dict[str, Any]\n\n # Calculate window functions\n window_bartlett = np.bartlett(time_length)\n window_hanning = np.hanning(time_length)\n window_meyer = self._meyer_wavelet(time_length)\n window_poisson = exponential(time_length, sym=True, tau=(time_length/2)*(8.69/60.0))\n\n # Loop all data entries\n for data in dict_list:\n time = np.asarray(data['signal']['time']['data'])\n signals = np.concatenate((signals, np.reshape(time, (1,) + time.shape)))\n config['SNR'].append(data['signal']['SNR'])\n\n # Assemble the FFTs\n fft = np.fft.fft(time)[:length] / time_length\n result_r = np.concatenate((result_r, np.reshape(fft, (1,) + fft.shape)))\n fft = np.fft.fft(time * window_bartlett)[:length] / time_length\n result_b = np.concatenate((result_b, np.reshape(fft, (1,) + fft.shape)))\n fft = np.fft.fft(time * window_hanning)[:length] / time_length\n result_h = np.concatenate((result_h, np.reshape(fft, (1,) + fft.shape)))\n fft = np.fft.fft(time * window_meyer)[:length] / time_length\n result_m = np.concatenate((result_m, np.reshape(fft, (1,) + fft.shape)))\n fft = np.fft.fft(time * window_poisson)[:length] / time_length\n result_p = np.concatenate((result_p, np.reshape(fft, (1,) + fft.shape)))\n\n # Assemble all the frequencies and amplitudes\n count = 0\n freqs = []\n ampls = []\n counting = np.zeros((1, length))\n for subsig in data['signal']['parts']:\n if subsig['signal']['type'] == 'SingleOscillation':\n count += 1\n freq = subsig['signal']['frequency']\n counting[0, int(max(0, min(length 
- 1, round(freq))))] += 1\n freqs.append(freq)\n ampls.append(subsig['signal']['amplitude'])\n config['count'].append(count)\n\n # Sort frequencies and amplitudes by frequency\n np_freqs = np.asarray(freqs)\n sorting = np.unravel_index(np.argsort(np_freqs), np_freqs.shape)\n np_freqs = np_freqs[sorting]\n np_ampls = np.asarray(ampls)[sorting]\n\n # Assemble some statistics\n config['mindist'].append(999999. if len(np_freqs) < 2 else np.min(np.diff(np_freqs)))\n config['minamplitude'].append(np.min(np_ampls) if len(np_ampls) > 0 else 999999.)\n config['frequencies'].append(np_freqs)\n config['amplitudes'].append(np_ampls)\n answer = np.concatenate((answer, counting))\n\n # Assemble results\n ffts = (result_r, result_b, result_h, result_m, result_p)\n return signals, ffts, answer, config", "def joint_pairs(self):\n return ((1, 4), (2, 5), (3, 6), (14, 11), (15, 12), (16, 13))", "def combine_event_functions(event_fn, t0, y0):\n with torch.no_grad():\n initial_signs = torch.sign(event_fn(t0, y0))\n\n def combined_event_fn(t, y):\n c = event_fn(t, y)\n return torch.min(c * initial_signs)\n\n return combined_event_fn", "def function(tuples1, tuples2):\n # Add your code here\n \n #Main idea is to sort the tuples separately and concatenate them later.\n tuples1.sort() #Default argument is always the first element in a tuple.\n tuples2.sort(key = lambda x: x[1])\n\t\n\treturn tuples1 + tuples2\n \"\"\" Alternate method:\n return sorted(tuples1, key = lambda x: x[0]) + sorted(tuples2, key = lambda x: x[1])\n \"\"\"", "def calculate_signals(self):\n\t\traise NotImplementedError(\n\t\t\t\"Should implement calculate_signals()\\n\" + \\\n\t\t\t\"By calling this method to calculate 'Signal' Events\"\n\t\t)", "def add(a, b):\n return tuple(x+y for x,y in zip(a,b))", "def WcCombiner(intermediates):\n\n # the use of the defaultdict data structures simplifies the summation of values (counts) of the intermediate\n # dictionaries. It only requires one statement, instead of 2, for creating a new key, value pair or\n # updating its values.\n result = defaultdict(int)\n\n # the following loop iterates over the first dictionary key and value pairs and then iterates over the next dictionary's\n # pairs. It continues until it iterates over all dictionaries that are members of the intermediates. While iterating,\n # a new dictionary is created, result, to hold all the pairs of the intermediate dictionaries, thus effectively\n # merging all of them.\n for k,v in chain(*intermediates):\n result[k] += v\n return result", "def add(self, signal_list):\n result = []\n for signals in signal_list:\n result.append(\n signals * signal.blackmanharris(\n len(signals),\n sym=False\n )\n )\n return result", "def base_to_signal_mapping(grp):\n\n position_in_signal = [0 for _ in range(5)]\n for i in range(1, len(grp)):\n position_in_signal += [i for _ in range(grp[i][5])]\n # position_in_signal += [grp[i][0] for _ in range(grp[i][5])]\n\n # print(position_in_signal)\n return position_in_signal", "def adc(self, signal):", "def combine(self,lo):\n lo = Connectivity(lo) \n if self.shape[1] < 2 or lo.shape[1] != 2:\n raise ValueError,\"Can only combine plex>=2 with plex==2\"\n elems = lo[self]\n elems1 = roll(elems,-1,axis=1)\n for i in range(elems.shape[1]):\n flags = (elems[:,i,1] != elems1[:,i,0]) * (elems[:,i,1] != elems1[:,i,1])\n elems[flags,i] = roll(elems[flags,i],1,axis=1)\n return Connectivity(elems[:,:,0])", "def additive_mixing(s, n):\n mixed_audio = s + n\n \n alpha = 1. 
/ np.max(np.abs(mixed_audio))\n mixed_audio *= alpha\n s *= alpha\n n *= alpha\n return mixed_audio, s, n, alpha", "def additive_mixing(s, n):\n mixed_audio = s + n\n \n alpha = 1. / np.max(np.abs(mixed_audio))\n mixed_audio *= alpha\n s *= alpha\n n *= alpha\n return mixed_audio, s, n, alpha", "def combine(k1, k2, k3, k4):\n\n return k1 + (k2 * 2.0) + (k3 * 2.0) + k4", "def calculate_signal(self, exclude = [], **kwargs): \n\n if self.verbose > 1:\n print(\"MultiLinearSpectra.calculate_signal()\") \n \n for m in range(len(self.mess)):\n if m not in exclude and self.mess[m][\"class\"] not in exclude:\n if hasattr(self.mess[m][\"object\"], \"calculate_signal\"):\n self.mess[m][\"object\"].calculate_signal()", "def emit(self, signal, value=None, gather=False):\n results = [] if gather else True\n if hasattr(self, 'connections') and signal in self.connections:\n for condition, values in self.connections[signal].items():\n if condition is None or condition == value or (callable(condition) and condition(value)):\n for slot, transform in values.items():\n if transform is not None:\n if callable(transform):\n used_value = transform(value)\n elif isinstance(transform, str):\n used_value = transform.format(value=value)\n else:\n used_value = transform\n else:\n used_value = value\n\n if used_value is not None:\n if(accept_arguments(slot, 1)):\n result = slot(used_value)\n elif(accept_arguments(slot, 0)):\n result = slot()\n else:\n result = ''\n else:\n result = slot()\n\n if gather:\n results.append(result)\n\n return results", "def _combine_relational(self, r0, t0, r1, t1, now):\n # logger.debug(\"combine_relational t0 {0} now {1}\".format(t0, now))\n assert(t0<=now)\n assert(t1 is None or t1<=now)\n\n x0, y0=r0\n x1, y1=r1\n\n # Special case where global detection hasn't happened yet.\n if t1 is None:\n t1=t0\n x1=[t0]\n y1=[y1[0]]\n\n absx=[(x+t0, 0, i) for (i, x) in enumerate(x0)]\n absx.extend([(x+t1, 1, i) for (i, x) in enumerate(x1)])\n y=[y0, y1]\n\n xx=list()\n yy=list()\n hazards=[0.0, 0.0]\n for t, j, i in sorted(absx, key=lambda x: x[0]):\n if not xx or t!=xx[-1]:\n xx.append(t)\n hazards[j]=y[j][i]\n yy.append(hazards[0]*hazards[1])\n else:\n hazards[j]=y[j][i]\n yy[-1]=(hazards[0]*hazards[1])\n\n xl=[x for x in xx if x<=now]\n xend=len(xl)-1\n xx=xx[xend:]\n yy=yy[xend:]\n xx[0]=now\n\n return ([x-now for x in xx], yy)", "def test_merge(self):\n # n.b. non-zero values. 
Zero time signals are ignored.\n kb_read1 = TimeSignal.from_values('kb_read', [0.0], [1.0], priority=8)\n kb_read2 = TimeSignal.from_values('kb_read', [0.0], [1.0], priority=10)\n kb_write1 = TimeSignal.from_values('kb_write', [0.0], [1.0], priority=8)\n\n # Test that we take the union of the available time series\n job1 = ModelJob(label=\"label1\", timesignals={'kb_read': kb_read1})\n job2 = ModelJob(label=\"label1\", timesignals={'kb_write': kb_write1})\n job1.merge(job2)\n\n self.assertEqual(len(job1.timesignals), len(signal_types))\n self.assertEqual(job1.timesignals['kb_read'], kb_read1)\n self.assertEqual(job1.timesignals['kb_write'], kb_write1)\n\n # (The other time signals should still be None)\n for ts_name in signal_types:\n if ts_name in ['kb_read', 'kb_write']:\n continue\n self.assertIn(ts_name, job1.timesignals)\n self.assertIsNone(job1.timesignals[ts_name])\n\n # check that when merging we take the signal with highest priority index\n job1 = ModelJob(label=\"label1\", timesignals={'kb_read': kb_read1})\n job2 = ModelJob(label=\"label1\", timesignals={'kb_read': kb_read2})\n job1.merge(job2)\n self.assertEqual(job1.timesignals['kb_read'], kb_read2)", "def _process_last(self, first, second):\n if not self.can_combine(first, second):\n # no combining\n self.combined.append(first)\n self.combined.append(second)\n else:\n # combine and terminate\n self.move_cursors_to_end(second)\n self.combine_and_select_block(first)", "def brepalgo_ConcatenateWire(*args):\n return _BRepAlgo.brepalgo_ConcatenateWire(*args)", "def join_bits(byteseq) -> int:\n return reduce(lambda acc, bit: (acc << 1) | int(bit), byteseq)", "def CombineVertex(self, *args):\n return _ShapeBuild.ShapeBuild_Vertex_CombineVertex(self, *args)", "def gen_signals(num_signals, sig_len, SNR_dB, sig_type, noise_type):\n\n SNR = 10 ** (SNR_dB / 10)\n if sig_type == \"sin\":\n return gen_complex_sinusoid(num_signals, sig_len, SNR, noise_type)\n elif sig_type == \"chirp_narrow\":\n return gen_chirps(num_signals, sig_len, SNR, .1/sig_len, noise_type)\n elif sig_type == \"chirp_1\":\n return gen_chirps(num_signals, sig_len, SNR, 1 / sig_len, noise_type)\n elif sig_type == \"chirp_2\":\n return gen_chirps(num_signals, sig_len, SNR, 2 / sig_len, noise_type)\n elif sig_type == \"noise\":\n return gen_noise(num_signals, sig_len)\n elif sig_type == \"bpsk\":\n return gen_bpsk(num_signals, sig_len, 16, SNR, noise_type)\n elif sig_type == \"ham_bpsk\":\n return gen_ham_bpsk(num_signals, sig_len, 16, SNR, noise_type)\n elif sig_type == \"ar_noise\":\n return gen_ar_noise(num_signals, sig_len)", "def union(x):\n if len(x) < 2:\n return x\n\n # Make sure everybody have the same shape\n first_shape = tuple(x[0].shape)\n for pixmap in x[1:]:\n if first_shape != tuple(pixmap.shape):\n return []\n\n return [np.bitwise_or.reduce(np.array(x).astype(int))]", "def combine(lower, upper):\n units = zip(upper, lower[:-1], lower[1:])\n return tuple(max(A+B, A+C) for A, B, C in units)", "def add_edge_length(self, a, b):\n return tuple(sum(x) for x in zip(a, b))", "def combined_gaussian(amps, fwhms, means, x):\n if len(amps) > 0.:\n for i in range(len(amps)):\n gauss = gaussian(amps[i], fwhms[i], means[i], x)\n if i == 0:\n combined_gauss = gauss\n else:\n combined_gauss += gauss\n else:\n combined_gauss = np.zeros(len(x))\n return combined_gauss", "def combined_step_count(intersection_coords, wire_one_map, wire_two_map):\n return wire_one_map[intersection_coords] + wire_two_map[intersection_coords]", "def union(tuple1, tuple2):\n if tuple1 is None:\n 
return tuple2\n if tuple2 is None:\n return tuple1\n l = list(tuple1)\n l.extend([e for e in tuple2 if e not in tuple1])\n return tuple(l)", "def sum_tuples(t1, t2):\n return tuple(sum(t) for t in zip(t1, t2))", "def signal(self, orientation):\n #return np.array([src.emission(orientation) for src in self.virtualsources]).sum(axis=0)\n #signal = 0.0\n #for src in self.virtualsources:\n #signal += src.emission(orientation)\n #return signal\n #print(orientation)\n return sum((src.emission(orientation.copy()) for src in self.virtualsources))", "def brepalgo_ConcatenateWireC0(*args):\n return _BRepAlgo.brepalgo_ConcatenateWireC0(*args)", "def test_merge_ignores_empty_timesignals(self):\n kb_read = TimeSignal.from_values('kb_read', [0.0], [1.0])\n kb_write = TimeSignal.from_values('kb_write', [0.0], [0.0]) # n.b. zero data\n\n job1 = ModelJob(label=\"label1\", timesignals={'kb_read': kb_read})\n job2 = ModelJob(label=\"label1\", timesignals={'kb_write': kb_write})\n\n self.assertIsNone(job1.timesignals['kb_write'])\n self.assertIsNotNone(job2.timesignals['kb_write'])\n job1.merge(job2)\n self.assertIsNone(job1.timesignals['kb_write'])", "def xor(x):\r\n if signal(x[0]) == signal(x[1]):\r\n return 1.0\r\n return 0.0", "def concatenate(data, axis):\n if not isinstance(data, Call):\n data = list(data)\n if not data:\n raise ValueError(\"relay.concatenate requires data to be non-empty.\")\n if not isinstance(data, Call):\n data = Tuple(data)\n if not isinstance(axis, int):\n raise ValueError(\"For now, we only support integer axis\")\n return _make.concatenate(data, axis)", "def test_read_multiple_specified_signals(self):\n cwd = os.path.dirname(os.path.abspath(__file__))\n test_dir = os.path.join(cwd, 'test_files/')\n signals = read_signals(test_dir, ['test2', 'test3'])\n self.assertEquals(len(signals), 2)", "def combine(rvs):\n\n combined = []\n used = [False] * len(rvs)\n for idx, (cond, rv) in enumerate(rvs):\n if used[idx]:\n continue\n used[idx] = True\n for idx2, (cond2, rv2) in enumerate(rvs):\n if used[idx2]:\n continue\n solver = z3.Solver()\n e = symnot(symeq(rv, rv2))\n solver.add(unwrap(e))\n c = solver.check()\n if c == z3.unsat:\n used[idx2] = True\n cond = wrap(z3.Or(unwrap(cond), unwrap(cond2)))\n combined.append((cond, rv))\n return combined", "def getCombination(mu1, mu2, sig1, sig2, confidence1, confidence2):\n\tglobal alpha, beta, gamma\n\n\t#Standard Bayesian\n\t# sigNew = math.sqrt(math.pow(sig1, 2) + math.pow(sig2, 2))\n\t# muNew = u1 + u2\n\t# return muNew, sigNew \n\n\t##In accordance with the nature papers:\n\tsigNew = (math.pow(sig1,2) * math.pow(sig2, 2)) \\\n\t/ float((math.pow(sig1,2) + math.pow(sig2, 2)))\n\tinv1 = 1 / float((math.pow(sig1, 2)))\n\tinv2 = 1 / float((math.pow(sig2, 2)))\n\tsumInverses = inv1 + inv2\n\n\t##inverse standard deviations squared\n\t# w1 = inv1 / float(sumInverses)\n\t# w2 = inv2 / float(sumInverses)\n\n\t## equal weighting\n\t# w1 = .5\n\t# w2 = .5\n\n\t## weightings based off of confidence\n\t# summation = confidence1 + confidence2\n\t# w1 = confidence1 / float(summation)\n\t# w2 = confidence2 / float(summation)\n\n\t##weightings with exponentials\n\t# w1 = w1**.001\n\t# w2 = w2**.001\n\t# newSummation = w1 + w2\n\t# w1 = w1 / float(newSummation)\n\t# w2 = w2 / float(newSummation)\n\n\t##weightings with polynomial factors\n\tw1 = (beta * confidence1 + alpha)**gamma \n\tw2 = (beta * confidence2 + alpha)**gamma \n\tnewSummation = w1 + w2\n\tw1 = w1 / float(newSummation)\n\tw2 = w2 / float(newSummation)\n\n\tmuNew = w1 * mu1 + w2 * 
mu2\n\treturn muNew, sigNew", "def combine(*selectors):\n def combined_selector(datetime_obj):\n return tuple(selector(datetime_obj) for selector in selectors)\n return combined_selector", "def union(first, second):\n # Put your code here.", "def compose(*funcs):\n # return lambda x: reduce(lambda v, f: f(v), funcs, x)\n if funcs:\n return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)\n else:\n raise ValueError('Composition of empty sequence not supported.')", "def compose(*funcs):\n # return lambda x: reduce(lambda v, f: f(v), funcs, x)\n if funcs:\n return reduce(lambda f, g: lambda *a, **kw: g(f(*a, **kw)), funcs)\n else:\n raise ValueError(\"Composition of empty sequence not supported.\")", "def get_overlapping_conn(conn1: NDArray,\n conn2: NDArray) -> Tuple[NDArray, NDArray]:\n\n conn_union = np.empty((0, 3), dtype=np.int32)\n\n # Get unique components\n if np.ma.is_masked(conn1):\n concomp1 = np.unique(conn1).compressed()\n else:\n concomp1 = np.unique(conn1)\n\n if np.ma.is_masked(conn2):\n concomp2 = np.unique(conn2).compressed()\n else:\n concomp2 = np.unique(conn2)\n\n # Loop through them and connect size and number of overlapping data\n for ix2 in concomp2:\n for ix1 in concomp1:\n # Skip 0 component combination with other components\n if not ix1 == 0 and not ix2 == 0:\n idx = np.where((conn1 == ix1) & (conn2 == ix2))[0]\n if np.count_nonzero(idx) > 0:\n carray = np.array([ix2, ix1, np.count_nonzero(idx)],\n dtype=np.int32, ndmin=2)\n\n conn_union = np.concatenate((conn_union, carray), axis=0)\n\n # Get 0 components in both frames\n elif ix1 == 0 and ix2 == 0:\n idx = np.where((conn1 == ix2) & (conn2 == ix1))[0]\n if np.count_nonzero(idx) > 0:\n carray = np.array([ix2, ix1, np.count_nonzero(idx)],\n dtype=np.int32, ndmin=2)\n\n conn_union = np.concatenate((conn_union, carray), axis=0)\n\n # Find components to correct in Frame 2\n conn_pairs = np.empty((0, 3), dtype=np.int32)\n\n for k in np.unique(conn_union[:, 0]):\n ik = conn_union[:, 0] == k\n # find number of times components is referenced\n count = np.sum(conn_union[:, 0] == k)\n\n if count > 1:\n max_points = np.max(conn_union[ik][:, 2])\n # Select the one with the most points\n ik = np.where((conn_union[:, 0] == k) &\n (conn_union[:, 2] == max_points))[0]\n # Select first if there are more pairs with same num of points\n ik = np.array(ik[0], ndmin=1) if ik.shape[0] > 1 else ik\n\n conn_pairs = np.concatenate((conn_pairs, conn_union[ik]), axis=0)\n\n return conn_pairs", "def vertices_join(labels, points):\n\tpoints = [tuple(item) for item in points]\n\treturn [((labels[index],) + value) for index, value in enumerate(points)]", "def gen_concatenate(iterators):\n for it in iterators:\n yield from it", "def tup_add(t1, t2):\n return tuple(map(operator.add, t1, t2))", "def combine(addresses1, addresses2, data):\n for n, byte in enumerate(data):\n yield (\n addresses1[n] if addresses1 is not None else None,\n addresses2[n] if addresses2 is not None else None,\n byte)", "def combine(combination_input):\n\n output = sum([map(list, itertools.combinations(combination_input, i)) for i in range(len(combination_input) + 1)], [])\n output_final = [sorted(i) for i in output if len(i)>1]\n\n return sorted(output_final)", "def _create_concat(cls, onnx_node, inputs, opset_version):\n factor = onnx_node.attrs[\"axis\"]\n if factor < 0:\n factor = len(inputs[0].shape\n ) + factor # in order to support the negative axis\n _, forward = cls._common_onnx_node_to_singa_op(onnx_node, inputs,\n opset_version)\n return None, 
forward(axis=factor)", "def curate_interactions(interactions):\n interactions = remove_duplicate_interactions(interactions)\n interactions = compact_interactions(interactions) # already sorted\n return tuple(interactions)", "def combineResult(self, *xpars):\n if len(xpars) == 0:\n xpars = self.__x\n xshape = self.__xshape\n else:\n assert len(xpars) == self.__nx # The input parameter number should be consistent.\n xshape = xpars[0].shape\n #-> Calculate the add model components\n addCmpDict = {}\n for modelName in self._addList:\n mf = self.__modelDict[modelName]\n addCmpDict[modelName] = mf(*xpars)\n #-> Manipulate the model components\n for modelName in self._mltList:\n mf = self.__modelDict[modelName]\n my = mf(*xpars) # multiplied y component\n #--> Multiply the current component to the target models\n for tmn in mf.multiList:\n addCmpDict[tmn] *= my\n #-> Add up all the add models\n result = np.zeros(xshape, dtype=self.dtype)\n #print addCmpDict\n for modelName in self._addList:\n result += addCmpDict[modelName]\n return result", "def emit_signal(self, *args): \n\tif len(args) > 0:\n \tprint 'Emitting ' + args[0]\n \tout = 'Arguments: '\n \tfor i in range(len(args)-1):\n\t\t out += str(args[i+1])\n\t\tprint out\n \tself.emit(QtCore.SIGNAL(args[0]), *args)", "def add_reactions_w_combinatorics(self, node):\n if self.is_all_incoming_edges_activation_edges(node):\n self.add_complex_activation_reactions(node)\n # TODO separate reactions should be here\n elif self.is_all_incoming_edges_inhibition_edges(node):\n raise NotImplementedError\n else: # a mix of inhibition and activation edges\n self.add_separate_mix_reactions(node)", "def bone_pairs(self):\n return ((0, 3), (1, 4), (2, 5), (10, 13), (11, 14), (12, 15))", "def seq_aggregate_with_prop(x, y):\n res = []\n for i in range(0, len(x)):\n res.append(\n (x[i][0], x[i][1], get_aggregation_func_by_name(x[i][0])(x[i][2], y[i][2]))\n )\n return tuple(res)", "def combine_chain_sequence(msa_name, split=False):\n from read_pdb import read_pdb\n fname = \"(combine_pdb_seq)\"\n print(\"{}\\tcat-ting sequences...\".format(fname))\n print(fname + \"\\t\\tFasta file\\tNumber of chains\")\n\n chain_ids = [(msa_name.strip(\".fas\")).split(\"_\")[1], (msa_name.strip(\".fas\")).split('_')[3]]\n pdb_chain, pdb_fasta_seq = read_pdb(msa_name)\n header_seq_dict = dict(zip(pdb_chain, pdb_fasta_seq))\n\n # For each chain in msa find relevant pdb seq and cat the two seqs\n first_seq = header_seq_dict[chain_ids[0]]\n second_seq = header_seq_dict[chain_ids[1]]\n full_seq = first_seq + second_seq\n print(\"{}\\t\\tPDB seq length: {}\".format(fname, len(full_seq)))\n print(\"\\tFinished cat-ting sequences.\".format(fname))\n if split:\n return first_seq, second_seq\n else:\n return full_seq", "def _add_stream(aa, bb):\n carry = 0\n cc = []\n for aaa, bbb in zip(aa, bb):\n carry, val = _add_digit(aaa,bbb, carry)\n cc.append(val)\n if carry:\n cc.append(carry)\n return cc", "def combine_latest(source: Observable[Any]) -> Observable[Any]:\n\n sources = (source,) + others\n\n return reactivex.combine_latest(*sources)", "def encode(self, signal: np.ndarray) -> np.ndarray:\n pass", "def SignalDistance(s1,s2):\n\tdist = 0\n\n\t# calculate absolute distance between each element\n\tfor i in range(0, len(s1)):\n\t\tdist += np.abs(s1[i] - s2[i])\n\treturn dist", "def connect(ends):\n d = np.diff(ends, axis=0)[0]\n j = np.argmax(np.abs(d))\n D = d[j]\n aD = np.abs(D)\n return ends[0] + (np.outer(np.arange(aD + 1), d) + (aD >> 1)) // aD", "def equalizeShapes( signal1, 
signal2):\r\n\r\n\tif signal1.size < signal2.size: signal1 = np.append( signal1, [0] * (signal2.size-signal1.size))\r\n\telif signal1.size > signal2.size: signal2 = np.append( signal2, [0] *(signal1.size - signal2.size))\r\n\treturn signal1, signal2", "def features_combine():\n\n\n\t# PROCESSING AUDIO", "def _combine_epics(\n norm_epics: Iterable[Epic],\n action_: Observable, state_: Observable\n) -> Observable:\n return merge(*map(run_epic(action_, state_), norm_epics))", "def combine_epics(*epics: Iterable[Epic]) -> Epic:\n return partial(_combine_epics, tuple(map(normalize_epic, epics)))", "def get_signal_info(self, signal_names):\n result = []\n for name in signal_names:\n description = self._pio.signal_description(name)\n domain_type = self._pio.signal_domain_type(name)\n aggregation, format_type, behavior = self._pio.signal_info(name)\n result.append((name, description, domain_type, aggregation, format_type, behavior))\n return result", "def signalTransform(dat):\n return numpy.convolve(dat, slopWindow, \"same\")", "def combine_inputs(self, left, right=[],undo=False):\n if undo:\n return self._combine_inputs_undo(left, right)", "def connect_all_signals():\n rohypnol.connect()", "def convolve(x, attn):\n stacked = torch.stack([pad_shift(x, i) for\n i in range(attn.shape[2])], dim=-1)\n return torch.sum(attn.unsqueeze(2) * stacked, dim=-1)", "def cbGetSignal( BoardNum, Direction, Signal, Index, Connection, Polarity ):\n Connection = ctypes.c_int( Connection )\n Polarity = ctypes.c_int( Polarity )\n CHK( cbw.cbGetSignal( BoardNum, Direction, Signal, Index,\n byref( Connection ), byref( Polarity ) ) )\n return Connection.value, Polarity.value", "def _get_next_signal(self, date_time):\n logger.warning('_get_next_signal called')\n try:\n k = self.required_signals.iterkeys().next()\n except StopIteration:\n return None\n v = self.required_signals.pop(k)\n v[STATUS_STARTED] = date_time\n return k, v", "def single_point_crossover(a: np.ndarray, b: np.ndarray, point: int) -> Tuple[np.ndarray, np.ndarray]:\n\n new_a = np.concatenate((a[:point+1], b[point+1:]))\n new_b = np.concatenate((b[:point+1], a[point+1:]))\n return new_a, new_b", "def connect_signals_and_slots(self):\n # Orthogonality\n self.orthogonality_m.sliderReleased.connect(self.calc_integral)\n self.psi_psi.clicked.connect(self.calc_integral)\n self.psi_phi.clicked.connect(self.calc_integral)\n self.phi_phi.clicked.connect(self.calc_integral)\n\n # Fourier series\n self.function_selector.currentIndexChanged.connect(self.select_function)\n self.number_of_terms.sliderReleased.connect(self.change_terms)\n self.save.clicked.connect(self.save_file)\n self.fs_checkbox.stateChanged.connect(self.select_function)\n\n #Parseval's Theorem\n self.number_of_terms_pi.sliderReleased.connect(self.approximate_pi)\n self.parseval_save.clicked.connect(self.save_file)", "def combined_inbox_count(request):\r\n count = 0\r\n for func in inbox_count_sources():\r\n counts = func(request)\r\n if counts:\r\n for value in counts.itervalues():\r\n try:\r\n count = count + int(value)\r\n except (TypeError, ValueError):\r\n pass\r\n return {'combined_inbox_count': count,}", "def _read_signals(edf_file, header):\n signals = OrderedDict([(label, []) for label in header['label']])\n\n while True:\n try:\n record = _read_record(edf_file, header)\n except EOFError:\n break\n\n for label, signal in record.items():\n signals[label].append(signal)\n\n for label, signal in signals.items():\n signals[label] = np.concatenate(signal)\n\n return signals", "def 
combine_permutations(p1, p2):\n p = tuple(map(p2.__getitem__, p1))\n return p", "def calculate_signal(self):\n y = self.data.get_bar_values(self.pair[0], \"adj_close\", N=self.ols_window)\n x = self.data.get_bar_values(self.pair[1], \"adj_close\", N=self.ols_window)\n\n if y is not None and x is not None:\n if len(y) >= self.ols_window and len(x) >= self.ols_window:\n # get hedge ratio\n self.hedge_ratio = sm.OLS(y, x).fit().params[0]\n\n # get z score of residuals\n spread = y - self.hedge_ratio * x\n zscore_last = ((spread - spread.mean()) / spread.std())[-1]\n\n # calculate signals and add to events queue\n y_signal, x_signal = self.calculate_xy_signal(zscore_last)\n if y_signal is not None and x_signal is not None:\n self.events.put(y_signal)\n self.events.put(x_signal)", "def connect(signal):\n def wrapper(func):\n REGISTRY.setdefault(signal, Signal(signal)).connect(func)\n return func\n return wrapper", "def add4(a,b):\n return [a[0]+b[0],a[1]+b[1],a[2]+b[2],a[3]+b[3]]", "def two_point_crossover(a: np.ndarray, b: np.ndarray, first: int, second: int) -> Tuple[np.ndarray, np.ndarray]:\n\n new_a = np.concatenate((a[:first+1], b[first+1:second], a[second:]))\n new_b = np.concatenate((b[:first+1], a[first+1:second], b[second:]))\n return new_a, new_b", "def add_tuple(tuple_a=(), tuple_b=()):\n list_a = list(tuple_a) + ([0] * (-len(tuple_a) + 2))\n list_b = list(tuple_b) + ([0] * (-len(tuple_b) + 2))\n return tuple(x + y for (x, y, z) in zip(list_a, list_b, range(2)))", "def combiner(self, key, values):\n yield key, sum(values, ValueFormat(0, 0))", "def combine_many(*fudge):\n result = sum(fudge)\n print(result)" ]
[ "0.60273904", "0.57812446", "0.5720774", "0.5197646", "0.5101901", "0.50511295", "0.49429286", "0.49002105", "0.48530227", "0.48525456", "0.48228434", "0.48220006", "0.48124447", "0.47445247", "0.4741653", "0.47265753", "0.46585917", "0.4639718", "0.46313864", "0.46211886", "0.45875856", "0.45622447", "0.456004", "0.45294926", "0.44831488", "0.44716176", "0.44523197", "0.44461283", "0.44461283", "0.44276547", "0.44230488", "0.44227588", "0.44174734", "0.44086766", "0.4398419", "0.43929914", "0.43912974", "0.43895203", "0.43866533", "0.43820462", "0.43802178", "0.43698877", "0.43535304", "0.4319622", "0.4315991", "0.43050227", "0.4301929", "0.4299982", "0.42962113", "0.42949077", "0.4292695", "0.42911792", "0.42821527", "0.428135", "0.42717457", "0.42713743", "0.42693645", "0.42670098", "0.42614123", "0.42598844", "0.425268", "0.42497566", "0.42468584", "0.42450818", "0.4240843", "0.4234181", "0.4230071", "0.4225261", "0.42219692", "0.42215043", "0.42174467", "0.42169788", "0.42169237", "0.42167068", "0.42152223", "0.42146033", "0.41981488", "0.4192795", "0.4185505", "0.4181764", "0.41795436", "0.41783017", "0.41685766", "0.41664565", "0.4163745", "0.41559476", "0.41543883", "0.4147898", "0.41477737", "0.41474947", "0.4137736", "0.41362637", "0.41326848", "0.4131377", "0.41271895", "0.4125751", "0.41253433", "0.41244504", "0.4120732", "0.41169846" ]
0.5614806
3
Reduce a bunch of periodic signals to a single signal.

The value of x that answers the puzzle is the first place (c + x) % T = 0,
that is to say, c + x = T, or x = T - c.

>>> solve_buses(prep_input(EXAMPLE_BUSES))
1068781
def solve_buses(prepared_buses):
    T, c = functools.reduce(combine_signals, prepared_buses)
    return T - c
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def solution2(inp):\n inp = get_lines(inp)\n notes = inp[1].split(\",\")\n\n offsets = {}\n for i, bus in enumerate(notes):\n if bus == 'x':\n continue\n bus = int(bus)\n offsets[bus] = i\n buses = set(offsets)\n old_buses = buses.copy()\n\n def search(bus, offset, t):\n if (t + offset) % bus == 0:\n buses.remove(bus)\n if len(buses) == 0:\n return True\n new_bus = max(buses)\n return search(new_bus, offsets[new_bus], t)\n return False\n\n cbus = max(buses)\n max_bus = cbus\n s = 100_000_000_000_000\n s = 0\n s = s - s % cbus - offsets[cbus]\n delta = cbus\n stack = buses.copy()\n stack.remove(cbus)\n sec_max = max(stack)\n while not search(max_bus, offsets[max_bus], offsets[max_bus]):\n buses = old_buses.copy()\n s += delta\n if (s + offsets[sec_max]) % sec_max == 0:\n if len(stack) != 0:\n cbus = max(stack)\n stack.remove(cbus)\n if len(stack) != 0:\n sec_max = max(stack)\n else:\n return s\n delta *= cbus\n\n return s - offsets[max(offsets)]", "def reduce_B(self, B_on_standard_basis_handles):\n # TODO: Check this description, then move to docstring\n #To see this dt effect, consider:\n #\n #dx/dt = Ax+Bu, approximate as (x^(k+1)-x^k)/dt = Ax^k + Bu^k.\n #Rearranging terms, x^(k+1) = (I+dt*A)x^k + dt*Bu^k.\n #The impulse response is: x^0=0, u^0=1, and u^k=0 for k>=1.\n #Thus x^1 = dt*B, x^2 = dt*(I+dt*A)*B, ...\n #and y^1 = dt*C*B, y^2 = dt*C*(I+dt*A)*B, ...\n #However, the impulse response to the true discrete-time system is\n #x^1 = B, x^2 = A_d*B, ...\n #and y^1 = CB, y^2 = CA_d*B, ...\n #(where I+dt*A ~ A_d)\n #The important thing to see is the factor of dt difference.\n \n self.B_reduced = self.vec_space.compute_inner_product_mat(\n self.adjoint_basis_vec_handles, B_on_standard_basis_handles)\n if not self.is_basis_orthonormal:\n self.B_reduced = self._get_proj_mat() * self.B_reduced\n return self.B_reduced", "def get_quickest_bus(departure_time: int, buses: List[int]) -> int:\n quickest_bus = sorted(buses,\n key=lambda x: get_wait_time(departure_time, x),\n reverse=False)[0]\n\n return get_wait_time(departure_time, quickest_bus) * quickest_bus", "def solution1(inp):\n inp = get_lines(inp)\n earliest = int(inp[0])\n notes = inp[1].split(',')\n min_bus = None\n for bus in notes:\n if bus == 'x':\n continue\n bus = int(bus)\n wait_time = bus - earliest % bus\n if min_bus == None or wait_time < (min_bus - earliest % min_bus):\n min_bus = bus\n return min_bus * (min_bus - earliest % min_bus)", "def filter_buses(list_of_buses):\n for bus in list_of_buses:\n return bus", "def soustraction(a,b):\n bina = [int(x) for x in bin(a)[2:]]\n binb = [int(x) for x in bin(b)[2:]]\n while len(bina) >= len(binb):\n binb = [0]+binb\n while len(bina) < len(binb)-1:\n bina = [0]+bina\n bina.reverse()\n binb.reverse()\n n = len(bina)+len(binb)\n na = len(bina)\n q = QuantumRegister(n+1, 'q')\n circ = QuantumCircuit(q)\n for i in range(na):\n if bina[i]:\n circ.x(q[i])\n for i in range(len(binb)):\n if binb[i]:\n circ.x(q[na+i])\n sub(circ, q, [q[i] for i in range(len(bina))], [q[i+na] for i in range(len(binb)-1)], q[n], q[na+len(binb)-1])\n circ_m = measure(circ, q, [i for i in range(na, n)])\n return circ_m", "def prep_input(buses):\n return sorted([(bus, offset) \n for offset, bus \n in enumerate(buses) \n if bus], reverse=True)", "def reverse_map(coarse_grained, mapping_moieties, target=None, solvent_name=None, sol_per_bead=4, sol_cutoff=2, scaling_factor=5, parallel=True):\n\n aa_system = Compound()\n\n not_solvent = [mol for mol in coarse_grained.children if mol.name != solvent_name]\n is_solvent 
= [mol for mol in coarse_grained.children if mol.name == solvent_name]\n\n print(\"There are {} non-solvent molecules and {} solvent molecules.\".format(len(not_solvent), len(is_solvent)))\n\n # For each bead, replace it with the appropriate mb compound\n # Iterate through each molecule (set of particles that are bonded together)\n if parallel:\n pool = mp.Pool(processes=mp.cpu_count())\n\n # get the solvent molecules mapped in parallel\n inp = zip(is_solvent,\n [target[solvent_name]]*len(is_solvent),\n [sol_per_bead]*len(is_solvent),\n [sol_cutoff]*len(is_solvent))\n chunksize = int(len(is_solvent) / mp.cpu_count()) + 1\n solvent_list = pool.starmap(reverse_map_solvent, inp, chunksize)\n # name the solvents\n\n # get the non_solvent molecules mapped in parallel\n inp = zip(not_solvent,\n [target]*len(not_solvent),\n [mapping_moieties]*len(not_solvent))\n chunksize = int(len(not_solvent) / mp.cpu_count()) + 1\n molecule_list = pool.starmap(reverse_map_molecule, inp, chunksize)\n\n\n # put put solvents in one list\n solvent_molecule_list = []\n for i in solvent_list:\n solvent_molecule_list += i\n\n # put lipids in a box and get the box size\n for molecule in molecule_list:\n aa_system.add(molecule)\n\n print(aa_system.boundingbox)\n\n # put everything in a box\n for molecule in solvent_molecule_list:\n aa_system.add(molecule)\n\n else:\n [aa_system.add(reverse_map_molecule(molecule, target, mapping_moieties)) for molecule in not_solvent]\n solvent_compound = reverse_map_solvent(is_solvent, target[solvent_name], sol_per_bead, sol_cutoff)\n [aa_system.add(molecule) for molecule in solvent_compound.children]\n\n\n return aa_system", "def compute_bias(ics, vbc):\n import os, time\n from seren3.array import SimArray\n \n # Compute size of grid and boxsize (for this patch)\n N = vbc.shape[0]\n boxsize = ics.boxsize.in_units(\"Mpc a h**-1\") * (float(N) / float(ics.header.N))\n\n # Compute vbc @ z=1000\n z = ics.z\n rms = vbc_rms(vbc)\n rms_recom = rms * (1001./z)\n\n # Check for PS and run CICsASS if needed\n fname_vbc0 = vbc_ps_fname(0., z, boxsize)\n if not os.path.isfile(fname_vbc0):\n exit_code = run_cicsass(boxsize, z, 0., fname_vbc0)\n\n fname_vbcrecom = vbc_ps_fname(rms_recom, z, boxsize)\n if not os.path.isfile(fname_vbcrecom):\n exit_code = run_cicsass(boxsize, z, rms_recom, fname_vbcrecom)\n\n # Load power spectra and compute bias\n ps_vbc0 = np.loadtxt(fname_vbc0, unpack=True)\n ps_vbcrecom = np.loadtxt(fname_vbcrecom, unpack=True)\n\n # Should have same lenghts if finished writing\n count = 0\n while len(ps_vbcrecom[1]) != len(ps_vbc0[1]):\n count += 1\n if count > 10:\n raise Exception(\"Reached sleep limit. 
Filesizes still differ\")\n time.sleep(5)\n ps_vbc0 = np.loadtxt(fname_vbc0, unpack=True)\n ps_vbcrecom = np.loadtxt(fname_vbcrecom, unpack=True)\n\n #CDM bias\n b_cdm = ps_vbcrecom[1] / ps_vbc0[1]\n # Baryon bias\n b_b = ps_vbcrecom[2] / ps_vbc0[2]\n # Wavenumber\n k_bias = SimArray(ps_vbcrecom[0] / ics.cosmo[\"h\"], \"h Mpc**-1\")\n\n return k_bias, b_cdm, b_b", "def fast_fdsb(self, signals: np.ndarray, delays: np.ndarray) -> np.ndarray:\n\t\tif (len(signals.shape) == 2): signals = signals[None,:,:]\n\n\t\tfconv = np.einsum(\"kij,ijlm->ilmk\", signals, delays)\n\t\tconv = np.fft.ifft(fconv, axis=0).real\n\t\tsquared_conv = np.einsum(\"ijkm,ijkm->jkm\", conv, conv)\n\t\treturn squared_conv", "def drag_schedules(beta_list, qubits, pulse_amp, pulse_width,\n pulse_sigma=None,\n width_sigma_ratio=4, drives=None, cmd_def=None,\n inst_map=None, meas_map=None):\n\n xdata = beta_list\n\n # copy the instruction to schedule mapping\n inst_map = copy.deepcopy(inst_map)\n if not inst_map:\n inst_map = copy.deepcopy(cmd_def)\n\n if pulse_sigma is None:\n pulse_sigma = pulse_width / width_sigma_ratio\n\n # Construct the circuits\n qr = qiskit.QuantumRegister(max(qubits) + 1)\n cr = qiskit.ClassicalRegister(len(qubits))\n\n circuits = []\n\n for circ_index, b_amp in enumerate(beta_list):\n\n circ = qiskit.QuantumCircuit(qr, cr)\n circ.name = 'dragcircuit_%d_0' % circ_index\n\n for qind, qubit in enumerate(qubits):\n\n # positive drag pulse\n drag_pulse = pulse_lib.drag(duration=pulse_width,\n amp=pulse_amp[qind],\n beta=b_amp,\n sigma=pulse_sigma,\n name='drag_pulse_%d_%d' % (circ_index,\n qubit))\n\n drag_gate = Gate(name='drag_%d_%d' % (circ_index, qubit),\n num_qubits=1, params=[])\n\n # add commands to schedule\n schedule = pulse.Schedule(name='drag_pulse_%f_%d' % (b_amp,\n qubit))\n\n schedule += drag_pulse(drives[qubit])\n\n # append this schedule to the inst_map\n inst_map.add('drag_%d_%d' % (circ_index, qubit), qubits=[qubit],\n schedule=schedule)\n\n # negative pulse\n drag_pulse2 = pulse_lib.drag(duration=pulse_width,\n amp=-1*pulse_amp[qind],\n beta=b_amp,\n sigma=pulse_sigma,\n name='drag_pulse_%d_%d' % (circ_index,\n qubit))\n\n drag_gate2 = Gate(name='drag2_%d_%d' % (circ_index, qubit),\n num_qubits=1, params=[])\n\n # add commands to schedule\n schedule2 = pulse.Schedule(name='drag_pulse2_%f_%d' % (b_amp,\n qubit))\n\n schedule2 += drag_pulse2(drives[qubit])\n\n # append this schedule to the inst_map\n inst_map.add('drag2_%d_%d' % (circ_index, qubit), qubits=[qubit],\n schedule=schedule2)\n\n circ.append(drag_gate, [qr[qubit]])\n # circ.u1(np.pi, [qr[qubit]])\n circ.append(drag_gate2, [qr[qubit]])\n\n for qind, qubit in enumerate(qubits):\n circ.measure(qr[qubit], cr[qind])\n\n circuits.append(circ)\n\n # schedule\n schedule_config = ScheduleConfig(inst_map, meas_map)\n drag_sched = [schedule_circuit(qcirc,\n schedule_config)\n for qcirc in circuits]\n\n return drag_sched, xdata", "def build_bridge(blocks):\n bridges = []\n for start in [ b for b in blocks if 0 in b ]:\n tmp = blocks[:]\n tmp.remove(start)\n bridges.append(build(tmp, start[1], [start], sum(start)))\n return find_max()", "def solve(self):\n\n # Assign variables to each quantity being solved.\n r_lookup, lookup, num = {}, {}, 0\n for element in self.elements:\n if is_wire(element) and element is not self.ground:\n lookup[num] = element\n r_lookup[element] = num\n num += 1\n elif not is_cs(element) and element is not self.ground:\n lookup[num] = element\n r_lookup[element] = num\n num += 1\n\n # Set up the linear algebraic 
equation Ax=b\n A = np.zeros((num, num))\n b = np.zeros(num)\n for row, element in lookup.items():\n if is_wire(element) and element is not self.ground:\n for two_sided in element.attached:\n if is_cs(two_sided):\n if two_sided.pos is element:\n b[row] += -1 * two_sided.current\n else:\n b[row] += two_sided.current\n else:\n if two_sided.pos is element:\n flow = 1\n else:\n flow = -1\n A[row, r_lookup[two_sided]] = flow\n elif is_vs(element):\n check_connected(element)\n if element.pos is not self.ground:\n A[row, r_lookup[element.pos]] = 1\n if element.neg is not self.ground:\n A[row, r_lookup[element.neg]] = -1\n b[row] = element.voltage\n elif is_resistor(element):\n check_connected(element)\n if element.pos is not self.ground:\n A[row, r_lookup[element.pos]] = 1\n if element.neg is not self.ground:\n A[row, r_lookup[element.neg]] = -1\n A[row, r_lookup[element]] = -1 * element.resistance\n\n b = b.reshape((num, 1))\n try:\n x = np.linalg.solve(A, b)\n except np.linalg.LinAlgError:\n raise CircuitError('Insufficient information to solve circuit')\n\n # Assign values to all circuit components\n for i in range(num):\n item = lookup[i]\n if is_wire(item):\n item.potential = x[i, 0]\n elif isinstance(item, DualSided):\n item.current = x[i, 0]\n\n # Mark circuit as solved\n self.been_solved = True", "def reduce_B(self, B_on_standard_basis_array):\n # TODO: Check this description, then move to docstring\n #To see this dt effect, consider:\n #\n #dx/dt = Ax+Bu, approximate as (x^(k+1)-x^k)/dt = Ax^k + Bu^k.\n #Rearranging terms, x^(k+1) = (I+dt*A)x^k + dt*Bu^k.\n #The impulse response is: x^0=0, u^0=1, and u^k=0 for k>=1.\n #Thus x^1 = dt*B, x^2 = dt*(I+dt*A)*B, ...\n #and y^1 = dt*C*B, y^2 = dt*C*(I+dt*A)*B, ...\n #However, the impulse response to the true discrete-time system is\n #x^1 = B, x^2 = A_d*B, ...\n #and y^1 = CB, y^2 = CA_d*B, ...\n #(where I+dt*A ~ A_d)\n #The important thing to see is the factor of dt difference.\n\n self.B_reduced = self.vec_space.compute_inner_product_mat(\n self.adjoint_basis_vecs, B_on_standard_basis_array)\n if not self.is_basis_orthonormal:\n self.B_reduced = self._get_proj_mat() * self.B_reduced\n return self.B_reduced", "def simulate_strategy_loop_known(\n num_buses,\n states,\n decisions,\n utilities,\n costs,\n ev,\n increments,\n num_periods,\n beta,\n unobs,\n):\n for period in range(num_periods):\n for bus in range(num_buses):\n\n old_state = states[bus, period]\n if (-costs[old_state, 0] + unobs[bus, period, 0] + beta * ev[old_state]) > (\n -costs[0, 0] - costs[0, 1] + unobs[bus, period, 1] + beta * ev[0]\n ):\n decision = 0\n utility = -costs[old_state, 0] + unobs[bus, period, 0]\n new_state = old_state + increments[bus, period]\n else:\n decision = 1\n utility = -costs[0, 0] - costs[0, 1] + unobs[bus, period, 1]\n new_state = increments[bus, period]\n\n decisions[bus, period] = decision\n utilities[bus, period] = utility\n states[bus, period + 1] = new_state\n return states, decisions, utilities", "def mult_mod(a, b, nbr, control):\n bina = [int(x) for x in bin(a)[2:]]\n # binb = [int(x) for x in bin(b)[2:]]\n binn = [int(x) for x in bin(nbr)[2:]]\n while len(binn) < len(bina):\n binn = [0]+binn\n # print(bina, binn)\n binn.reverse()\n bina.reverse()\n n = len(bina)+len(binn)*3+1\n na = len(bina)\n nan = len(bina)+len(binn) # debut de Y\n nany = len(bina)+2*len(binn)+1 # debut de \"A\" (ici c'est b)\n q = QuantumRegister(n+2+1, 'q') # +lost+lost2+control\n circ = QuantumCircuit(q)\n for i in range(na):\n if bina[i]:\n circ.x(q[i])\n for i in 
range(len(binn)):\n if binn[i]:\n circ.x(q[na+i])\n if control:\n circ.x(q[n+2])\n cmultmod(circ, q, # control, X, a, A, Y, n, N, binn, lost, lost2\n q[n+2],\n [q[i] for i in range(len(bina))],\n b,\n [q[i+nany] for i in range(len(binn))],\n [q[i+nan] for i in range(len(binn)+1)],\n nbr,\n [q[i+na] for i in range(len(binn))],\n binn,\n q[n],\n q[n+1])\n circ_m = measure(circ, q, [i for i in range(nan,nany)])\n return circ_m", "def rabi_schedules(amp_list, qubits, pulse_width, pulse_sigma=None,\n width_sigma_ratio=4, drives=None, cmd_def=None,\n inst_map=None, meas_map=None):\n\n xdata = amp_list\n\n # copy the instruction to schedule mapping\n inst_map = copy.deepcopy(inst_map)\n if not inst_map:\n inst_map = copy.deepcopy(cmd_def)\n\n if pulse_sigma is None:\n pulse_sigma = pulse_width / width_sigma_ratio\n\n # Construct the circuits\n qr = qiskit.QuantumRegister(max(qubits) + 1)\n cr = qiskit.ClassicalRegister(len(qubits))\n\n circuits = []\n\n for circ_index, g_amp in enumerate(amp_list):\n\n circ = qiskit.QuantumCircuit(qr, cr)\n circ.name = 'rabicircuit_%d_0' % circ_index\n\n rabi_pulse = pulse_lib.gaussian(duration=pulse_width,\n amp=g_amp,\n sigma=pulse_sigma,\n name='rabi_pulse_%d' % circ_index)\n\n rabi_gate = Gate(name='rabi_%d' % circ_index, num_qubits=1, params=[])\n\n for _, qubit in enumerate(qubits):\n\n # add commands to schedule\n schedule = pulse.Schedule(name='rabi_pulse_%f_%d' % (g_amp,\n qubit))\n\n schedule += rabi_pulse(drives[qubit])\n\n # append this schedule to the inst_map\n inst_map.add('rabi_%d' % circ_index, qubits=[qubit],\n schedule=schedule)\n\n circ.append(rabi_gate, [qr[qubit]])\n\n for qind, qubit in enumerate(qubits):\n circ.measure(qr[qubit], cr[qind])\n\n circuits.append(circ)\n\n # schedule\n schedule_config = ScheduleConfig(inst_map, meas_map)\n rabi_sched = [schedule_circuit(qcirc,\n schedule_config)\n for qcirc in circuits]\n\n return rabi_sched, xdata", "def reduce_set(cells: int, blocks: [int], uvars: [int], nbase: int):\n combos = []\n\n if sum(blocks) + (len(blocks) - 1) > cells:\n raise Exception(\"The passed block values exceeded the number of cells\")\n\n ogcombo = []\n acc = 0\n for block in blocks:\n ogcombo.append(acc)\n acc += block + 1\n\n combos.append(ogcombo)\n\n ccombo = ogcombo.copy()\n\n lookat = len(blocks) - 1\n while lookat >= 0:\n if blocks[-1] + ccombo[-1] < cells:\n ccombo[lookat] = ccombo[lookat] + 1\n s = ccombo[lookat] + blocks[lookat] + 1\n for i in range(lookat + 1, len(blocks)):\n ccombo[i] = s\n s += blocks[i] + 1\n lookat = len(blocks) - 1\n combos.append(ccombo.copy())\n else:\n lookat -= 1\n s = ccombo[lookat] + blocks[lookat] + 1\n for i in range(lookat + 1, len(blocks)):\n ccombo[i] = s\n s += blocks[i] + 1\n\n cnf = CNF()\n for combo in combos:\n clause = [-v if in_combo(i, combo, blocks) else v for i, v in zip(range(cells), uvars)]\n cnf.append(clause)\n\n return cnf.negate(nbase)", "def solve(self, state, times):", "def biz(cps):\n # Head of unit\n mask = cps['tc5_p'] > 0\n cps_valid = cps[mask]\n rand = np.random.uniform(size=len(cps_valid))\n new_vals = np.exp(10.4 + 1. * rand)\n new_vals = np.where(new_vals < 50000., 50000., new_vals)\n cps.loc[mask, 'bilp'] = new_vals\n # spouse of unit\n mask = cps['tc5_s'] > 0\n cps_valid = cps[mask]\n rand = np.random.uniform(size=len(cps_valid))\n new_vals = np.exp(10.4 + 1. 
* rand)\n new_vals = np.where(new_vals < 50000., 50000., new_vals)\n cps.loc[mask, 'bils'] = new_vals", "def bruno_mes(self):\n MI = -125\n MA = 125\n INCR = 19\n####################################################################\n ### CHANNEL 1\n self.write('CHN 1')\n self.write('CHN?')\n print 'Acting on channel:',self.read()\n self.write('WAVE ARB')\n self.write('ARBLOAD ARB1')\n self.write('FREQ 100')\n self.write('DCOFFS 0.05')\n self.write('AMPL 0.1')\n \n l =(125,-125,125)#arange(MI,MA,INCR) # the ramp\n# lll = copy(l)[::-1][1:-1]\n# l = concatenate((l,lll))\n self.write_array_to_byte(l,1)", "def bifurcation_diagram(args, Bpbmin, Bpbmax, ylim=(-1, 0.6)):\n\n xs = []\n Bpb_list = np.linspace(Bpbmin, Bpbmax, 100)\n Iext, G, Ein, Eex, eps, a, b, A, Bpb, Bbp, vsl = args\n\n sol, t = calcODE(args, -1.5, -1.5, 0.5, 0.5, 0.5, 0.5, ts=4000, nt=2 ** 25)\n sol = sol[-len(sol) // 2:, :]\n t = t[-len(t) // 2:]\n\n x0 = sol[0, :]\n n = np.array(ode(x0, t[0], *args))\n q, _ = np.linalg.qr(n[:, None], mode='complete')\n\n periods = []\n for Bpb in Bpb_list:\n args = (Iext, G, Ein, Eex, eps, a, b, A, Bpb, Bbp, vsl)\n sol, t = calcODE(args, *sol[-1, :], ts=1000, nt=2 ** 15)\n sol = sol[-len(sol) // 2:, :]\n t = t[-len(t) // 2:]\n\n for i in range(len(sol) - 1):\n x1 = sol[i]\n x2 = sol[i + 1]\n if np.sign(n @ (x2 - x0)) != np.sign(n @ (x1 - x0)):\n c1 = dist(x1, x0, n)\n c2 = dist(x2, x0, n)\n alpha = c2 / (c1 + c2)\n x_new = x1 + alpha * (x2 - x1)\n x = (x_new - x0).dot(q)\n xs.append((Bpb, x[0], x[1], x[2], x[3], x[4], x[5]))\n # if np.linalg.norm(x_new - x0) < 1e-2 and period is None:\n period = t[i] - periods[-1][-1] if len(periods) else 0\n periods.append((Bpb, period, np.linalg.norm(x_new - x0), t[i]))\n\n plt.figure(figsize=(15, 10))\n plt.scatter([i[0] for i in xs], [i[2] for i in xs], s=10)\n plt.xlabel('$B_{pb}$')\n\n # plt.ylim(ylim)\n plt.show()\n\n periods = [i for i in periods if i[1] > 0]\n\n return periods, xs", "def preCondConjugateGradientSolver(b, x, linsys_setup, eps, i_max, plotInterval, mapDir):\n datamaps, ninvs, beams, freqs, power_2d, precond_2d, clumaps, g_nu, \\\n map_prop = linsys_setup\n nx, ny, pixScaleX, pixScaleY = map_prop\n nCluster = len(clumaps[0])\n ksz = False\n if len(clumaps)==2: ksz=True\n \n \n # Calculate residual r = b - (A^-1) x\n r = b - applyMat(x, linsys_setup)\n d = r\n\n\n delta_new = numpy.inner(r,r)\n \n\n\n\n delta_o = delta_new\n delta_array = numpy.zeros(shape=(i_max))\n \n # Iterate CG solver until converged\n i = 0\n #i_max = 300\n while (i < i_max) and (delta_new > delta_o*eps**2.):\n if i==0: t = time.time()\n \n if i%plotInterval == 0 and i != 0:\n print \"\\tNumber of iterations in the CG:\", i\n x0 = x[:nx*ny] # CMB\n x1 = x[nx*ny:nx*ny+1] # Monopole\n x2 = x[nx*ny+1:nx*ny+1+nCluster] # TSZ\n if ksz: x3 = x[nx*ny+1+nCluster:nx*ny+1+2*nCluster]\n print \"\\tMonopole:\", x1\n print \"\\tTSZ:\", x2\n if ksz: print \"\\tKSZ:\", x3\n \n x0.shape = (ny,nx)\n a_l = numpy.fft.fft2(x0)\n a_l *= precond_2d\n x_test = numpy.real(numpy.fft.ifft2(a_l))\n plot(x_test,mapDir+'/CMB_%d.png'%i,'Reconstructed CMB', range=(-250., 250.))\n print delta_new, delta_o*eps**2.\n\n q = applyMat(d, linsys_setup)\n alpha = delta_new / (numpy.inner(d,q))\n x += alpha * d\n\n # What does this do? It's always false.\n if i/50. 
< numpy.int(i/50):\n r = b - applyMat(x, linsys_setup)\n else:\n r = r - alpha*q\n \n delta_old = delta_new\n delta_new = numpy.inner(r,r)\n beta = delta_new/delta_old\n d = r + beta * d\n #if i==0: print \"\\tEach iteration takes:\", time.time()-t\n i += 1\n\n x0 = x[:nx*ny].reshape((ny, nx))\n x1 = x[nx*ny:nx*ny+1]\n x2 = x[nx*ny+1:nx*ny+1+nCluster]\n if ksz:\n x3 = x[nx*ny+1+nCluster:nx*ny+1+2*nCluster]\n else:\n x3 = None\n \n a_l = numpy.fft.fft2(x0) * precond_2d\n x0 = numpy.real(numpy.fft.ifft2(a_l))\n\n \n # CMB, monopole, TSZ, KSZ\n return x0, x1, x2, x3", "def bake_multiplier(multiplier, sr, duration):\n signal = 1\n for mult in multiplier:\n if mult[\"type\"] == \"fixed\":\n sig = mult[\"value\"]\n elif mult[\"type\"] == \"dynamic\":\n sig = bake_signal(mult[\"value\"], sr, duration)\n \n #if there is no signal yet, assign the first one\n if signal is 1:\n signal = sig\n #if there already is one, combine them with a multiplication\n else:\n signal *= sig\n return signal", "def reduce_bis(equation:sp.Eq):\n\n assert isinstance(equation,sp.Eq)\n symbols = equation.lhs.free_symbols | equation.rhs.free_symbols\n subs = []\n for symbol in symbols:\n if isinstance(symbol,Bis):\n subs.append((symbol,sp.solve(symbol.bis_eq,symbol)[0]))\n\n reduced = equation.subs(subs)\n return reduced", "def _decomposition_with_many_workers(control_wires, target_wire, work_wires):\n num_work_wires_needed = len(control_wires) - 2\n work_wires = work_wires[:num_work_wires_needed]\n\n work_wires_reversed = list(reversed(work_wires))\n control_wires_reversed = list(reversed(control_wires))\n\n gates = []\n\n for i in range(len(work_wires)):\n ctrl1 = control_wires_reversed[i]\n ctrl2 = work_wires_reversed[i]\n t = target_wire if i == 0 else work_wires_reversed[i - 1]\n gates.append(qml.Toffoli(wires=[ctrl1, ctrl2, t]))\n\n gates.append(qml.Toffoli(wires=[*control_wires[:2], work_wires[0]]))\n\n for i in reversed(range(len(work_wires))):\n ctrl1 = control_wires_reversed[i]\n ctrl2 = work_wires_reversed[i]\n t = target_wire if i == 0 else work_wires_reversed[i - 1]\n gates.append(qml.Toffoli(wires=[ctrl1, ctrl2, t]))\n\n for i in range(len(work_wires) - 1):\n ctrl1 = control_wires_reversed[i + 1]\n ctrl2 = work_wires_reversed[i + 1]\n t = work_wires_reversed[i]\n gates.append(qml.Toffoli(wires=[ctrl1, ctrl2, t]))\n\n gates.append(qml.Toffoli(wires=[*control_wires[:2], work_wires[0]]))\n\n for i in reversed(range(len(work_wires) - 1)):\n ctrl1 = control_wires_reversed[i + 1]\n ctrl2 = work_wires_reversed[i + 1]\n t = work_wires_reversed[i]\n gates.append(qml.Toffoli(wires=[ctrl1, ctrl2, t]))\n\n return gates", "def solve_canonical_impl(basis, c, A, b):\n (m, n) = A.shape\n Q = np.row_stack(\n (\n np.hstack(([0], -c)),\n np.column_stack((b, A)),\n )\n )\n gauss_elimination(Q, basis)\n\n while True:\n # choose 's' and 'r' according to the Bland's rule\n ss = (j for j in range(1, n + 1) if Q[0][j] < 0)\n s = min(ss, default=None)\n if s is None:\n return basis, Q\n\n rs = [i for i in range(1, m + 1) if Q[i][s] > 0] # and Q[0][s] / Q[i][s] > 0\n r = min(rs, key=lambda i: (abs(Q[0][s] / Q[i][s]), basis[i - 1]), default=None)\n if r is None:\n raise UnboundFunction\n\n Q[r] /= Q[r][s]\n for i in range(m + 1):\n if i != r:\n Q[i] -= Q[r] * Q[i][s]\n\n basis[r - 1] = s", "def SecondaryComplex_to_Bid():\n Parameter('RIP3_0' , 2.0e4) # molecules per cell\n Parameter('BidK_0' , 5.0e3) # molecules per cell\n \n alias_model_components()\n Initial(RIP3(bRHIM = None, state = 'unmod'), RIP3_0) # RIP3\n Initial(BidK(bf = 
None), BidK_0) \n # ==============================================================\n # Assembly of Complex II, Riptosome and Necrosome\n # --------------------------------------------------------------\n # FADD + TRADD[active] <-> FADD:TRADD[active]\n # FADD + RIP1 <-> FADD:RIP1\n # TRADD + RIP1 <-> TRADD:RIP1\n\n # CD95_to_secondary complex contains the rules for recruitment of proC8 to FADD.\n # (RIP1 or TRADD):FADD + proC8 <-> (RIP1 or TRADD):FADD:proC8\n # (RIP1 or TRADD):FADD:proC8 + proC8 <-> (RIP1 or TRADD):FADD:proC8:proC8\n # (RIP1 or TRADD):FADD:proC8 + flip_L <-> (RIP1 or TRADD):FADD:proC8:flip_L\n # (RIP1 or TRADD):FADD:proC8 + flip_S <-> (RIP1 or TRADD):proC8:flip_S\n \n # RIP1%ProC8%ProC8(in a complex) >> RIP1[trunc] + C8 + (remains of the complex)\n # RIP1%ProC8%cFlip[L](in a complex) >> RIP1[trunc] + remains of the complex)\n # RIP1%cFlip[S](in a complex) + RIP3 >> RIP1:RIP3(in a complex, i.e. necrosome)\n\n # RIP1 + C8 <-> RIP1:C8 >> RIP1[trunc] + C8\n # RIP3 + C8 <-> RIP3:C8 >> RIP3[trunc] + C8\n # Bid + C8 <-> Bid:C8 >> Bid[trunc] + C8\n \n # -------------Assembling Complex II-----------------\n Parameter('Ka_RIP1_FADD', 1e-7) # Biochemica et Biophysica Acta 1834(2013) 292-300\n Parameter('Kd_RIP1_FADD', 1e-8) # Biochemica et Biophysica Acta 1834(2013) 292-300\n alias_model_components()\n \n bind(FADD(bDD = None, bDED1 = None, bDED2 = None), 'bDD', TRADD(bDD1=None, state = 'active'), 'bDD1', [1e-6, 1e-3])\n bind(FADD(bDD = None), 'bDD', RIP1(bDD=None, bRHIM = None, state = 'unmod'), 'bDD', [Ka_RIP1_FADD, Kd_RIP1_FADD])\n bind(TRADD(bDD2 = None, state = 'active'),'bDD2', RIP1(bDD = None, bRHIM = None, state = 'unmod'), 'bDD', [1e-6, 1e-3])\n # For simplicity, I am neglecting the binary intereaction that occurs between proC8 and RIP1.\n # Binding of proC8 and c-flip to FADD is accomplished in CD95_to_Secondary complex. 
\n\n #--------------RIP1 Truncation reactions-------------\n #---Truncation by C8---------------------------------\n RIP_CIIA_proC8 = RIP1(bDD=ANY, bRHIM = None, state = 'unmod')% TRADD(bDD2 = None, bDD1 = ANY, state = 'active') % FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED=ANY)%proC8(bDED=ANY)\n RIP_CIIB_proC8 = RIP1(bDD=ANY, bRHIM = None, state = 'unmod')% FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED=ANY)%proC8(bDED=ANY)\n CIIA = TRADD(bDD2 = None, bDD1 = ANY, state = 'active') % FADD(bDD=ANY, bDED1=None, bDED2=None)\n \n Rule('RIP1_truncation_CIIA', RIP_CIIA_proC8 >> CIIA + C8(bf = None, state = 'A') + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k11',1e-1))\n Rule('RIP1_truncation_CIIB', RIP_CIIB_proC8 >> FADD(bDD=None, bDED1=None, bDED2=None)+ C8(bf = None, state = 'A') + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k12', 1e-1))\n catalyze_state(C8(bf = None, state = 'A'), 'bf', RIP1(bDD=None), 'bRHIM', 'state', 'unmod', 'trunc', [1e-6, 1e-3, 1e-1])\n\n #---Truncation by proC8:cFlip_L---------------------\n Riptosome_FADD = RIP1(bDD=1, bRHIM = None, state = 'unmod')%FADD(bDD=1, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY)\n Riptosome_TRADD = RIP1(bDD=1, bRHIM = None, state = 'unmod')%TRADD(bDD1=ANY, bDD2=1)%FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY)\n\n Rule('RIP1_truncation_FADD', Riptosome_FADD >> FADD(bDD=None, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY) + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k13', 1e-1))\n Rule('RIP1_truncation_TRADD', Riptosome_TRADD >> FADD(bDD=None, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY) + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k14', 1e-1))\n \n # -------------RIP3 Binding Interactions----------------\n Ripto1_Flip_S = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=None, state='unmod') % TRADD(bDD1=ANY, bDD2=ANY, state='active') % flip_S(bDED=ANY) % proC8(bDED=ANY)\n Ripto2_Flip_S = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=None, state='unmod') % flip_S(bDED=ANY) % proC8(bDED=ANY)\n Necrosome1 = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=6, state='unmod') % TRADD(bDD1=ANY, bDD2=ANY, state='active') % flip_S(bDED=ANY) % proC8(bDED=ANY) % RIP3(bRHIM= 6, state = 'unmod')\n Necrosome2 = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=5, state='unmod') % flip_S(bDED=ANY) % proC8(bDED=ANY) % RIP3(bRHIM= 5, state = 'unmod')\n\n Rule('RIP3_binding1', Ripto1_Flip_S + RIP3(bRHIM= None, state = 'unmod') <> Necrosome1, Parameter('k15', 1e-6), Parameter('k16', 1e-3))\n Rule('RIP3_binding2', Ripto2_Flip_S + RIP3(bRHIM= None, state = 'unmod') <> Necrosome2, Parameter('k17', 1e-6), Parameter('k18', 1e-3))\n \n #RIP3 Truncation\n catalyze_state(C8(bf = None, state = 'A'), 'bf', RIP3(), 'bRHIM', 'state', 'unmod', 'trunc', [1e-6, 1e-3, 1e-1])\n\n #-------------Bid Interactions--------------------------\n # Bid Phosphorylation and Truncation\n catalyze_state(BidK(), 'bf', Bid(), 'bf', 'state', 'U', 'po4', [1e-6, 1e-3, 1e-1])\n catalyze_state(C8(bf = None, state = 'A'), 'bf', Bid(), 'bf', 'state', 'U', 'T', [1.04e-5, 0.005, 0.1])\n\n # Bid-PO4 competing with RIP1 for binding to Complex II\n bind(TRADD(bDD2 = None, state = 'active'),'bDD2', Bid(bf = None, state = 'po4'), 'bf', [1e-6, 1e-3])\n # Bid-PO4 sequestering RIP1\n bind(RIP1(bDD = None, bRHIM = None, state = 'unmod'), 'bRHIM', Bid(bf = None, state = 'po4'), 'bf', [1e-6, 1e-3])", "def inst_bp(instrument,array=\"2\"):\n\n if 
instrument == \"MUSTANG2\" or instrument == \"MUSTANG\":\n srms = (300*u.um).to(\"m\") # surface RMS (microns)\n ### Reference: https://science.nrao.edu/facilities/gbt/proposing/GBTpg.pdf\n EA90 = 0.36 # Aperture efficiency at 90 GHz\n ### The beam efficiencies should be taken as 1.37* Aperture Efficiency\n R90 = np.exp(-4.0*np.pi*(srms/(const.c/(9.0e10*u.s**-1))).value) #\n Gnot = EA90/R90 # Unphysical, but see documentation...\n if instrument == \"MUSTANG2\":\n flow = 75.0 # GHz\n fhig = 105.0 # GHz\n else:\n flow = 82.5 # GHz\n fhig = 97.5 # GHz\n \n farr = np.arange(flow,fhig,1.0) # frequency array.\n tran = farr*0.0 + 1.0 # Let the transmission be unity everywhere.\n Larr = const.c.value/(farr*1.0e9) # Keep calm and carry on.\n ### Old formula:\n #Ruze = Gnot * np.exp(-4.0*np.pi*(srms.value)/Larr)\n ### Correct formula: (10 April 2018)\n Ruze = Gnot * np.exp(-(4.0*np.pi*srms.value/Larr)**2)\n NRuz = Ruze / np.max(Ruze) # Normalize it\n band = tran * Ruze # Bandpass, with (unnormalized) Ruze efficiency\n \n if instrument == \"NIKA2\" or instrument == \"NIKA\":\n caldir='/home/romero/NIKA2/NIKA_SVN/Processing/Pipeline/Calibration/BP/'\n bpfile=caldir+'Transmission_2017_Jan_NIKA2_v1.fits'\n hdulist = fits.open(bpfile)\n\n if array == \"1H\": # 1mm (260 GHz) array, Horizontal Polarization\n tbdata = hdulist[1].data # 1H\n freq = tbdata.field(0)\n tran = tbdata.field(1)\n erro = tbdata.field(2)\n atmt = tbdata.field(3)\n cfreq1h = np.sum(freq*tran)/np.sum(tran)\n \n if array == \"1V\": # 1mm (260 GHz) array, Vertical Polarization\n tbdata = hdulist[2].data # 1V\n freq = tbdata.field(0)\n tran = tbdata.field(1)\n erro = tbdata.field(2)\n atmt = tbdata.field(3)\n cfreq1v = np.sum(freq*tran)/np.sum(tran)\n \n if array == \"2\": # 2mm (150 GHz) array\n tbdata = hdulist[3].data # 2\n freq = tbdata.field(0)\n tran = tbdata.field(1)\n erro = tbdata.field(2)\n atmt = tbdata.field(3)\n cfreq2 = np.sum(freq*tran)/np.sum(tran)\n\n ### Trim the zero-frequency listing, if any.\n gi=np.where(freq > 0)\n freq = freq[gi]\n tran = tran[gi]\n erro = erro[gi]\n atmt = atmt[gi]\n \n### Calculate Aperture efficiencies from information found at:\n### http://www.iram.es/IRAMES/mainwiki/Iram30mEfficiencies\n Beff = 0.630 # at 210 GHz\n Aeff = Beff/1.27 # See text on webpage\n srms = (66.0*u.um).to(\"m\") # surface RMS (microns)\n R210 = np.exp(-4.0*np.pi*(srms/(const.c/(2.1e11*u.s**-1))).value) #\n Gnot = Aeff/R210 # Unphysical, but see documentation...\n\n Larr = const.c.value/(freq*1.0e9) # Keep calm and carry on. 
\n Ruze = Gnot * np.exp(-4.0*np.pi*(srms.value)/Larr)\n NRuz = Ruze / np.max(Ruze) # Normalize it\n band = tran * Ruze # Bandpass, with (unnormalized) Ruze efficiency\n farr = freq\n \n#########################################################################\n\n if instrument == 'ACT90':\n srms = (27.0*u.um).to(\"m\") # surface RMS (microns)\n EA90 = 0.95 # I'm making this number up...\n R90 = np.exp(-4.0*np.pi*(srms/(const.c/(9.0e10*u.s**-1))).value) #\n Gnot = EA90/R90 # Unphysical, but see documentation...\n flow = 65.0 # GHz\n fhig = 125.0 # GHz\n farr = np.arange(flow,fhig,1.0) # frequency array.\n freq_ref = 90.0 # I took EA90 to be a fictitious aperature efficiency at 90 GHz\n band = ruze_eff(farr,freq_ref,EA90,srms)\n\n if instrument == 'ACT150':\n srms = (27.0*u.um).to(\"m\") # surface RMS (microns)\n EA90 = 0.95 # I'm making this number up...\n R90 = np.exp(-4.0*np.pi*(srms/(const.c/(9.0e10*u.s**-1))).value) #\n Gnot = EA90/R90 # Unphysical, but see documentation...\n flow = 120.0 # GHz\n fhig = 180.0 # GHz\n farr = np.arange(flow,fhig,1.0) # frequency array.\n freq_ref = 90.0 # I took EA90 to be a fictitious aperature efficiency at 90 GHz\n band = ruze_eff(farr,freq_ref,EA90,srms)\n\n\n return band, farr", "def circuitSat(C):", "def chao1_var_bias_corrected(singles, doubles):\n s, d = float(singles), float(doubles)\n return s*(s-1)/(2*(d+1)) + (s*(2*s-1)**2)/(4*(d+1)**2) + \\\n (s**2 * d * (s-1)**2)/(4*(d+1)**4)", "def transform_t1_to_bo(t1, umat):\n if isinstance(t1, np.ndarray) and t1.ndim == 2:\n nocc, nvir = t1.shape\n umat_occ = umat[:nocc, :nocc]\n umat_vir = umat[nocc:, nocc:]\n return reduce(np.dot, (umat_occ.conj().T, t1, umat_vir))\n else: # UHF\n spin = len(t1)\n return [transform_t1_to_bo(t1[s], umat[s]) for s in range(spin)]", "def butter_bp_coe(lowcut, highcut, fs, order=1):\n nyq = 0.5 * fs\n low = lowcut / nyq\n high = highcut / nyq\n b, a = butter(order, [low, high], btype='band')\n return b, a", "def SecondaryComplex_to_Bid():\n Parameter('RIP3_0' , 2.0e4) # molecules per cell\n Parameter('Bid_0' , 2.0e4) # molecules per cell\n Parameter('BidK_0' , 5.0e3) # molecules per cell\n \n Initial(RIP3(bRHIM = None, state = 'unmod'), RIP3_0) # RIP3\n Initial(Bid(bf = None, state = 'unmod'), Bid_0) # Bid\n Initial(BidK(bf = None), BidK_0)\n # ==============================================================\n # Assembly of Complex II, Riptosome and Necrosome\n # --------------------------------------------------------------\n # FADD + TRADD[active] <-> FADD:TRADD[active]\n # FADD + RIP1 <-> FADD:RIP1\n # TRADD + RIP1 <-> TRADD:RIP1\n \n # CD95_to_secondary complex contains the rules for recruitment of proC8 to FADD.\n # (RIP1 or TRADD):FADD + proC8 <-> (RIP1 or TRADD):FADD:proC8\n # (RIP1 or TRADD):FADD:proC8 + proC8 <-> (RIP1 or TRADD):FADD:proC8:proC8\n # (RIP1 or TRADD):FADD:proC8 + flip_L <-> (RIP1 or TRADD):FADD:proC8:flip_L\n # (RIP1 or TRADD):FADD:proC8 + flip_S <-> (RIP1 or TRADD):proC8:flip_S\n \n # RIP1%ProC8%ProC8(in a complex) >> RIP1[trunc] + C8 + (remains of the complex)\n # RIP1%ProC8%cFlip[L](in a complex) >> RIP1[trunc] + remains of the complex)\n # RIP1%cFlip[S](in a complex) + RIP3 >> RIP1:RIP3(in a complex, i.e. 
necrosome)\n \n # RIP1 + C8 <-> RIP1:C8 >> RIP1[trunc] + C8\n # RIP3 + C8 <-> RIP3:C8 >> RIP3[trunc] + C8\n # Bid + C8 <-> Bid:C8 >> Bid[trunc] + C8\n \n # -------------Assembling Complex II-----------------\n bind(FADD(bDD = None, bDED1 = None, bDED2 = None), 'bDD', TRADD(bDD1=None, state = 'active'), 'bDD1', [KF, KR])\n bind(FADD(bDD = None, bDED1 = None, bDED2 = None), 'bDD', RIP1(bDD=None, bRHIM = None, state = 'unmod'), 'bDD', [Ka_RIP1_FADD, Kd_RIP1_FADD])\n bind(TRADD(bDD2 = None, state = 'active'),'bDD2', RIP1(bDD = None, bRHIM = None, state = 'unmod'), 'bDD', [KF, KR])\n # For simplicity, I am neglecting the binary intereaction that occurs between proC8 and RIP1.\n # Binding of proC8 and c-flip to FADD is accomplished in CD95_to_Secondary complex.", "def PGD(Params, relaxationVars, fixedBs, fixedTs, data):\n Tol = Params[\"tol\"]\n TolCD = Params[\"tolCD\"]\n Lambda0 = Params[\"Lambda\"]\n Lambda1 = Params[\"alpha\"] * Lambda0\n M = Params[\"M\"]\n y = data.ycentered # data.y - data.ybar\n\n Bindices = relaxationVars.BActive.copy() # list\n Tindices = relaxationVars.TActive.copy() # list of tuples (i,j)\n currentB, currentT = relaxationVars.initialSol.ToArray(Bindices, Tindices)\n fixedB = fixedBs.copy() # Dict. key = index, value = 0 or 1 (no index if not fixed)\n fixedT = fixedTs.copy() # Dict. key = (i,j), value = 0 or 1 (no index if not fixed)\n DualInitial = relaxationVars.useDual\n\n # Store the index mappings\n Bmap = {} # Bmap[i] = index of i in currentB or XB\n for i in range(len(Bindices)):\n Bmap[Bindices[i]] = i\n\n Tmap = {} # Tmap[(i,j)] = index of interaction in XT and currentT\n for i in range(len(Tindices)):\n c1, c2 = Tindices[i]\n Tmap[(c1, c2)] = i\n Tmap[(c2, c1)] = i\n\n # Next: Some sanity checks (those can be removed if we're carful about the\n # inputs)\n\n # Make sure if B_i is fixed to 0 then all T_{ij}'s (in Tindices) are also\n # fixed to zero\n for i, val in fixedB.items():\n if val == 0:\n for l, j in Tmap:\n if l < j and (l == i or j == i):\n fixedT[(l, j)] = 0\n\n # Make sure if T_{ij} is fixed to 1 then both B_i and B_j are fixed to 1\n for key, val in fixedT.items():\n if val == 1:\n i, j = key\n fixedB[i] = 1\n fixedB[j] = 1\n\n # Delete from Bindices and Tindices all the indices s.t. 
z_i = 0 / z_{ij}\n # = 0\n Bzeros = []\n for i, val in fixedB.items():\n if val == 0:\n Bzeros.append(Bmap[i])\n for i in sorted(Bzeros, reverse=True):\n del Bindices[i]\n currentB = np.delete(currentB, Bzeros)\n\n Tzeros = []\n for key, val in fixedT.items():\n if val == 0:\n Tzeros.append(Tmap[key])\n for i in sorted(Tzeros, reverse=True):\n del Tindices[i]\n currentT = np.delete(currentT, Tzeros)\n\n # Update the index mappings\n Bmap = {} # Bmap[i] = index of i in currentB or XB\n for i in range(len(Bindices)):\n Bmap[Bindices[i]] = i\n\n Tmap = {} # Tmap[(i,j)] = index of interaction in XT and currentT\n for i in range(len(Tindices)):\n c1, c2 = Tindices[i]\n Tmap[(c1, c2)] = i\n Tmap[(c2, c1)] = i\n\n # End of sanity checks\n\n # Retrive the matrices of the optimization variables\n # Later: We can store the centered columns (but this will require twice\n # the memory)\n XB, XT = data.Retrieve(Bindices, Tindices)\n XBMean = XB.mean(axis=0)\n XB = XB - XBMean\n XTMean = XT.mean(axis=0)\n XT = XT - XTMean\n\n Bfree = [i for i in Bindices if i not in fixedB]\n Tfree = [(i, j) for i, j in Tmap if i < j and (i, j) not in fixedT]\n TfreeIndices = [Tmap[(i, j)]\n for i, j in Tmap if i < j and (i, j) not in fixedT]\n lenFixedB = len(Bindices) - len(Bfree)\n lenFixedT = len([key for key in fixedT if fixedT[key] == 1])\n\n # (Dual) Block CD Variables\n u = defaultdict(float)\n w = defaultdict(dict)\n if not DualInitial:\n for i in Bindices:\n u[i] = 0\n for pair in Tmap:\n i, j = pair\n w[i][j] = 0\n else:\n for i in Bindices:\n if i in relaxationVars.u and i not in fixedB:\n u[i] = relaxationVars.u[i]\n else:\n u[i] = 0\n for i, j in Tmap:\n if j in relaxationVars.w[i] and (min(i, j), max(\n i, j)) not in fixedT and i not in fixedB and j not in fixedB:\n w[i][j] = relaxationVars.w[i][j]\n else:\n # Important: we need w[i][j] = 0 if T_{ij} if fixed (this is\n # due to the thresholding function)\n w[i][j] = 0\n\n sortedIndices = {i: sorted(w[i]) for i in w}\n sortedIndices = defaultdict(list, sortedIndices)\n\n # Prepare all the fixed matrices/vectors required for grad evaluation\n # later.\n XBty = np.dot(XB.T, y)\n XBtXB = np.dot(XB.T, XB)\n XTty = np.dot(XT.T, y)\n XTtXT = np.dot(XT.T, XT)\n XBtXT = np.dot(XB.T, XT)\n\n # Compute the lipschitz constant of the grad.\n Xfull = np.hstack((XB, XT))\n if Xfull.shape[1] != 0:\n eigvals, v = np.linalg.eig(np.dot(Xfull.T, Xfull))\n L = np.max(np.real(eigvals))\n else:\n L = 1 # any value here should suffice - it's not used.\n\n # Compute the lipschitz constants for BCD.\n LCD = {}\n for i in Bindices:\n LCD[i] = (len(w[i]) + 1) * ((Lambda0**2) / (L * M**2))\n\n # Define the thresholding constants\n frac = Lambda0 / (M * L)\n Mpfrac = M + frac\n frac1 = Lambda1 / (M * L)\n Mpfrac1 = M + frac1\n fracsqL = frac * frac * L\n LambdaovM = Lambda0 / M\n Lambda1ovM = Lambda1 / M\n Lambda1ovLambda0 = Lambda1 / Lambda0\n\n start = time.time()\n\n oldObj = math.inf\n for it in range(5000):\n grad_B = - XBty + np.dot(XBtXB, currentB) + np.dot(XBtXT, currentT)\n grad_T = - XTty + np.dot(XTtXT, currentT) + np.dot(XBtXT.T, currentB)\n Bstar = currentB - grad_B / L\n Tstar = currentT - grad_T / L\n # Iterate over the blocks, running dual BCD.\n # We employ dual warm starts by using the same (u,w) across the PGD updates.\n CDPrevObj = -math.inf\n LCDCurrent = copy(LCD)\n useZeroSuffCondition = True\n if useZeroSuffCondition:\n # Perform proximal screening below.\n zeroGroups = set()\n for i in Bfree:\n zeroSufficient = False\n cumsum = 0\n for j in w[i]:\n thrshld = 
max(\n (abs(Tstar[Tmap[(i, j)]]) / frac - Lambda1ovLambda0), 0)\n # Do feature level screening below.\n if thrshld == 0:\n # The initialization below ensures that \\theta_{ij} is\n # never updated by BCA.\n w[i][j] = 0\n w[j][i] = 0\n else:\n cumsum += thrshld\n\n if cumsum <= 1 - abs(Bstar[Bmap[i]]) / frac:\n zeroSufficient = True\n if zeroSufficient:\n u[i] = Bstar[Bmap[i]] / frac\n for j in w[i]:\n if abs(Tstar[Tmap[(i, j)]]) > frac1:\n w[i][j] = Tstar[Tmap[(\n i, j)]] / frac - Lambda1ovLambda0 * np.sign(Tstar[Tmap[(i, j)]])\n else:\n w[i][j] = 0\n w[j][i] = 0\n # Not nec. but can improve speed.\n LCDCurrent[j] -= (Lambda0**2) / (L * M**2)\n zeroGroups.add(i)\n\n BfreeMinusZeroGroups = [i for i in Bfree if i not in zeroGroups]\n CDObjConst = 0\n '''\n for i in zeroGroups:\n CDObjConst += q(u[i], Bstar[Bmap[i]], M, Lambda0, L,frac)\n for j in w[i]:\n if i < j:\n # T(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n CDObjConst += T(w[i][j], w[j][i], Tstar[Tmap[(i,j)]], M, Lambda0, L,frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n '''\n ####\n else:\n zeroGroups = set()\n CDObjConst = 0\n BfreeMinusZeroGroups = Bfree\n # To Turn the part above off, comment it out and set the following:\n # zeroGroups = set()\n # CDObjConst = 0\n # BfreeMinusZeroGroups = Bfree\n\n for innerit in range(10000):\n # for i in Bfree:\n for i in BfreeMinusZeroGroups:\n # First, Calculate utilde and wtilde for ith block\n utilde = u[i] + delq(u[i],\n Bstar[Bmap[i]],\n M,\n Lambda0,\n L,\n frac,\n Mpfrac,\n fracsqL,\n LambdaovM) / LCDCurrent[i]\n\n #wtilde = {}\n # for j in w[i]:\n # if B_j is fixed to 1, then we already set w[j][i] = 0\n # wtilde[j] = w[i][j] + delT(w[i][j], w[j][i], Tstar[Tmap[(i,j)]], M, Lambda0, L,frac, Mpfrac, fracsqL, LambdaovM)/LCD[i]\n sortedIndicesi = sortedIndices[i]\n # delT(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1, LambdaovM)\n wtilde = [w[i][j] + delT(w[i][j],\n w[j][i],\n Tstar[Tmap[(i,\n j)]],\n M,\n Lambda0,\n L,\n frac,\n frac1,\n Mpfrac1,\n LambdaovM) / LCDCurrent[i] for j in sortedIndicesi]\n\n x = np.empty(shape=len(wtilde) + 1)\n # Solve the l1 projection problem.\n x[0] = utilde\n x[1:] = np.array(wtilde)\n projection = project(x)\n # Update the solution.\n u[i] = projection[0]\n # for j in range(len(w[i])):\n # w[i][sortedIndicesi[j]] = projection[j+1] ## +1 since u[i] is\n # first\n for counter, j in enumerate(sortedIndicesi):\n w[i][j] = projection[counter + 1]\n # Calculate the current objective\n CDObj = CDObjConst # 0\n for i in BfreeMinusZeroGroups: # Bfree:\n CDObj += q(u[i], Bstar[Bmap[i]], M, Lambda0, L, frac)\n for j in w[i]:\n if i < j:\n # T(wij, wji, thetaij, M, Lambda0, L, frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n CDObj += T(w[i][j], w[j][i], Tstar[Tmap[(i, j)]], M,\n Lambda0, L, frac, frac1, Mpfrac1, LambdaovM, Lambda1ovM)\n #Params[\"print\"](\"Inner obj: \", CDObj)\n if terminate(CDPrevObj, CDObj, TolCD):\n break\n CDPrevObj = CDObj\n\n # Get back the primal solution.\n for i in range(len(Bindices)):\n # if Bindices[i] is fixed to 1, then u[Bindices[i]] = 0 and the\n # update below will lead to currentB[i] = Bstar[i] (or +- M)\n if Bindices[i] not in zeroGroups:\n # assuming Bindices is sorted\n currentB[i] = dualtoprimalu(\n u[Bindices[i]], Bstar[i], M, Lambda0, L, frac)\n else:\n currentB[i] = 0\n\n for i, j in Tmap:\n # if i or j is fixed, the corresponding w[i][j] will be zero, which\n # leads to the correct update.\n if i < j:\n if (i, j) in Tfree:\n # dualtoprimalw(wij, wji, thetaij, M, Lambda0, L, frac, 
frac1, Mpfrac1)\n if i in zeroGroups or j in zeroGroups:\n currentT[Tmap[(i, j)]] = 0\n else:\n currentT[Tmap[(i, j)]] = dualtoprimalw(\n w[i][j], w[j][i], Tstar[Tmap[(i, j)]], M, Lambda0, L, frac, frac1, Mpfrac1)\n else: # careful, this is the case when no thresholding should be applied\n coefficient = Tstar[Tmap[(i, j)]]\n if np.abs(coefficient) <= M:\n currentT[Tmap[(i, j)]] = coefficient\n else:\n currentT[Tmap[(i, j)]] = M * np.sign(coefficient)\n\n r = y - np.dot(XB, currentB) - np.dot(XT, currentT)\n\n maxterm = 0\n for i in range(len(currentB)):\n if Bindices[i] not in fixedB:\n maxtemp = np.abs(currentB[i])\n for j in w[Bindices[i]]:\n maxtemp = max(maxtemp, np.abs(\n currentT[Tmap[(Bindices[i], j)]]))\n maxterm += maxtemp\n l1norm = np.sum(np.abs(currentT[TfreeIndices]))\n # IMPORTANT: Avoid using lenFixed and lenFixedT here.....!!!!!! ####\n currentobjective = 0.5 * np.dot(r, r) + Lambda0 * (\n lenFixedB + lenFixedT) + (Lambda0 / M) * maxterm + (Lambda1 / M) * l1norm\n\n if currentobjective > oldObj:\n Params[\"print\"](\"Objective Increased!!!\")\n\n if terminate(oldObj, currentobjective, Tol):\n break\n\n oldObj = currentobjective\n Params[\"print\"](\"Iteration :\", it, \". Objective: \", currentobjective)\n\n end = time.time()\n Params[\"print\"](\"Time: \", end - start, \" seconds.\")\n\n # Check if any small values should be zero.\n # Start with more aggressive checks first.\n Trunc = False\n for epsilon in [0.01, 1e-3, 1e-4, 1e-5, 1e-6]:\n currentBtrunc = np.copy(currentB)\n currentTtrunc = np.copy(currentT)\n currentBSetToZero = np.nonzero(np.abs(currentB) < epsilon)[0]\n currentBtrunc[currentBSetToZero] = 0\n currentBSetToZeroPSet = set(currentBSetToZero)\n for (i, j) in Tmap:\n if Bmap[i] in currentBSetToZeroPSet or Bmap[j] in currentBSetToZeroPSet:\n currentTtrunc[Tmap[(i, j)]] = 0\n\n currentTtrunc[np.abs(currentT) < epsilon] = 0\n rtrunc = y - np.dot(XB, currentBtrunc) - np.dot(XT, currentTtrunc)\n maxterm = 0\n for i in range(len(currentBtrunc)):\n if Bindices[i] not in fixedB:\n maxtemp = np.abs(currentBtrunc[i])\n for j in w[Bindices[i]]:\n maxtemp = max(maxtemp, np.abs(\n currentTtrunc[Tmap[(Bindices[i], j)]]))\n maxterm += maxtemp\n l1norm = np.sum(np.abs(currentTtrunc[TfreeIndices]))\n objectivetrunc = 0.5 * np.dot(rtrunc, rtrunc) + Lambda0 * (\n lenFixedB + lenFixedT) + (Lambda0 / M) * maxterm + (Lambda1 / M) * l1norm\n\n Params[\"print\"](\n \"eps: \",\n epsilon,\n \" objectivetrunc: \",\n objectivetrunc,\n \" currentobjective: \",\n currentobjective)\n # 1.01 might be beneficial in some extreme cases where supp becomes\n # very large (but might also cause descent problems)\n if objectivetrunc <= currentobjective:\n '''\n currentB = currentBtrunc\n currentT = currentTtrunc\n r = rtrunc\n currentobjective = objectivetrunc\n '''\n Params[\"print\"](\"###CHANGE###\", \"eps: \", epsilon)\n Params[\"print\"](\"Final Objective :\", objectivetrunc)\n Trunc = True\n break\n\n integral = True\n\n for i in Bfree:\n zi = np.abs(currentB[Bmap[i]]) / M\n if zi > 0 and zi < 0.999:\n integral = False\n\n for i in TfreeIndices:\n zi = np.abs(currentT[i]) / M\n if zi > 0 and zi < 0.999:\n integral = False\n\n Bnnz = {key: currentB[Bmap[key]]\n for key in Bmap if currentB[Bmap[key]] != 0}\n Tnnz = {(i, j): currentT[Tmap[(i, j)]]\n for i, j in Tmap if i < j and currentT[Tmap[(i, j)]] != 0}\n intercept = data.ybar - np.dot(XBMean, currentB) - np.dot(XTMean, currentT)\n sol = Solution(Bnnz, Tnnz, intercept)\n\n if Trunc:\n BnnzTrunc = {key: currentBtrunc[Bmap[key]]\n for key in 
Bmap if currentBtrunc[Bmap[key]] != 0}\n TnnzTrunc = {(i, j): currentTtrunc[Tmap[(\n i, j)]] for i, j in Tmap if i < j and currentTtrunc[Tmap[(i, j)]] != 0}\n interceptTrunc = data.ybar - \\\n np.dot(XBMean, currentBtrunc) - np.dot(XTMean, currentTtrunc)\n solTrunc = Solution(BnnzTrunc, TnnzTrunc, interceptTrunc)\n else:\n BnnzTrunc = Bnnz\n TnnzTrunc = Tnnz\n interceptTrunc = intercept\n solTrunc = sol\n\n return (sol, solTrunc, currentobjective, integral, r, u, w)", "def TransformBase(base:int, number:list, digts:int) -> int :\n i = 0\n res = 0\n while ( i < digts):\n index = digts - i - 1\n number[index] = int(number[index]) * (base ** i) \n res += number[index]\n i += 1\n return res", "def conv(A, B, c, i, tipus):\r\n if tipus == 1:\r\n suma = [np.conj(A[k, i]) * B[c - k, i] for k in range(1, c + 1)]\r\n return sum(suma)\r\n elif tipus == 2:\r\n suma = [A[k, i] * B[c - 1 - k, i] for k in range(1, c)]\r\n return sum(suma)\r\n elif tipus == 3:\r\n suma = [A[k, i] * np.conj(B[c - k, i]) for k in range(1, c)]\r\n return sum(suma)", "def run_solution1(self):\n return reduce(lambda a, b: a + self.calculate_fuel(b), self.data, 0)", "def syn_bucb(num_workers, gp, acq_optimiser, anc_data):\n recommendations = [asy_ucb(gp, acq_optimiser, anc_data)]\n for _ in range(1, num_workers):\n recommendations.append(_halluc_ucb(gp, acq_optimiser, recommendations, anc_data))\n return recommendations", "def ot_ul2_reg_path(a: np.array, b: np.array, C: np.array, lambdamax=np.inf, savePi=False, itmax=50000, save_AT_length=False):\n\n n = np.shape(a)[0]\n m = np.shape(b)[0]\n ones_n = np.ones((n,))\n ones_m = np.ones((m,))\n\n n_iter = 0\n lambda_list = []\n Pi_list = []\n\n active_index_i = []\n active_index_j = []\n e = np.array([])\n c = np.array([])\n H_inv = np.array([[]])\n lam = 0\n\n active_set_length = []\n\n while n_iter < itmax:\n # deal with the first iteration\n # print('------------iteration ', n_iter, '--------------')\n # print('active set length:', len(active_index_i))\n active_set_length.append(len(active_index_i))\n if n_iter == 0:\n M = C/(a[:, None] + b[None, :])/2\n ik, jk = np.unravel_index(np.argmin(M), M.shape)\n lam = M[ik, jk]\n id_pop = -1\n delta = np.array([])\n pi_tilde = np.array([])\n else:\n # compute next lambda when a couple of index is added to the active set\n M = compute_lambda_a(active_index_i, active_index_j, pi_tilde, delta, C, a, b, lam, ones_m, ones_n)\n\n # compute the next lambda when a couple of index is removed from the active set\n alt_lam, id_pop = compute_lambda_r(delta, pi_tilde, lam)\n lam = np.min(M)\n\n if alt_lam < lam:\n lam = alt_lam\n else:\n ik, jk = np.unravel_index(np.argmin(M), M.shape)\n id_pop = -1\n\n if lambdamax == np.inf:\n # stop criteria on marginals\n if n_iter > 0:\n pi_vect = delta / lam + pi_tilde\n Pi = sp.coo_matrix((pi_vect, (active_index_i, active_index_j)), shape=(n, m))\n if np.linalg.norm(Pi.dot(ones_m)-a, ord=2) + np.linalg.norm(Pi.T.dot(ones_n)-b, ord=2) <1e-6:\n if savePi:\n Pi_list.append(Pi)\n lambda_list.append(lam)\n break\n else:\n # stop criteria on lambda\n if lam > lambdamax:\n pi_vect = delta / lambdamax + pi_tilde\n Pi= sp.coo_matrix((pi_vect, (active_index_i, active_index_j)), shape=(n, m))\n if savePi:\n Pi_list.append(Pi)\n lambda_list.append(lam)\n break\n\n # if the positivity constraint is not satisfied, remove index (i,j) from the current active set\n # otherwise add (ik,jk) found from M to active set\n if id_pop != -1:\n active_index_j.pop(id_pop)\n active_index_i.pop(id_pop)\n c = np.delete(c, id_pop, 0)\n e 
= np.delete(e, id_pop, 0)\n\n else:\n active_index_i.append(ik)\n active_index_j.append(jk)\n c = np.append(c, -C[ik, jk] / 2)\n e = np.append(e, a[ik] + b[jk])\n\n\n # compute H^-1 (Schur complement)\n H_inv = complement_schur(active_index_i, active_index_j, H_inv, id_pop)\n delta = H_inv @ c\n pi_tilde = H_inv @ e\n pi_vect = delta / lam + pi_tilde\n\n # Compute current transport plan Pi\n if savePi:\n Pi = sp.coo_matrix((pi_vect, (active_index_i, active_index_j)), shape=(n, m))\n Pi_list.append(Pi)\n\n lambda_list.append(lam)\n n_iter += 1\n\n if itmax <= n_iter:\n Pi = sp.coo_matrix((pi_vect, (active_index_i, active_index_j)), shape=(n, m))\n print('max iteration number reached')\n if savePi:\n if save_AT_length:\n return Pi_list[-1].toarray(), lam, Pi_list, np.array(lambda_list), n_iter, active_set_length\n else:\n return Pi_list[-1].toarray(), lam, Pi_list, np.array(lambda_list), n_iter\n else:\n if save_AT_length:\n return Pi.toarray(), lam, np.array(lambda_list), n_iter, active_set_length\n else:\n return Pi.toarray(), lam, np.array(lambda_list), n_iter", "def _apply_basis_state(self, state, wires):\n # translate to wire labels used by device\n device_wires = self.map_wires(wires)\n\n # length of basis state parameter\n n_basis_state = len(state)\n\n if not set(state.tolist()).issubset({0, 1}):\n raise ValueError(\"BasisState parameter must consist of 0 or 1 integers.\")\n\n if n_basis_state != len(device_wires):\n raise ValueError(\"BasisState parameter and wires must be of equal length.\")\n\n # get computational basis state number\n basis_states = 2 ** (self.num_wires - 1 - np.array(device_wires))\n basis_states = qml.math.convert_like(basis_states, state)\n num = int(qml.math.dot(state, basis_states))\n\n self._state = self._create_basis_state(num)", "def simulate_strategy_loop(\n num_buses,\n states,\n decisions,\n utilities,\n costs,\n ev,\n increments,\n num_states,\n start_period,\n num_periods,\n beta,\n unobs,\n):\n need_size = bool(False)\n period = int(0)\n for period in range(start_period, num_periods):\n for bus in range(num_buses):\n old_state = states[bus, period]\n if (-costs[old_state, 0] + unobs[bus, period, 0] + beta * ev[old_state]) > (\n -costs[0, 0] - costs[0, 1] + unobs[bus, period, 1] + beta * ev[0]\n ):\n decision = 0\n utility = -costs[old_state, 0] + unobs[bus, period, 0]\n new_state = old_state + increments[bus, period]\n else:\n decision = 1\n utility = -costs[0, 0] - costs[0, 1] + unobs[bus, period, 1]\n new_state = increments[bus, period]\n\n decisions[bus, period] = decision\n utilities[bus, period] = utility\n if period < num_periods - 1:\n if new_state > (num_states / 2):\n need_size = True\n states[bus, period + 1] = new_state\n if need_size:\n return states, decisions, utilities, period\n return states, decisions, utilities, period", "def reference_to_signal_partial_mapping(rb_map_string, reference_location, read_location, contig_name,\n ref_start, bas_start):\n\n a, b = basecall_to_reference_mapping(rb_map_string, ref_start, bas_start)\n f = h5py.File(read_location, 'r')\n grp = np.array(f.get('/Analyses/Basecall_1D_000/BaseCalled_template/Events'))\n bts = base_to_signal_mapping(grp)\n norm_sig = normalized_signal(grp)\n vectors_for_nn = np.array([], dtype=np.int64).reshape(0, cs.NN_VECTOR_LENGTH)\n\n for i in b:\n rs = i[0]\n re = i[1]\n bs = i[2]\n # R=B cast sekvencie\n ref = refrence_sequence_from_interval(reference_location, contig_name, rs, re)\n left_border = int(cs.LENGTH/2 - 2)\n right_border = int(cs.LENGTH/2 + 2)\n ref1 = 
np.concatenate(create_one_hot(ref))\n\n for x in range(0, len(ref)-cs.LENGTH, 5):\n start = bts[bs+x+left_border]\n end = bts[bs+x+right_border]\n number_of_signals = end - start + 1\n\n if number_of_signals < cs.SIGNAL_LENGTH:\n d = int((cs.SIGNAL_LENGTH - number_of_signals) / 2)\n signal_relevant_start = bs+x+left_border - d\n signal_relevant_end = bs+x + left_border + number_of_signals + d - 1 \\\n if number_of_signals + 2*d == cs.SIGNAL_LENGTH else \\\n bs + x + left_border + number_of_signals + d\n else:\n continue\n\n signal_relevant = []\n [signal_relevant.append(x) for x in norm_sig[signal_relevant_start:signal_relevant_end+1]]\n id_sig, std = ideal_signal_for_sequence(ref[x:x+cs.LENGTH])\n help_con = np.concatenate((ref1[4*x:4*(x+cs.LENGTH)], np.array(signal_relevant)), axis=0)\n help_con = np.concatenate((help_con, id_sig), axis=0)\n help_con = np.concatenate((help_con, [std]), axis=0)\n\n if len(help_con) != cs.NN_VECTOR_LENGTH:\n break\n vectors_for_nn = np.append(vectors_for_nn, help_con[None, :], axis=0)\n\n return vectors_for_nn", "def toa_incoming_shortwave_flux(srad0, srad0u):\n return srad0 - srad0u", "def bin_by_npixels(self, npix):\n\n disp = self.dispersion\n dbins = disp[1:] - disp[:-1]\n bin_boundary = disp[:-1] + 0.5 * dbins\n\n lbins = bin_boundary[:-1]\n rbins = bin_boundary[1:]\n mbins = disp[1:-1]\n dbins = rbins - lbins\n flux = self.flux[1:-1]\n flux_err = self.flux_err[1:-1]\n num_bins = len(mbins)\n\n num_new_bins = int((num_bins - (num_bins % npix)) / npix)\n\n new_wave = np.zeros(num_new_bins)\n new_flux = np.zeros(num_new_bins)\n new_flux_err = np.zeros(num_new_bins)\n\n for idx in range(num_new_bins):\n\n _new_flux = 0\n _new_flux_err = 0\n _new_dbin = 0\n\n for jdx in range(npix):\n _new_flux += flux[idx * npix + jdx] * dbins[idx * npix + jdx]\n _new_dbin += dbins[idx * npix + jdx]\n _new_flux_err += (flux_err[idx * npix + jdx] * dbins[\n idx * npix + jdx]) ** 2\n\n rbin = rbins[npix * idx + npix - 1]\n lbin = lbins[npix * idx]\n _new_wave = (rbin - lbin) * 0.5 + lbin\n\n new_wave[idx] = _new_wave\n new_flux[idx] = _new_flux / _new_dbin\n new_flux_err[idx] = np.sqrt(_new_flux_err) / _new_dbin\n\n return SpecOneD(dispersion=new_wave, flux=new_flux,\n flux_err=new_flux_err, unit='f_lam')", "def reduce_basis(blst):\n if blst == []: # blst represents scalar\n blst_coef = [S.One]\n blst_expand = [[]]\n return blst_coef, blst_expand\n blst_expand = [blst]\n blst_coef = [S.One]\n blst_flg = [False]\n # reduce untill all blst revise flgs are True\n while not reduce(operator.and_, blst_flg):\n for i in range(len(blst_flg)):\n if not blst_flg[i]: # keep revising if revise flg is False\n tmp = MV.reduce_basis_loop(blst_expand[i])\n if isinstance(tmp, bool):\n blst_flg[i] = tmp # revision of blst_expand[i] complete\n elif len(tmp) == 3: # blst_expand[i] contracted\n blst_coef[i] = tmp[0] * blst_coef[i]\n blst_expand[i] = tmp[1]\n blst_flg[i] = tmp[2]\n else: # blst_expand[i] revised\n blst_coef[i] = -blst_coef[i]\n # if revision force one more pass in case revision\n # causes repeated index previous to revised pair of\n # indexes\n blst_flg[i] = False\n blst_expand[i] = tmp[3]\n blst_coef.append(-blst_coef[i] * tmp[0])\n blst_expand.append(tmp[1])\n blst_flg.append(tmp[2])\n new_blst_coef = []\n new_blst_expand = []\n for (coef, expand) in zip(blst_coef, blst_expand):\n if expand in new_blst_expand:\n i = new_blst_expand.index(expand)\n new_blst_coef[i] += coef\n else:\n new_blst_expand.append(expand)\n new_blst_coef.append(coef)\n return new_blst_coef, 
new_blst_expand", "def sync(schedule):\n\t# Process the buses from the schedule in the form of \n\t# buses = [(offset,busid), (offset,busid), ...(offset,busid)], which is\n\t# buses = [(v0,m0),(v1,m1), ... (vn,mn)]\n\tbuses = [tuple(int(x) for x in b) \n\t\tfor b in itertools.filterfalse(\n\t\t\tlambda b: b[1] == 'x', enumerate(schedule.split(',')))]\n\tvs = [t[0] for t in buses]\n\tms = [t[1] for t in buses]\n\t\n\t# Use CRT to get a (A,b) tuple. Use their difference to get timestamp t\n\t# We presume all bus IDs are coprime in order to accelerate computation\n\ta,b = sympy.ntheory.modular.crt(ms,vs,check=False)\n\treturn b - a", "def solveU(U, b):\n # validate input\n if np.allclose(U,np.triu(U))==False or np.linalg.det == 0:\n raise TypeError(\"U is not an upper regular triangular matrix\")\n \n elif len(U.shape) != 2 or len(b.shape) != 1:\n raise TypeError(\"unsuitable object\")\n \n else:\n un, um = U.shape\n n, = b.shape\n if un != um or un != n:\n raise TypeError((\"dimensions do not fullfill requirements\"))\n\n # solve \n x = np.zeros(n, dtype=complex)\n x[-1] = (b[-1]) / U[n - 1, n - 1]\n for i in range(1, n):\n t = U[(n - (i + 1)):(n - i)] @ x\n x[-(i + 1)] = (b[-(i + 1)] - t) / U[n - (i + 1), n - (i + 1)]\n\n return x", "def freeze_duet_values():\n signal = nussl.AudioSignal(nussl.efz_utils.download_audio_file('dev1_female3_inst_mix.wav'))\n duet = nussl.Duet(signal, 3)\n output_folder = os.path.abspath('duet_benchmarks')\n if not os.path.isdir(output_folder):\n os.mkdir(output_folder)\n\n duet.stft_ch0, duet.stft_ch1, \\\n duet.frequency_matrix = duet._compute_spectrogram(duet.sample_rate)\n np.save(os.path.join(output_folder, 'benchmark_stft_ch0'), duet.stft_ch0)\n np.save(os.path.join(output_folder, 'benchmark_stft_ch1'), duet.stft_ch1)\n np.save(os.path.join(output_folder, 'benchmark_wmat'), duet.frequency_matrix)\n\n duet.symmetric_atn, duet.delay = duet._compute_atn_delay(duet.stft_ch0, duet.stft_ch1,\n duet.frequency_matrix)\n np.save(os.path.join(output_folder, 'benchmark_sym_atn'), duet.symmetric_atn)\n np.save(os.path.join(output_folder, 'benchmark_delay'), duet.delay)\n\n duet.normalized_attenuation_delay_histogram, \\\n duet.attenuation_bins, duet.delay_bins = duet._make_histogram()\n np.save(os.path.join(output_folder, 'benchmark_hist'),\n duet.normalized_attenuation_delay_histogram)\n np.save(os.path.join(output_folder, 'benchmark_atn_bins'), duet.attenuation_bins)\n np.save(os.path.join(output_folder, 'benchmark_delay_bins'), duet.delay_bins)\n\n duet.peak_indices = nussl.utils.find_peak_indices(duet.normalized_attenuation_delay_histogram,\n duet.num_sources,\n threshold=duet.peak_threshold,\n min_dist=[duet.attenuation_min_distance,\n duet.delay_min_distance])\n np.save(os.path.join(output_folder, 'benchmark_peak_indices'), duet.peak_indices)\n\n duet.delay_peak, duet.atn_delay_est, duet.atn_peak = duet._convert_peaks(duet.peak_indices)\n np.save(os.path.join(output_folder, 'benchmark_delay_peak'), duet.delay_peak)\n np.save(os.path.join(output_folder, 'benchmark_atn_delay_est'), duet.atn_delay_est)\n np.save(os.path.join(output_folder, 'benchmark_atn_peak'), duet.atn_peak)\n\n duet.masks = duet._compute_masks()\n np.save(os.path.join(output_folder, 'benchmark_masks'), duet.masks)\n\n final_signals = duet.make_audio_signals()\n np.save(os.path.join(output_folder, 'benchmark_final_signals'), final_signals)", "def solve_with_oil(self, items):\n\t\tresults = self.solve_all(items)\n\t\tresults, further_inputs = 
self.solve_oil(results)\n\t\tmerge_processes_into(results, self.solve_all(further_inputs))\n\t\treturn results", "def sincint(x, nres, speclist) :\n\n dampfac = 3.25*nres/2.\n ksize = int(21*nres/2.)\n if ksize%2 == 0 : ksize +=1\n nhalf = ksize//2 \n\n #number of output and input pixels\n nx = len(x)\n nf = len(speclist[0][0])\n\n # integer and fractional pixel location of each output pixel\n ix = x.astype(int)\n fx = x-ix\n\n # outputs\n outlist=[]\n for spec in speclist :\n if spec[1] is None :\n outlist.append([np.full_like(x,0),None])\n else :\n outlist.append([np.full_like(x,0),np.full_like(x,0)])\n\n for i in range(len(x)) :\n xkernel = np.arange(ksize)-nhalf - fx[i]\n # in units of Nyquist\n xkernel /= (nres/2.)\n u1 = xkernel/dampfac\n u2 = np.pi*xkernel\n sinc = np.exp(-(u1**2)) * np.sin(u2) / u2\n sinc /= (nres/2.)\n\n lobe = np.arange(ksize) - nhalf + ix[i]\n vals = np.zeros(ksize)\n vars = np.zeros(ksize)\n gd = np.where( (lobe>=0) & (lobe<nf) )[0]\n\n for spec,out in zip(speclist,outlist) :\n vals = spec[0][lobe[gd]]\n out[0][i] = (sinc[gd]*vals).sum()\n if spec[1] is not None : \n var = spec[1][lobe[gd]]\n out[1][i] = (sinc[gd]**2*var).sum()\n\n for out in outlist :\n if out[1] is not None : out[1] = np.sqrt(out[1])\n \n return outlist", "def reactor_func(concs, t):\n\n k1 = 4.64e-4 # 1/(M*s)\n r1 = -k1 * concs[0] * concs[1]\n\n return np.array([r1, r1, -r1, -r1])", "def find_bscs(ckt, a):\n return reduce(lambda x, y: x | y, [ckt[x].fins for x in a]).difference(set(a))", "def addition_mod(a, b, nbr):\n bina = [int(x) for x in bin(a)[2:]]\n binb = [int(x) for x in bin(b)[2:]]\n binn = [int(x) for x in bin(nbr)[2:]]\n #print(binn)\n while len(bina) >= len(binb):\n binb = [0]+binb\n while len(bina) < len(binb)-1:\n bina = [0]+bina\n while len(binn) < len(bina):\n binn = [0]+binn\n while len(binn) > len(bina):\n bina = [0]+bina\n binb = [0]+binb\n binn.reverse()\n bina.reverse()\n binb.reverse()\n #print(bina, binb, binn)\n n = len(bina)+len(binb)+len(binn)\n na = len(bina)\n nab = len(bina)+len(binb)\n q = QuantumRegister(n+2, 'q')\n circ = QuantumCircuit(q)\n for i in range(na):\n if bina[i]:\n circ.x(q[i])\n for i in range(len(binb)):\n if binb[i]:\n circ.x(q[na+i])\n for i in range(len(binn)):\n if binn[i]:\n circ.x(q[nab+i])\n addmod(circ, q, # A, B, lost, last, N, lost2, binn):\n [q[i] for i in range(len(bina))],\n [q[i+na] for i in range(len(binb)-1)],\n q[n],\n q[na+len(binb)-1],\n [q[i+nab] for i in range(len(binn))],\n q[n+1],\n binn)\n circ_m = measure(circ, q, [i for i in range(na,nab)])\n return circ_m", "def SecondaryComplex_to_Bid_Alternate():\n Parameter('RIP3_0' , 2.0e4) # molecules per cell\n Parameter('BidK_0' , 5.0e3) # molecules per cell\n \n alias_model_components()\n Initial(RIP3(bRHIM = None, state = 'unmod'), RIP3_0) # RIP3\n Initial(BidK(bf = None), BidK_0)\n # ==============================================================\n # Assembly of Complex II, Riptosome and Necrosome\n # --------------------------------------------------------------\n # FADD + TRADD[active] <-> FADD:TRADD[active]\n # FADD + RIP1 <-> FADD:RIP1\n # TRADD + RIP1 <-> TRADD:RIP1\n \n # CD95_to_secondary complex contains the rules for recruitment of proC8 to FADD.\n # (RIP1 or TRADD):FADD + proC8 <-> (RIP1 or TRADD):FADD:proC8\n # (RIP1 or TRADD):FADD:proC8 + proC8 <-> (RIP1 or TRADD):FADD:proC8:proC8\n # (RIP1 or TRADD):FADD:proC8 + flip_L <-> (RIP1 or TRADD):FADD:proC8:flip_L\n # (RIP1 or TRADD):FADD:proC8 + flip_S <-> (RIP1 or TRADD):proC8:flip_S\n \n # RIP1%ProC8%ProC8(in a 
complex) >> RIP1[trunc] + C8 + (remains of the complex)\n # RIP1%ProC8%cFlip[L](in a complex) >> RIP1[trunc] + remains of the complex)\n # RIP1%cFlip[S](in a complex) + RIP3 >> RIP1:RIP3(in a complex, i.e. necrosome)\n \n # RIP1 + C8 <-> RIP1:C8 >> RIP1[trunc] + C8\n # RIP3 + C8 <-> RIP3:C8 >> RIP3[trunc] + C8\n # Bid + C8 <-> Bid:C8 >> Bid[trunc] + C8\n \n # -------------Assembling Complex II-----------------\n Parameter('Ka_RIP1_FADD', 1e-7) # Biochemica et Biophysica Acta 1834(2013) 292-300\n Parameter('Kd_RIP1_FADD', 1e-8) # Biochemica et Biophysica Acta 1834(2013) 292-300\n alias_model_components()\n \n #Assembling TRADD dependent Complex II\n bind(FADD(bDD = None, bDED1 = None, bDED2 = None), 'bDD', TRADD(bDD1=None, state = 'active'), 'bDD1', [1e-6, 1e-3])\n bind(FADD(bDD = None, bDED1 = None, bDED2 = None), 'bDD', RIP1(bDD = None, state = 'deub'), 'bDD', [1e-8, 1e-1])\n \n #Recruiting RIP1 to secondary complex and TRADD dependent Complex II\n bind(FADD(bDD = None, bDED1 = ANY, bDED2 = ANY), 'bDD', RIP1(bDD=None, bRHIM = None, state = 'unmod'), 'bDD', [Ka_RIP1_FADD, Kd_RIP1_FADD])\n bind(FADD(bDD = None, bDED1 = ANY, bDED2 = ANY), 'bDD', RIP1(bDD=None, bRHIM = None, state = 'deub'), 'bDD', [Ka_RIP1_FADD, Kd_RIP1_FADD])\n \n #bind(TRADD(bDD2 = None, state = 'active'),'bDD2', RIP1(bDD = None, bRHIM = None, state = 'unmod'), 'bDD', [1e-6, 1e-1])\n bind(TRADD(bDD2 = None, state = 'active'),'bDD2', RIP1(bDD = None, bRHIM = None, state = 'deub'), 'bDD', [1e-6, 1e-1])\n # For simplicity, I am neglecting the binary intereaction that occurs between proC8 and RIP1.\n # Binding of proC8 and c-flip to FADD is accomplished in CD95_to_Secondary complex.\n \n #--------------RIP1 Truncation reactions-------------\n #---Truncation by C8---------------------------------\n RIP_CIIA_proC8 = RIP1(bDD=ANY, bRHIM = None, state = 'unmod')% TRADD(bDD2 = None, bDD1 = ANY, state = 'active') % FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED=ANY)%proC8(bDED=ANY)\n RIP_CIIA_proC8_alt = RIP1(bDD=ANY, bRHIM = None, state = 'deub')% TRADD(bDD2 = None, bDD1 = ANY, state = 'active') % FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED=ANY)%proC8(bDED=ANY)\n \n RIP_CIIB_proC8 = RIP1(bDD=ANY, bRHIM = None, state = 'unmod')% FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED=ANY)%proC8(bDED=ANY)\n RIP_CIIB_proC8_alt = RIP1(bDD=ANY, bRHIM = None, state = 'deub')% FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED=ANY)%proC8(bDED=ANY)\n \n CIIA = TRADD(bDD2 = None, bDD1 = ANY, state = 'active') % FADD(bDD=ANY, bDED1=None, bDED2=None)\n \n Rule('RIP1_truncation_CIIA', RIP_CIIA_proC8 >> CIIA + C8(bf = None, state = 'A') + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k11',1e-1))\n Rule('RIP1_truncation_CIIA_alt', RIP_CIIA_proC8_alt >> CIIA + C8(bf = None, state = 'A') + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k11a',1e-6))\n \n Rule('RIP1_truncation_CIIB', RIP_CIIB_proC8 >> FADD(bDD=None, bDED1=None, bDED2=None)+ C8(bf = None, state = 'A') + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k12', 1e-1))\n Rule('RIP1_truncation_CIIB_alt', RIP_CIIB_proC8_alt >> FADD(bDD=None, bDED1=None, bDED2=None)+ C8(bf = None, state = 'A') + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k12a', 1e-6))\n \n catalyze_state(C8(bf = None, state = 'A'), 'bf', RIP1(bDD=None), 'bRHIM', 'state', 'unmod', 'trunc', [1e-6, 1e-3, 1e-1])\n catalyze_state(C8(bf = None, state = 'A'), 'bf', RIP1(bDD=None), 'bRHIM', 'state', 'deub', 'trunc', [1e-6, 1e-3, 1e-1])\n \n #---Truncation by proC8:cFlip_L---------------------\n 
Riptosome_FADD = RIP1(bDD=1, bRHIM = None, state = 'unmod')%FADD(bDD=1, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY)\n Riptosome_FADD_alt = RIP1(bDD=1, bRHIM = None, state = 'deub')%FADD(bDD=1, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY)\n \n Riptosome_TRADD = RIP1(bDD=1, bRHIM = None, state = 'unmod')%TRADD(bDD1=ANY, bDD2=1)%FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY)\n Riptosome_TRADD_alt = RIP1(bDD=1, bRHIM = None, state = 'deub')%TRADD(bDD1=ANY, bDD2=1)%FADD(bDD=ANY, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY)\n \n Rule('RIP1_truncation_FADD', Riptosome_FADD >> FADD(bDD=None, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY) + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k13', 1e-1))\n Rule('RIP1_truncation_FADD_alt', Riptosome_FADD_alt >> FADD(bDD=None, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY) + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k13a', 1e-1))\n Rule('RIP1_truncation_TRADD', Riptosome_TRADD >> FADD(bDD=None, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY) + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k14', 10))\n Rule('RIP1_truncation_TRADD_alt', Riptosome_TRADD_alt >> FADD(bDD=None, bDED1=ANY, bDED2=ANY)%proC8(bDED = ANY)%flip_L(bDED = ANY) + RIP1(bDD=None, bRHIM = None, state = 'trunc'), Parameter('k14a', 10))\n \n # -------------RIP3 Binding Interactions----------------\n Ripto1_Flip_S = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=None, state='unmod') % TRADD(bDD1=ANY, bDD2=ANY, state='active') % flip_S(bDED=ANY) % proC8(bDED=ANY)\n Ripto2_Flip_S = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=None, state='unmod') % flip_S(bDED=ANY) % proC8(bDED=ANY)\n Necrosome1 = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=6, state='unmod') % TRADD(bDD1=ANY, bDD2=ANY, state='active') % flip_S(bDED=ANY) % proC8(bDED=ANY) % RIP3(bRHIM= 6, state = 'unmod')\n Necrosome2 = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=5, state='unmod') % flip_S(bDED=ANY) % proC8(bDED=ANY) % RIP3(bRHIM= 5, state = 'unmod')\n \n Ripto1_Flip_S_alt = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=None, state='deub') % TRADD(bDD1=ANY, bDD2=ANY, state='active') % flip_S(bDED=ANY) % proC8(bDED=ANY)\n Ripto2_Flip_S_alt = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=None, state='deub') % flip_S(bDED=ANY) % proC8(bDED=ANY)\n Necrosome1_alt = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=6, state='deub') % TRADD(bDD1=ANY, bDD2=ANY, state='active') % flip_S(bDED=ANY) % proC8(bDED=ANY) % RIP3(bRHIM= 6, state = 'unmod')\n Necrosome2_alt = FADD(bDD=ANY, bDED1=ANY, bDED2=ANY) % RIP1(bDD=ANY, bRHIM=5, state='deub') % flip_S(bDED=ANY) % proC8(bDED=ANY) % RIP3(bRHIM= 5, state = 'unmod')\n \n Rule('RIP3_binding1', Ripto1_Flip_S + RIP3(bRHIM= None, state = 'unmod') <> Necrosome1, Parameter('k15', 1e-6), Parameter('k16', 1e-3))\n Rule('RIP3_binding2', Ripto2_Flip_S + RIP3(bRHIM= None, state = 'unmod') <> Necrosome2, Parameter('k17', 1e-6), Parameter('k18', 1e-3))\n Rule('RIP3_binding1_alt', Ripto1_Flip_S_alt + RIP3(bRHIM= None, state = 'unmod') <> Necrosome1_alt, Parameter('k15a', 1e-6), Parameter('k16a', 1e-3))\n Rule('RIP3_binding2_alt', Ripto2_Flip_S_alt + RIP3(bRHIM= None, state = 'unmod') <> Necrosome2_alt, Parameter('k17a', 1e-6), Parameter('k18a', 1e-3))\n \n #RIP3 Truncation\n catalyze_state(C8(bf = None, state = 'A'), 'bf', RIP3(), 'bRHIM', 'state', 'unmod', 'trunc', [1e-6, 1e-3, 
1e-1])\n \n #-------------Bid Interactions--------------------------\n # Bid Phosphorylation and Truncation\n catalyze_state(BidK(), 'bf', Bid(), 'bf', 'state', 'U', 'po4', [1e-6, 1e-3, 1e-1])\n catalyze_state(C8(bf = None, state = 'A'), 'bf', Bid(), 'bf', 'state', 'U', 'T', [1.04e-5, 0.005, 0.1])\n \n # Bid-PO4 sequestering RIP1\n bind(RIP1(bDD = None, bRHIM = None, state = 'unmod'), 'bRHIM', Bid(bf = None, state = 'po4'), 'bf', [1e-6, 1e-3])\n bind(RIP1(bDD = None, bRHIM = None, state = 'deub'), 'bRHIM', Bid(bf = None, state = 'po4'), 'bf', [1e-6, 1e-3])", "def backcast(self, resids: NDArray) -> Union[float, NDArray]:\n tau = min(75, resids.shape[0])\n w = 0.94 ** np.arange(tau)\n w = w / sum(w)\n\n return float(np.sum((resids[:tau] ** 2.0) * w))", "def estimate(self, bases, freqs, **kwargs):\n \n # Make sure that frequencies are provided for every basis measured\n if len(bases) != len(freqs):\n print(\"Error, must provide frequency data for all bases measured.\")\n return\n\n # Go through the keyword arguments and set mu and eps if required.\n mu = 1e-4\n eps = 0.1 \n\n if \"mu\" in kwargs:\n mu = kwargs[\"mu\"]\n if \"eps\" in kwargs:\n eps = kwargs[\"eps\"]\n\n # Separate the bases out into measured and unmeasured\n meas_bs_idx = []\n unmeas_bs_idx = []\n\n for x in range(self.dim):\n if x in bases:\n meas_bs_idx.append(x)\n else:\n unmeas_bs_idx.append(x)\n\n # Handle the vertical slope separately\n if -1 in bases:\n meas_bs_idx.append(-1)\n else:\n unmeas_bs_idx.append(-1)\n\n # Begin with the initial state, the maximally mixed state\n rho_0 = (1.0 / self.dim) * np.eye(self.dim)\n rho_n = rho_0\n\n \"\"\"print(\"Measured bases are \", end = \"\")\n print(meas_bs_idx)\n print(\"Unmeasured bases are \", end = \"\")\n print(unmeas_bs_idx)\"\"\"\n \n n = 1\n\n # Iterate\n while (n):\n ########################################################\n # Compute W(rho)\n # I might eventually put this in a separate method, but\n # for now I'm going to leave it here to avoid having to\n # repeatedly pass the same (large chunk of) information \n # to some helper function.\n ########################################################\n term_1 = np.zeros((self.dim, self.dim))\n term_2 = np.zeros((self.dim, self.dim))\n\n # Compute the first sum, which contains the measurement \n # frequencies and the measured bases. Note that in theory\n # the bases may not be in ascending order, however the \n # frequencies will be generated in the same order as the \n # bases are placed in the list. 
So create a separate counter\n # for frequencies to just iterate through them one at a time\n # as we go through the bases by their slope index.\n freq_idx = 0 \n for basis_idx in meas_bs_idx:\n for proj_idx in range(self.dim):\n this_projector = self.projectors[basis_idx][proj_idx]\n\n p_num = freqs[freq_idx][proj_idx] \n p_denom = np.trace(np.dot(rho_n, this_projector))\n prefactor = p_num / p_denom\n\n term_1 = term_1 + (prefactor * this_projector)\n freq_idx += 1\n\n # If there are no unmeasured basis, do nothing\n if len(unmeas_bs_idx) != 0:\n # Compute the second sum, which is over all the unmeasured bases.\n for basis_idx in unmeas_bs_idx:\n for proj_idx in range(self.dim):\n this_projector = self.projectors[basis_idx][proj_idx]\n\n prefactor = log(np.trace(np.dot(rho_n, this_projector)))\n\n term_2 = term_2 + (prefactor * this_projector)\n \n \n # Finally, compute W(rho)\n W_rho_n = term_1 - mu * term_2\n ########################################################\n\n #print(\"n = \" + str(n))\n #print(rho_n) \n\n # Check if we've got a good estimate. If the desired accuracy \n # is satisfied by the most recent rho_n, then we're done. \n # Return the estimator and the number of steps.\n # If not, increment n and keep going.\n if self.check_accuracy(W_rho_n, rho_n):\n return rho_n, n \n else:\n n += 1\n\n # Compute the next term in the series. It's a big ugly expression,\n # so I've separated out a term 'clump', and also the num/denom\n clump = W_rho_n - np.trace(np.dot(W_rho_n, rho_n)) * np.eye(self.dim)\n \n numerator = np.dot(np.eye(self.dim) + eps * clump, \\\n np.dot(rho_n, np.eye(self.dim) + eps * clump))\n denominator = 1 + (eps ** 2) * np.trace(np.dot(np.dot(clump, clump), rho_n))\n\n rho_np1 = numerator / denominator\n rho_n = rho_np1", "def calculate_signal(phases):\n signals = np.real(np.sum(np.exp(1j*phases), axis = 1))\n return signals", "def SingleQubitRB(qubit: qreg, seqs, purity=False, add_cals=True):\n # Original:\n # seqsBis = []\n # op = [Id(qubit, length=0), Y90m(qubit), X90(qubit)]\n # for ct in range(3 if purity else 1):\n # for seq in seqs:\n # seqsBis.append(reduce(operator.add, [clifford_seq(c, qubit) for c in seq]))\n\n # #append tomography pulse to measure purity\n # seqsBis[-1].append(op[ct])\n # # Add the measurement to all sequences\n # seqsBis[-1].append(MEAS(qubit))\n\n # # Tack on the calibration sequences\n # if add_cals:\n # seqsBis += create_cal_seqs((qubit,), 2)\n\n# axis_descriptor = [{\n# 'name': 'length',\n# 'unit': None,\n# 'points': list(map(len, seqs)),\n# 'partition': 1\n# }]\n# metafile = compile_to_hardware(seqsBis, 'RB/RB', axis_descriptor = axis_descriptor, extra_meta = {'sequences':seqs})\n\n\n # seqs are result of create_RB_seqs: list of lists of integers\n # clifford_seq() returns a sequence of pulses itself\n # [clifford_seq() for c in seq]\n # gives a list of len(seq) sequences\n # reduce(operator.add, listOfSequences)\n # gives a single sequence of all the elements in listOfSequences\n # So the first for loop creates a single list of sequences\n\n ops = [Id]\n if purity:\n ops = [Id, Y90m, X90]\n for op in ops:\n for seq in seqs:\n init(qubit)\n for c in seq:\n clifford_seq(c, qubit)\n # append tomography pulse to measure purity\n if op == Id:\n op(qubit, length=0)\n else:\n op(qubit)\n # append measurement\n MEAS(qubit)\n\n if add_cals:\n # Tack on calibration sequences\n create_cal_seqs(qubit, 2)", "def _concatenate_pulses(\n self, pulse_instructions, scheduled_start_time, num_controls\n ):\n min_step_size = np.inf\n # Concatenate 
tlist and coeffs for each control pulses\n compiled_tlist = [[] for tmp in range(num_controls)]\n compiled_coeffs = [[] for tmp in range(num_controls)]\n for pulse_ind in range(num_controls):\n last_pulse_time = 0.0\n for start_time, tlist, coeff in pulse_instructions[pulse_ind]:\n # compute the gate time, step size and coeffs\n # according to different pulse mode\n (\n gate_tlist,\n coeffs,\n step_size,\n pulse_mode,\n ) = self._process_gate_pulse(start_time, tlist, coeff)\n min_step_size = min(step_size, min_step_size)\n\n if abs(last_pulse_time) < step_size * 1.0e-6: # if first pulse\n compiled_tlist[pulse_ind].append([0.0])\n if pulse_mode == \"continuous\":\n compiled_coeffs[pulse_ind].append([0.0])\n # for discrete pulse len(coeffs) = len(tlist) - 1\n\n # If there is idling time between the last pulse and\n # the current one, we need to add zeros in between.\n if np.abs(start_time - last_pulse_time) > step_size * 1.0e-6:\n idling_tlist = self._process_idling_tlist(\n pulse_mode, start_time, last_pulse_time, step_size\n )\n compiled_tlist[pulse_ind].append(idling_tlist)\n compiled_coeffs[pulse_ind].append(\n np.zeros(len(idling_tlist))\n )\n\n # Add the gate time and coeffs to the list.\n execution_time = gate_tlist + start_time\n last_pulse_time = execution_time[-1]\n compiled_tlist[pulse_ind].append(execution_time)\n compiled_coeffs[pulse_ind].append(coeffs)\n\n final_time = np.max([tlist[-1][-1] for tlist in compiled_tlist])\n for pulse_ind in range(num_controls):\n if not compiled_tlist[pulse_ind]:\n continue\n last_pulse_time = compiled_tlist[pulse_ind][-1][-1]\n if np.abs(final_time - last_pulse_time) > min_step_size * 1.0e-6:\n idling_tlist = self._process_idling_tlist(\n pulse_mode, final_time, last_pulse_time, min_step_size\n )\n compiled_tlist[pulse_ind].append(idling_tlist)\n compiled_coeffs[pulse_ind].append(np.zeros(len(idling_tlist)))\n\n for i in range(num_controls):\n if not compiled_coeffs[i]:\n compiled_tlist[i] = None\n compiled_coeffs[i] = None\n else:\n compiled_tlist[i] = np.concatenate(compiled_tlist[i])\n compiled_coeffs[i] = np.concatenate(compiled_coeffs[i])\n return compiled_tlist, compiled_coeffs", "def part_2():\n input_ = parse_input() + list(range(10, 1_000_001))\n cups = turn_input_into_cups(input_)\n cups = solve(cups, first_cup=cups[input_[0]], turns=10_000_000)\n\n return cups[1].next.number * cups[1].next.next.number", "def butterworth_coef(poles, terms=2):\r\n if terms == 2:\r\n g=[]\r\n g.append(1.0)\r\n for k in range(1,poles+1):\r\n g.append(2*math.sin((2*k-1)*math.pi/(2*poles)))\r\n g.append(1.0)\r\n else:\r\n g = \"this algorithm needs work\"\r\n return g", "def modulate(self, input_bits):\n\n index_list = map(lambda i: self.table[tuple((input_bits[i:i+self.num_bits_symbol]))], \\\n xrange(0, len(input_bits), self.num_bits_symbol))\n baseband_symbols = self.constellation[index_list]\n\n return baseband_symbols", "def burstensemble( base, x_0, z, dist, xi_p, mass, radius, bean, full_model=False ):\n\n minmdot = 0.0\n maxmdot = 1.0\n mdot_res = 1e-6\n sbt = bean.bstart\n salpha = []\n stime = []\n smdot = []\n se_b = []\n\n mdot = bean.flux_to_mdot(x_0, dist, xi_p, mass, radius, bean.pflux)\n\n for i in range(0, bean.numburstsobs):\n\n tmp = settle(base, z, x_0, mdot[i], 1.0, mass, radius)\n\n res = np.recarray(\n (1,), dtype=[(\"tdel\", np.float64), (\"e_b\", np.float64), (\"alpha\", np.float64), (\"mdot\", np.float64)]\n )\n # assign elements\n res.tdel = tmp.tdel / 24.0\n res.e_b = tmp.E_b*0.8 # multiply eb by 0.8 to account for incomlpete 
burning of fuel, as in Goodwin et al (2018).\n alpha = tmp.alpha\n alpha = alpha[0]\n res.mdot = mdot[i]\n _e_b = res.e_b\n _e_b = _e_b[0]\n se_b.append(_e_b)\n _mdot = res.mdot\n _mdot = _mdot[0]\n salpha.append(alpha)\n smdot.append(_mdot)\n # stime.append(bstart[i])\n stime.append(tmp.tdel[0])\n mdot_max = max(smdot)\n\n result = dict()\n\n if full_model:\n # model parameters are redundant for the model returned\n result[\"base\"] = [base]\n result[\"z\"] = [z]\n result[\"x_0\"] = [x_0]\n result[\"dist\"] = [dist]\n result[\"xi_p\"] = [xi_p]\n\n result[\"mdot_max\"] = [mdot_max]\n\n result[\"mass\"] = [mass]\n result[\"radius\"] = [radius]\n\n # now the actual predictions\n\n result[\"time\"] = stime\n result[\"mdot\"] = smdot\n result[\"alpha\"] = salpha\n result[\"e_b\"] = se_b\n\n # omit the printing for now, as it prevents assessing the progress\n # print('ensemble')\n # print(f\"In burstrain fluence is {se_b}\")\n\n return result", "def bjs(l, c):\n if len(l) == 4:\n l = mbvector(l)\n elif len(l) == 3:\n pass\n else:\n return 0\n v = np.array([1, pi, e])\n r = l / np.linalg.norm(l)\n m = np.cross(r, v)\n n = np.cross(r, m)\n m = m / np.linalg.norm(m)\n n = n / np.linalg.norm(n)\n w = np.arange(0, 2 * pi, 0.001)\n s = len(w)\n\n mm = vect_contract(m, c, m)\n mn = vect_contract(m, c, n)\n nm = vect_contract(n, c, m)\n nn0 = vect_contract(n, c, n)\n nn = np.linalg.inv(nn0)\n\n val1 = mm - np.dot(np.dot(mn, nn), nm)\n R = BB = np.zeros(shape=(3, 3))\n for i in range(1, s):\n t = 1 - cos(w[i])\n CO = cos(w[i])\n SI = sin(w[i])\n R[0, 0] = t * r[0] ** 2 + CO\n R[0, 1] = t * r[0] * r[1] - SI * r[2]\n R[0, 2] = t * r[0] * r[2] + SI * r[1]\n R[1, 0] = t * r[0] * r[1] + SI * r[2]\n R[1, 1] = t * r[1] ** 2 + CO\n R[1, 2] = t * r[1] * r[2] - SI * r[0]\n R[2, 0] = t * r[0] * r[2] - SI * r[1]\n R[2, 1] = t * r[1] * r[2] + SI * r[0]\n R[2, 2] = t * r[2] ** 2 + CO\n\n mr = np.dot(R, np.transpose(m))\n nr = np.dot(R, np.transpose(n))\n\n mm = vect_contract(mr, c, mr)\n mn = vect_contract(mr, c, nr)\n nm = vect_contract(nr, c, mr)\n nn0 = vect_contract(nr, c, nr)\n nn = np.linalg.inv(nn0)\n val2 = mm - np.dot(np.dot(mn, nn), nm)\n BB = BB + 0.5 * (val2 + val1) * (w[i] - w[i - 1])\n val1 = val2\n B = BB / (8 * pi**2)\n return B", "def nextbus(buses, timestamp):\n\twaits = [(bus, wait(bus,timestamp)) for bus in buses]\n\treturn min(waits, key= lambda w:w[1])", "def algorithm_4_10(p, tau, t, c):\n\n m = len(t) - (p + 1)\n n = len(tau) - (p + 1)\n c = np.array(c, dtype=np.float64)\n t = np.array(t, dtype=np.float64)\n tau = np.array(tau, dtype=np.float64)\n b = np.zeros(m)\n\n for i in range(m):\n mu = index(t[i], tau)\n if p == 0:\n b[i] = c[mu]\n else:\n C = c[mu - p:mu + 1]\n for j in range(0, p):\n k = p - j\n tau1 = tau[mu - k + 1:mu + 1]\n tau2 = tau[mu + 1:mu + k + 1]\n omega = np.divide(\n (t[i + k] - tau1), (tau2 - tau1),\n out=np.zeros_like(tau1),\n where=((tau2 - tau1) != 0))\n C = (1 - omega) * C[:-1] + omega * C[1:]\n b[i] = C\n return b", "def compute_bce(num_bidders, max_value, value_pdf, num_samples=100, max_trials=20, random_seed=1232, solver_str='COIN'):\r\n\r\n status = 0 # whether the LP was feasible\r\n trials = 0 # counter on sampling trials\r\n random.seed(random_seed) # seed the random generator\r\n while status != 1 and trials < max_trials: # while we have not found a feasible LP\r\n\r\n prob = plp.LpProblem(\"BCE\", plp.LpMinimize) # create an LP instance\r\n\r\n # Sample a set of bid vectors to try to create a BCE with just them\r\n bid_vectors = product(range(max_value + 1), 
repeat=num_bidders)\r\n pool = tuple(bid_vectors)\r\n indices = random.sample(range(len(pool)), min(num_samples, len(pool)))\r\n sampled_bid_vectors = set([pool[i] for i in indices])\r\n\r\n # Variables are of the form (v,b) where v is a value in {0,...,max_value}\r\n # and b is a bid vector in the sampled set\r\n lp_var_keys = product(*[range(max_value + 1), sampled_bid_vectors])\r\n\r\n # Create the psi variables which correspond to Pr[b | v] for each (v,b)\r\n # pair\r\n psi_vars = plp.LpVariable.dicts('psi', lp_var_keys, lowBound=0)\r\n\r\n # Creating the best response constraints\r\n devs = product(\r\n *[range(max_value + 1), range(max_value + 1), range(0, num_bidders)])\r\n for (cur_bid, dev_bid, bidder_id) in devs: # for all b_i*, b_i', i\r\n if cur_bid != dev_bid: # if b_i* \\neq b_i'\r\n # Create all the terms of the form Pr[b | v] * pi(v) * (U_i' - U_i)\r\n # for all b \\in S, such that b_i = b_i*\r\n dev_terms = [\r\n deviation_term(dev_bid, bidder_id, v, bids,\r\n psi_vars[(v, bids)], value_pdf[v])\r\n for (v, bids) in product(*[range(max_value + 1), sampled_bid_vectors])\r\n if bids[bidder_id] == cur_bid]\r\n # Add these terms to create the best response constraint\r\n prob += plp.lpSum(dev_terms) <= 0, \"Dev_{}_{}_{}\".format(\r\n cur_bid, dev_bid, bidder_id)\r\n\r\n # Constraint that Pr[b | v] is a distribution for each fixed v\r\n for value in range(max_value + 1):\r\n cond_vars = [psi_vars[(value, bids)]\r\n for bids in sampled_bid_vectors]\r\n prob += plp.lpSum(cond_vars) == 1, \"Density_Psi_{}\".format(value)\r\n\r\n # Objective coefficients are random numbers based on the seed\r\n np.random.seed(random_seed)\r\n prob += plp.lpSum([np.random.standard_normal(1) *\r\n var for var in psi_vars.values()])\r\n\r\n # Solve LP\r\n prob.solve(get_solver(solver_str))\r\n\r\n # Get the status returned by the solver. 1 means success\r\n status = int(prob.status)\r\n\r\n # Increase the trial counter\r\n trials += 1\r\n\r\n if status == 1:\r\n # Once we have found a BCE, compute the marginal bid vector\r\n # distribution\r\n bid_pdf = {}\r\n for bid_vector in sampled_bid_vectors:\r\n # Compute the probability of the bid vector: sum_{v} Pr[b | v] *\r\n # pi(v)\r\n prob_mass = sum([plp.value(psi_vars[(v, bid_vector)]) * value_pdf[v]\r\n for v in range(max_value + 1)])\r\n # If mass is positive add it to the bid_pdf dictionary\r\n if prob_mass > 0:\r\n bid_pdf[bid_vector] = prob_mass\r\n\r\n return bid_pdf\r\n else:\r\n return None", "def part1(input):\n sys = AmpSystem(input)\n return sys.max_thruster_signal([i for i in range(5)])", "def bufr(config, output_files=None, cycle='18'):\r\n if config['multi_stations']: #Train on multiple stations\r\n bufr_station_ids = config['BUFR']['bufr_station_id']\r\n if len(bufr_station_ids) != len(config['station_id']): #There has to be the same number of BUFR station IDs as station IDs, so raise error if not\r\n raise ValueError(\"There must be the same number of BUFR station IDs as station IDs\")\r\n if len(bufr_station_ids) != len(output_files): #There has to be the same number of output files as station IDs, so raise error if not\r\n raise ValueError(\"There must be the same number of output files as station IDs\")\r\n else:\r\n bufr_station_ids = [config['BUFR']['bufr_station_id']]\r\n if output_files is not None:\r\n output_files = [output_files]\r\n\r\n # Base arguments dictionary. 
dset and date will be modified iteratively.\r\n if config['verbose']:\r\n print('\\n')\r\n bufr_default_dir = '%s/metdat/bufkit' % config['BUFR_ROOT']\r\n bufr_data_dir = config['BUFR']['bufr_data_dir']\r\n if not(os.path.isdir(bufr_data_dir)):\r\n os.makedirs(bufr_data_dir)\r\n bufrgruven = config['BUFR']['bufrgruven']\r\n if config['verbose']:\r\n print('bufr: using BUFKIT files in %s' % bufr_data_dir)\r\n bufr_format = '%s/%s%s.%s_%s.buf'\r\n for i in range(len(bufr_station_ids)):\r\n bufr_station_id = bufr_station_ids[i]\r\n bufarg = {\r\n 'dset': '',\r\n 'date': '',\r\n 'cycle': cycle,\r\n 'stations': bufr_station_id.lower(),\r\n 'noascii': '',\r\n 'nozipit': '',\r\n 'prepend': ''\r\n }\r\n missing_dates = []\r\n models = config['BUFR']['bufr_models']\r\n model_names = config['BUFR']['models']\r\n start_date = datetime.strptime(config['data_start_date'], '%Y%m%d') - timedelta(days=1)\r\n end_date = datetime.strptime(config['data_end_date'], '%Y%m%d') - timedelta(days=1)\r\n dates = generate_dates(config, start_date=start_date, end_date=end_date)\r\n for date in dates:\r\n bufarg['date'] = datetime.strftime(date, '%Y%m%d')\r\n if date.year < 2010:\r\n if config['verbose']:\r\n print('bufr: skipping BUFR data for %s; data starts in 2010.' % bufarg['date'])\r\n continue\r\n if config['verbose']:\r\n print('bufr: date: %s' % bufarg['date'])\r\n \r\n for m in range(len(models)):\r\n if config['verbose']:\r\n print('bufr: trying to retrieve BUFR data for %s...' % model_names[m])\r\n bufr_new_name = bufr_format % (bufr_data_dir, bufarg['date'], '%02d' % int(bufarg['cycle']),\r\n model_names[m], bufarg['stations'])\r\n if os.path.isfile(bufr_new_name):\r\n if config['verbose']:\r\n print('bufr: file %s already exists; skipping!' % bufr_new_name)\r\n break\r\n \r\n if type(models[m]) == list:\r\n for model in models[m]:\r\n try:\r\n bufarg['dset'] = model\r\n bufr_retrieve(bufrgruven, bufarg)\r\n bufr_name = bufr_format % (bufr_default_dir, bufarg['date'], '%02d' % int(bufarg['cycle']),\r\n model, bufarg['stations'])\r\n bufr_file = open(bufr_name)\r\n bufr_file.close()\r\n os.rename(bufr_name, bufr_new_name)\r\n if config['verbose']:\r\n print('bufr: BUFR file found for %s at date %s.' % (model, bufarg['date']))\r\n print('bufr: writing BUFR file: %s' % bufr_new_name)\r\n break\r\n except:\r\n if config['verbose']:\r\n print('bufr: BUFR file for %s at date %s not retrieved.' % (model, bufarg['date']))\r\n else:\r\n try:\r\n model = models[m]\r\n bufarg['dset'] = model\r\n bufr_retrieve(bufrgruven, bufarg)\r\n bufr_name = bufr_format % (bufr_default_dir, bufarg['date'], '%02d' % int(bufarg['cycle']),\r\n bufarg['dset'], bufarg['stations'])\r\n bufr_file = open(bufr_name)\r\n bufr_file.close()\r\n os.rename(bufr_name, bufr_new_name)\r\n if config['verbose']:\r\n print('bufr: BUFR file found for %s at date %s.' % (model, bufarg['date']))\r\n print('bufr: writing BUFR file: %s' % bufr_new_name)\r\n except:\r\n if config['verbose']:\r\n print('bufr: BUFR file for %s at date %s not retrieved.' 
% (model, bufarg['date']))\r\n if not (os.path.isfile(bufr_new_name)):\r\n print('bufr: warning: no BUFR file found for model %s at date %s' % (\r\n model_names[m], bufarg['date']))\r\n missing_dates.append((date, model_names[m]))\r\n \r\n # Process data\r\n print('\\n')\r\n bufr_dict = OrderedDict({'PROF': OrderedDict(), 'SFC': OrderedDict(), 'DAY': OrderedDict()})\r\n for model in model_names:\r\n bufr_dict['PROF'][model] = OrderedDict()\r\n bufr_dict['SFC'][model] = OrderedDict()\r\n bufr_dict['DAY'][model] = OrderedDict()\r\n \r\n for date in dates:\r\n date_str = datetime.strftime(date, '%Y%m%d')\r\n verif_date = date + timedelta(days=1)\r\n start_dt = verif_date + timedelta(hours=config['forecast_hour_start'])\r\n end_dt = verif_date + timedelta(hours=config['forecast_hour_start'] + 24)\r\n for model in model_names:\r\n if (date, model) in missing_dates:\r\n if config['verbose']:\r\n print('bufr: skipping %s data for %s; file missing.' % (model, date_str))\r\n continue\r\n if config['verbose']:\r\n print('bufr: processing %s data for %s' % (model, date_str))\r\n bufr_name = bufr_format % (bufr_data_dir, date_str, '%02d' % int(bufarg['cycle']), model,\r\n bufarg['stations'])\r\n if not (os.path.isfile(bufr_name)):\r\n if config['verbose']:\r\n print('bufr: skipping %s data for %s; file missing.' % (model, date_str))\r\n continue\r\n profile = bufkit_parser_time_height(config, bufr_name, 6, start_dt, end_dt)\r\n sfc, daily = bufkit_parser_surface(bufr_name, 3, start_dt, end_dt)\r\n # Drop 'PRES' variable which is useless\r\n for key, values in profile.items():\r\n values.pop('PRES', None)\r\n profile[key] = values\r\n bufr_dict['PROF'][model][verif_date] = profile\r\n bufr_dict['SFC'][model][verif_date] = sfc\r\n bufr_dict['DAY'][model][verif_date] = daily\r\n \r\n #Optional: uncomment the two lines below to remove files that are finished processing to save disk space\r\n #os.remove(bufr_name)\r\n #os.system('rm %s/metdat/gempak/%s%s_%s*' % (config['BUFR_ROOT'], date_str, '%02d' % int(bufarg['cycle']), model.lower()))\r\n \r\n # Export data\r\n if output_files is None:\r\n output_file = '%s/%s_bufr.pkl' % (config['SITE_ROOT'], bufr_station_id)\r\n else:\r\n output_file = output_files[i]\r\n if config['verbose']:\r\n print('bufr: -> exporting to %s' % output_file)\r\n with open(output_file, 'wb') as handle:\r\n pickle.dump(bufr_dict, handle, protocol=2)\r\n\r\n return", "def decoupledpf(Ybus, Sbus, V0, pv, pq, ppci, options):\n # old algortihm options to the new ones\n pp2pypower_algo = {'fdbx': 2, 'fdxb': 3}\n\n # options\n tol = options[\"tolerance_mva\"]\n max_it = options[\"max_iteration\"]\n # No use currently for numba. TODO: Check if can be applied in Bp and Bpp\n # numba = options[\"numba\"]\n\n # NOTE: options[\"algorithm\"] is either 'fdbx' or 'fdxb'. 
Otherwise, error\n algorithm = pp2pypower_algo[options[\"algorithm\"]]\n\n voltage_depend_loads = options[\"voltage_depend_loads\"]\n v_debug = options[\"v_debug\"]\n\n baseMVA = ppci[\"baseMVA\"]\n bus = ppci[\"bus\"]\n branch = ppci[\"branch\"]\n gen = ppci[\"gen\"]\n\n # initialize\n i = 0\n V = V0\n Va = angle(V)\n Vm = abs(V)\n dVa, dVm = None, None\n\n if v_debug:\n Vm_it = Vm.copy()\n Va_it = Va.copy()\n else:\n Vm_it = None\n Va_it = None\n\n # set up indexing for updating V\n pvpq = r_[pv, pq]\n\n # evaluate initial mismatch\n P, Q = _evaluate_mis(Ybus, V, Sbus, pvpq, pq)\n\n # check tolerance\n converged = _check_for_convergence(P, Q, tol)\n\n # create and reduce B matrices\n Bp, Bpp = makeB(baseMVA, bus, real(branch), algorithm)\n # splu requires a CSC matrix\n Bp = Bp[array([pvpq]).T, pvpq].tocsc()\n Bpp = Bpp[array([pq]).T, pq].tocsc()\n\n # factor B matrices\n Bp_solver = splu(Bp)\n Bpp_solver = splu(Bpp)\n\n # do P and Q iterations\n while (not converged and i < max_it):\n # update iteration counter\n i = i + 1\n\n # ----- do P iteration, update Va -----\n dVa = -Bp_solver.solve(P)\n\n # update voltage\n Va[pvpq] = Va[pvpq] + dVa\n V = Vm * exp(1j * Va)\n\n # evalute mismatch\n P, Q = _evaluate_mis(Ybus, V, Sbus, pvpq, pq)\n\n # check tolerance\n if _check_for_convergence(P, Q, tol):\n converged = True\n break\n\n # ----- do Q iteration, update Vm -----\n dVm = -Bpp_solver.solve(Q)\n\n # update voltage\n Vm[pq] = Vm[pq] + dVm\n V = Vm * exp(1j * Va)\n\n if v_debug:\n Vm_it = column_stack((Vm_it, Vm))\n Va_it = column_stack((Va_it, Va))\n\n if voltage_depend_loads:\n Sbus = makeSbus(baseMVA, bus, gen, vm=Vm)\n\n # evalute mismatch\n P, Q = _evaluate_mis(Ybus, V, Sbus, pvpq, pq)\n\n # check tolerance\n if _check_for_convergence(P, Q, tol):\n converged = True\n break\n\n # the newtonpf/newtonpf funtion returns J. 
We are returning Bp and Bpp\n return V, converged, i, Bp, Bpp, Vm_it, Va_it", "def _update_pressure_bc(\n self,\n states: FlowFieldMap,\n additional_states: FlowFieldMap,\n ):\n bc_p = [[None, None], [None, None], [None, None]]\n\n velocity_keys = ['u', 'v', 'w']\n grid_spacing = (self._params.dx, self._params.dy, self._params.dz)\n\n def grad_per_dim(f, dim):\n \"\"\"Computes the diffusion term in a specific dimension.\"\"\"\n grad_ops = (\n lambda f: self._kernel_op.apply_kernel_op_x(f, 'kDx'),\n lambda f: self._kernel_op.apply_kernel_op_y(f, 'kDy'),\n lambda f: self._kernel_op.apply_kernel_op_z(f, 'kDz', 'kDzsh'),\n )\n return tf.nest.map_structure(\n lambda grad: grad / (2.0 * grid_spacing[dim]), grad_ops[dim](f))\n\n def ddh_per_dim(f, dim):\n \"\"\"Computes the second order derivative of `f` along `dim`.\"\"\"\n diff_ops = [\n lambda f: self._kernel_op.apply_kernel_op_x(f, 'kddx'),\n lambda f: self._kernel_op.apply_kernel_op_y(f, 'kddy'),\n lambda f: self._kernel_op.apply_kernel_op_z(f, 'kddz', 'kddzsh'),\n ]\n return tf.nest.map_structure(lambda diff: diff / grid_spacing[dim]**2,\n diff_ops[dim](f))\n\n # The diffusion term for the 3 velocity component can be expressed in vector\n # form as:\n # 𝛁·𝛕 = 𝜇 𝛁²u + 1/3𝜇 𝛁(𝛁·u).\n # We rearange terms in the wall-oriented coordinates (n is for the direction\n # normal to the wall, and t is for directions parallel/tangent to the wall).\n # Because the wall normal velocity component uₙ is 0 at the wall, 𝜕uₙ/𝜕t = 0\n # the equation above can be expressed as:\n # 𝛁·𝛕ₙ = 4/3 𝜇 𝜕²uₙ/𝜕n² + 1/3𝜇 𝜕/𝜕n (𝜕uₜ/𝜕t),\n # where n is for the direction normal to the wall, and t is for directions\n # parallel/tangent to the wall.\n # In additional, we assume that there's no turbulence at the wall, therefore\n # 𝜇 is the molecular viscosity.\n def diff_fn(\n mu_i: tf.Tensor,\n ddu_n_i: tf.Tensor,\n ddu_t_i: tf.Tensor,\n ) -> tf.Tensor:\n \"\"\"Computes the diffusion term at walls.\"\"\"\n return mu_i * (4.0 / 3.0 * ddu_n_i + 1.0 / 3.0 * ddu_t_i)\n\n mu = tf.nest.map_structure(lambda rho_i: self._params.nu * rho_i,\n states['rho'])\n ddu_n = [ddh_per_dim(states[velocity_keys[i]], i) for i in range(3)]\n du_dx = [grad_per_dim(states[velocity_keys[i]], i) for i in range(3)]\n du_t = (\n # The x component.\n tf.nest.map_structure(tf.math.add, du_dx[1], du_dx[2]),\n # The y component.\n tf.nest.map_structure(tf.math.add, du_dx[0], du_dx[2]),\n # The z component.\n tf.nest.map_structure(tf.math.add, du_dx[0], du_dx[1]),\n )\n ddu_t = [grad_per_dim(du_t[i], i) for i in range(3)]\n\n diff = [\n tf.nest.map_structure(diff_fn, mu, ddu_n[i], ddu_t[i]) for i in range(3)\n ]\n\n # Updates the pressure boundary condition based on the simulation setup.\n for i in range(3):\n for j in range(2):\n if (self._params.bc_type[i][j] ==\n boundary_condition_utils.BoundaryType.PERIODIC):\n bc_p[i][j] = None\n\n elif (self._params.bc_type[i][j] ==\n boundary_condition_utils.BoundaryType.INFLOW):\n bc_p[i][j] = (halo_exchange.BCType.NEUMANN_2, 0.0)\n\n elif (self._params.bc_type[i][j]\n == boundary_condition_utils.BoundaryType.OUTFLOW):\n if self._pressure_params.pressure_outlet:\n # Enforce a pressure outlet boundary condition on demand.\n bc_p[i][j] = (halo_exchange.BCType.DIRICHLET, 0.0)\n else:\n bc_p[i][j] = (halo_exchange.BCType.NEUMANN_2, 0.0)\n\n elif self._params.bc_type[i][j] in (\n boundary_condition_utils.BoundaryType.SLIP_WALL,\n boundary_condition_utils.BoundaryType.NON_SLIP_WALL,\n boundary_condition_utils.BoundaryType.SHEAR_WALL):\n\n bc_value = 
common_ops.get_face(diff[i], i, j,\n self._params.halo_width - 1,\n grid_spacing[i])[0]\n if i == self.g_dim:\n # Ensures the pressure balances with the buoyancy at the first fluid\n # layer by assigning values to the pressure in halos adjacent to the\n # fluid domain.\n rho_0 = self._thermodynamics.rho_ref(\n additional_states.get('zz', None), additional_states\n )\n b = eq_utils.buoyancy_source(self._kernel_op, states['rho_thermal'],\n rho_0, self._params, i)\n\n bc_value = tf.nest.map_structure(\n tf.math.add, bc_value,\n common_ops.get_face(b, i, j, self._params.halo_width - 1,\n grid_spacing[i])[0])\n\n # The boundary condition for pressure is applied at the interface\n # between the boundary and fluid only. Assuming everything is\n # homogeneous behind the halo layer that's closest to the fluid, a\n # homogeneous Neumann BC is applied to all other layers for pressure.\n zeros = [tf.nest.map_structure(tf.zeros_like, bc_value)] * (\n self._params.halo_width - 1)\n\n bc_planes = zeros + [bc_value] if j == 0 else [bc_value] + zeros\n\n bc_p[i][j] = (halo_exchange.BCType.NEUMANN_2, bc_planes)\n else:\n raise ValueError('{} is not defined for pressure boundary.'.format(\n self._params.bc_type[i][j]))\n\n self._bc['p'] = bc_p", "def make_bispectra(self, bgwindow=4):\n\n bisp = lambda d, ij, jk, ki: d[:,ij] * d[:,jk] * n.conj(d[:,ki]) # bispectrum for pol data\n# bisp = lambda d, ij, jk, ki: n.complex(d[ij] * d[jk] * n.conj(d[ki])) # without pol axis\n\n triples = self.make_triples()\n meanbl = self.data.mean(axis=2).mean(axis=0) # find bls with no zeros in either pol to ignore in triples\n self.triples = triples[n.all(meanbl[triples][:,0] != 0j, axis=1) & n.all(meanbl[triples][:,1] != 0j, axis=1) & n.all(meanbl[triples][:,2] != 0j, axis=1)] # only take triples if both pols are good. may be smaller than set for an individual pol\n\n # set up arrays for bispectrum and for weighting data (ignoring zeros)\n bispectra = n.zeros((len(self.dmarr), len(self.data), len(self.triples)), dtype='complex')\n truearr = n.ones( (self.npol, self.nbl, len(self.chans)))\n falsearr = n.zeros( (self.npol, self.nbl, len(self.chans)))\n\n # iterate over dm trials and integrations\n for d in xrange(len(self.dmarr)):\n twidth = n.round(self.twidths[d])\n dmwidth = int(n.round(n.max(self.dmtrack0[d][0]) - n.min(self.dmtrack0[d][0])))\n\n for i in xrange((bgwindow/2)+twidth, len(self.data)-( (bgwindow/2)+2*twidth+dmwidth )): # dmwidth avoided at end, others are split on front and back side of time iteration\n# for i in xrange((bgwindow/2)+twidth, len(self.data)-( (bgwindow/2)+twidth+dmwidth ), max(1,twidth/2)): # can step by twidth/2, but messes up data products\n diff = self.tracksub(d, i, bgwindow=bgwindow)\n\n if len(n.shape(diff)) == 1: # no track\n continue\n\n# **need to redo for self.flags**\n weightarr = n.where(diff != 0j, truearr, falsearr) # ignore zeros in mean across channels # bit of a hack\n try:\n diffmean = n.average(diff, axis=2, weights=weightarr)\n except ZeroDivisionError:\n diffmean = n.mean(diff, axis=2) # if all zeros, just make mean # bit of a hack\n\n for trip in xrange(len(self.triples)):\n ij, jk, ki = self.triples[trip]\n bispectra[d, i, trip] = bisp(diffmean, ij, jk, ki).mean(axis=0) # Stokes I bispectrum. 
Note we are averaging after forming bispectrum, so not technically a Stokes I bispectrum.\n print 'dedispersed for ', self.dmarr[d]\n self.bispectra = n.ma.masked_array(bispectra, bispectra == 0j)", "def bisplrep(x, y, z, w=None, xb=None, xe=None, yb=None, ye=None,\n kx=3, ky=3, task=0, s=None, eps=1e-16, tx=None, ty=None,\n full_output=0, nxest=None, nyest=None, quiet=1):\n x, y, z = map(ravel, [x, y, z]) # ensure 1-d arrays.\n m = len(x)\n if not (m == len(y) == len(z)):\n raise TypeError('len(x)==len(y)==len(z) must hold.')\n if w is None:\n w = ones(m, float)\n else:\n w = atleast_1d(w)\n if not len(w) == m:\n raise TypeError('len(w)=%d is not equal to m=%d' % (len(w), m))\n if xb is None:\n xb = x.min()\n if xe is None:\n xe = x.max()\n if yb is None:\n yb = y.min()\n if ye is None:\n ye = y.max()\n if not (-1 <= task <= 1):\n raise TypeError('task must be -1, 0 or 1')\n if s is None:\n s = m - sqrt(2*m)\n if tx is None and task == -1:\n raise TypeError('Knots_x must be given for task=-1')\n if tx is not None:\n _surfit_cache['tx'] = atleast_1d(tx)\n nx = len(_surfit_cache['tx'])\n if ty is None and task == -1:\n raise TypeError('Knots_y must be given for task=-1')\n if ty is not None:\n _surfit_cache['ty'] = atleast_1d(ty)\n ny = len(_surfit_cache['ty'])\n if task == -1 and nx < 2*kx+2:\n raise TypeError('There must be at least 2*kx+2 knots_x for task=-1')\n if task == -1 and ny < 2*ky+2:\n raise TypeError('There must be at least 2*ky+2 knots_x for task=-1')\n if not ((1 <= kx <= 5) and (1 <= ky <= 5)):\n raise TypeError('Given degree of the spline (kx,ky=%d,%d) is not '\n 'supported. (1<=k<=5)' % (kx, ky))\n if m < (kx + 1)*(ky + 1):\n raise TypeError('m >= (kx+1)(ky+1) must hold')\n if nxest is None:\n nxest = int(kx + sqrt(m/2))\n if nyest is None:\n nyest = int(ky + sqrt(m/2))\n nxest, nyest = max(nxest, 2*kx + 3), max(nyest, 2*ky + 3)\n if task >= 0 and s == 0:\n nxest = int(kx + sqrt(3*m))\n nyest = int(ky + sqrt(3*m))\n if task == -1:\n _surfit_cache['tx'] = atleast_1d(tx)\n _surfit_cache['ty'] = atleast_1d(ty)\n tx, ty = _surfit_cache['tx'], _surfit_cache['ty']\n wrk = _surfit_cache['wrk']\n u = nxest - kx - 1\n v = nyest - ky - 1\n km = max(kx, ky) + 1\n ne = max(nxest, nyest)\n bx, by = kx*v + ky + 1, ky*u + kx + 1\n b1, b2 = bx, bx + v - ky\n if bx > by:\n b1, b2 = by, by + u - kx\n msg = \"Too many data points to interpolate\"\n lwrk1 = _int_overflow(u*v*(2 + b1 + b2) +\n 2*(u + v + km*(m + ne) + ne - kx - ky) + b2 + 1,\n msg=msg)\n lwrk2 = _int_overflow(u*v*(b2 + 1) + b2, msg=msg)\n tx, ty, c, o = _fitpack._surfit(x, y, z, w, xb, xe, yb, ye, kx, ky,\n task, s, eps, tx, ty, nxest, nyest,\n wrk, lwrk1, lwrk2)\n _curfit_cache['tx'] = tx\n _curfit_cache['ty'] = ty\n _curfit_cache['wrk'] = o['wrk']\n ier, fp = o['ier'], o['fp']\n tck = [tx, ty, c, kx, ky]\n\n ierm = min(11, max(-3, ier))\n if ierm <= 0 and not quiet:\n _mess = (_iermess2[ierm][0] +\n \"\\tkx,ky=%d,%d nx,ny=%d,%d m=%d fp=%f s=%f\" %\n (kx, ky, len(tx), len(ty), m, fp, s))\n warnings.warn(RuntimeWarning(_mess))\n if ierm > 0 and not full_output:\n if ier in [1, 2, 3, 4, 5]:\n _mess = (\"\\n\\tkx,ky=%d,%d nx,ny=%d,%d m=%d fp=%f s=%f\" %\n (kx, ky, len(tx), len(ty), m, fp, s))\n warnings.warn(RuntimeWarning(_iermess2[ierm][0] + _mess))\n else:\n try:\n raise _iermess2[ierm][1](_iermess2[ierm][0])\n except KeyError as e:\n raise _iermess2['unknown'][1](_iermess2['unknown'][0]) from e\n if full_output:\n try:\n return tck, fp, ier, _iermess2[ierm][0]\n except KeyError:\n return tck, fp, ier, 
_iermess2['unknown'][0]\n else:\n return tck", "def targetFromSignals(obars, nbands=3, amount=1, targetprofit=15., stoploss=45.):\n # bandsg, yband, ask, bid, day, amount, targetprofit, stoploss\n bars = obars.copy()\n for j in range(nbands): # for each band traverse it\n ibandsg = bars.columns.get_loc('bandsg'+str(j))\n # being pessimistic ... right\n ybandsell = traverseSellBand(bars.iloc[:, ibandsg].values.astype(int),\n bars.H.values, bars.L.values, bars.date.values,\n amount, targetprofit, stoploss)\n ybandbuy = traverseBuyBand(bars.iloc[:, ibandsg].values.astype(int),\n bars.H.values, bars.L.values, bars.date.values,\n amount, targetprofit, stoploss)\n bars['y'+str(j)] = mergebandsignals(ybandsell, ybandbuy)\n\n return bars", "def burg(sample_list, coefficient_number):\n\n p = sum(sample ** 2 for sample in sample_list)\n a0 = p / len(sample_list)\n\n b1 = sample_list[:len(sample_list) - 1]\n b2 = sample_list[1:]\n\n aa = [0.0 for i in range(coefficient_number)]\n coefficient_list = [0.0 for i in range(coefficient_number)]\n\n for i in range(coefficient_number):\n\n numerator = 0.0\n denominator = 0.0\n\n for j in range(len(sample_list) - i - 1):\n numerator += b1[j] * b2[j]\n denominator += b1[j] ** 2 + b2[j] **2\n\n coefficient_list[i] = 2.0 * numerator / denominator\n a0 *= 1.0 - coefficient_list[i] ** 2\n\n for j in range(i - 1):\n coefficient_list[j] = aa[j] - coefficient_list[i] * aa[i - j - 1]\n\n if i < coefficient_number + 1:\n\n for j in range(i + 1):\n aa[j] = coefficient_list[j]\n\n for j in range(len(sample_list) - i - 2):\n b1[j] -= aa[i] * b2[j]\n b2[j] = b2[j + 1] - aa[i] * b1[j + 1];\n\n return a0, coefficient_list", "def optimize(self, cycles = 1, waveplates = ['half', 'quarter'],\n counter = 0):\n \n if counter in range(0,4):\n self.counter = counter\n else:\n raise ValueError('Argument specified for counter is not understood')\n\n for c in range(cycles):\n print '* Optimizing cycle %d of %d...'%(c+1, cycles)\n \n for w in waveplates:\n #measure position before optimizing\n self.rotator.set_zero_position(getattr(self,'_'+w+'_channel'))\n pos_before = getattr(self.rotator, 'get_noof_steps_ch'+\\\n str(getattr(self,'_'+w+'_channel')) )()\n \n #turn waveplat2es\n data, qtdata, dataplot, premature_quit = self.run(w, self.get_opt_red_power())\n if not premature_quit:\n qtdata, fitres = self.fit(w, data, qtdata, dataplot)\n qtdata.close_file()\n\n if not premature_quit:\n \n #set optimal position\n if type(fitres) != type(False):\n if np.sign(fitres['a2']) != -1:\n optim_pos = -np.int(fitres['a1']/(2*fitres['a2']))\n else:\n print '\\tFitting a maximum instead of a minimum.'\n optim_pos = 0\n \n else:\n print '\\tGuessing optimal waveplate position...'\n optim_pos = data['wp_steps'](self.find_nearest(data['counts'],\n min(data['counts'])))\n\n if self.get_plot_degrees():\n print '\\tOptimal waveplate position determined at %.3f degrees.'%(optim_pos*self.get_conversion_factor(w))\n else:\n print '\\tOptimal waveplate position determined at %d steps.'%optim_pos\n \n #BEWARE: never ask the current position in noof_steps\n curr_pos = data['wp_step'][len(data['wp_step'])-1]\n\n #check that the optimum position is somewhat reasonable\n if abs(optim_pos) < self.check_noof_steps:\n #set the position to the optimal position\n self.rotator.quick_scan(optim_pos-curr_pos, \n getattr(self,'_'+w+'_channel'))\n else:\n print '\\tWARNING: Optimal position differs %s steps\\\n from initial position'%optim_pos\n check = raw_input('\\tPress \"ENTER\" to continue, \"q\" to quit\\n')\n \n if 
check == '':\n #set the position to the optimal position\n self.rotator.quick_scan(optim_pos-curr_pos, \n getattr(self,'_'+w+'_channel'))\n \n elif check == 'q':\n print 'Process aborted by user'\n pass\n else:\n raise ValueError('Response to question is not \\\n understood. Not taking any action.')\n else:\n #what to do if there was a premature quit during optimization?\n pos_quit = data['wp_step'][len(data['wp_step'])-1]\n\n print '\\tReturning to initial position...'\n #set the position to the optimal position\n self.rotator.quick_scan(pos_before-pos_quit, getattr(self,'_'+w+'_channel'))\n\n #measure position after optimizing\n pos_after = getattr(self.rotator, 'get_noof_steps_ch'+\\\n str(getattr(self,'_'+w+'_channel')))()\n\n #print \"\\tPosition of %s waveplate changed %d steps\"\\\n # %(w, pos_after-pos_before)\n \n if msvcrt.kbhit():\n kb_char=msvcrt.getch()\n if kb_char == \"q\" : break\n \n qt.msleep(0.5)\n \n if premature_quit:\n break", "def planck_B_nu(freq, T):\n import numpy as np\n from astropy import units as u\n from astropy import constants as c\n\n if isinstance(T, u.quantity.Quantity):\n use_units = True\n else:\n T = T * u.K\n use_units = False\n\n if not isinstance(freq, u.quantity.Quantity):\n freq *= u.Hz\n\n T = np.array(T.value, ndmin=1) * T.unit\n freq = np.array(freq.value, ndmin=1) * freq.unit\n\n f_ov_T = freq[np.newaxis, :] / T[:, np.newaxis]\n mx = np.floor(np.log(np.finfo(f_ov_T.ravel()[0].value).max))\n exp = np.minimum(f_ov_T * c.h / c.k_B, mx)\n exp = np.maximum(exp, -mx)\n\n output = 2 * c.h * freq**3 / c.c**2 / (np.exp(exp) - 1.0) / u.sr\n\n cgsunit = 'erg/(s*sr*cm**2*Hz)'\n if use_units:\n return output.to(cgsunit).squeeze()\n else:\n return output.to(cgsunit).value.squeeze()", "def quantum_pool_circuit(self, source_bits, sink_bits, symbols):\n circuit = cirq.Circuit()\n for source, sink in zip(source_bits, sink_bits):\n circuit += self.two_qubit_pool(source, sink, symbols)\n return circuit", "def prbs(Tmax, Tmin, initstate=\"random\"):\n if not isinstance(Tmax, int):\n raise TypeError(\"`Tmax` must be an integer\")\n\n if Tmax < 2:\n raise ValueError(\"`Tmax` must be > 2\")\n\n if not isinstance(Tmin, int):\n raise TypeError(\"`Tmax` must be an integer\")\n\n if Tmin < 1:\n raise ValueError(\"`Tmin` must be > 1\")\n\n if Tmin >= Tmax:\n raise ValueError(\"`Tmax` must be strictly superior to `Tmin`\")\n\n __init_availabble__ = [\"random\", \"ones\"]\n if initstate not in __init_availabble__:\n raise ValueError(f\"`initstate` must be either {__init_availabble__}\")\n\n # get the register length\n n = np.ceil(Tmax / Tmin)\n if n < 2 or n > 31:\n raise ValueError(\n \"The PRBS cannot be generated, \" \"decompose the signal in two sequences\"\n )\n\n # Linear feedback register up to 32 bits\n fpoly = {\n 2: [2, 1],\n 3: [3, 1],\n 4: [4, 1],\n 5: [5, 2],\n 6: [6, 1],\n 7: [7, 1],\n 8: [8, 4, 3, 2],\n 9: [9, 4],\n 10: [10, 3],\n 11: [11, 2],\n 12: [12, 6, 4, 1],\n 13: [13, 4, 3, 1],\n 14: [14, 8, 6, 1],\n 15: [15, 1],\n 16: [16, 12, 3, 1],\n 17: [17, 3],\n 18: [18, 7],\n 19: [19, 5, 2, 1],\n 20: [20, 3],\n 21: [21, 2],\n 22: [22, 1],\n 23: [23, 5],\n 24: [24, 7, 2, 1],\n 25: [25, 3],\n 26: [26, 6, 2, 1],\n 27: [27, 5, 2, 1],\n 28: [28, 3],\n 29: [29, 2],\n 30: [30, 23, 2, 1],\n 31: [31, 3],\n }\n\n L = LFSR(fpoly=fpoly[n], initstate=initstate, verbose=False)\n\n seq = []\n for n in range(L.expectedPeriod):\n L.next()\n seq.append(L.state[0])\n\n seq_padded = np.repeat(seq, Tmin)\n\n # check generated PRBS\n assert seq_padded.shape[0] == L.expectedPeriod * 
Tmin\n assert max(len(list(v)) for g, v in itertools.groupby(seq_padded)) == Tmax\n assert min(len(list(v)) for g, v in itertools.groupby(seq_padded)) == Tmin\n\n return seq_padded", "def find_busses(singals):\n buses = {}\n for n in singals:\n key = (n.split(\"[\"))[0]\n vals = map(lambda x: x.split(\"]\")[0] , n.split(\"[\")[1:] ) \n if key not in buses.keys():\n buses[key] = vals\n else:\n buses[key] = map(lambda x: max(buses[key][x], vals[x]), range(len(vals)) )\n\n return buses", "def reconstruct_solution(spikes,sol,uval,twin,ics,tau,variable,**kwargs):\n\n # Model parameters\n pars = {'dt' : 1e-3}\n pars = gu.varargin(pars,**kwargs)\n\n # Generate time vector\n time = np.arange(twin[0],twin[-1],pars['dt'])\n time = np.sort(np.r_[time,spikes])\n # Generate spike vector\n tspk = np.copy(time)\n for i in range(1,len(spikes)):\n tspk[np.where(np.logical_and(time>=spikes[i-1],time<spikes[i]))[0]] = spikes[i-1]\n tspk[np.where(time >= spikes[len(spikes)-1])[0]] = spikes[len(spikes)-1]\n tspk[np.where(time < spikes[0])[0]] = 0\n # Generate general solution vector\n vsol = np.ones(time.size)\n if (variable=='x') and isscalar(uval):\n uval = uval * np.ones(sol.size)\n if variable=='x':\n for i in range(1, len(spikes)):\n # x must be given at x(t_i^+) according to xsol\n vsol[np.where(np.logical_and(time >= spikes[i - 1], time < spikes[i]))[0]] = sol[i-1]*(1-uval[i-1])\n vsol[np.where(time >= spikes[len(spikes) - 1])[0]] = sol[len(spikes) - 1]*(1-uval[len(spikes)-1])\n else:\n for i in range(1, len(spikes)):\n vsol[np.where(np.logical_and(time >= spikes[i - 1], time < spikes[i]))[0]] = sol[i-1]\n vsol[np.where(time >= spikes[len(spikes) - 1])[0]] = sol[len(spikes) - 1]\n vsol[np.where(time < spikes[0])[0]] = ics\n # Compute effective solution\n solution = np.zeros((2, time.size))\n solution[0] = time\n\n if variable=='x':\n # Assumes that the first ICs is x(0)\n solution[1] = xsol(vsol,time-tspk,tau)\n else:\n solution[1] = usol(vsol,time-tspk,tau)\n\n return solution", "def extractstatesusingintegral(self, bias=1.0):\n numtoadd = int(round(float(bias) * simplesum(comp.weight for comp in self.gmm)))\n print(\"bias is %g, numtoadd is %i\" % (bias, numtoadd))\n items = []\n # A temporary list of peaks which will gradually be decimated as we steal from its highest peaks\n peaks = [{'loc': comp.loc, 'weight': comp.weight, 'id': comp.id} for comp in self.gmm]\n while numtoadd > 0:\n windex = 0\n wsize = 0\n for which, peak in enumerate(peaks):\n if peak['weight'] > wsize:\n windex = which\n wsize = peak['weight']\n # add the winner\n items.append([deepcopy(peaks[windex]['loc']), 0, peaks[windex]['id']])\n #peaks[windex]['weight'] -= 100.0\n peaks.pop(windex)\n numtoadd -= 1\n\n lp, lc = len(self.pre_state), len(items) # pre_state and items is current state\n cost = numpy.ones([lp, lc]) * 100000000\n for i in range(0, lp):\n for j in range(0, lc):\n if (self.pre_state[i][2] == items[j][2]):\n xp, yp, _, _ = self.pre_state[i][0]\n xc, yc, _, _ = items[j][0]\n cost[i, j] = sqrt((xp - xc) ** 2 + (yp - yc) ** 2)\n row_ind, col_ind = linear_sum_assignment(cost, maximize=False)\n for i, idx in enumerate(col_ind):\n items[idx][1] = self.pre_state[row_ind[i]][1]\n for i in range(0, lc):\n if i not in col_ind:\n self.track_id += 1\n items[i][1] = self.track_id\n\n self.pre_state = deepcopy(items)\n\n return items", "def make_bispectra(self, bgwindow=4):\n\n bisp = lambda d, ij, jk, ki: d[:,ij] * d[:,jk] * n.conj(d[:,ki]) # bispectrum for pol data\n\n self.triples = self.make_triples()\n self.bispectra = 
n.ma.zeros((len(self.data), len(self.triples)), dtype='complex')\n\n for i in xrange((bgwindow/2)+self.twidth, len(self.data)-( (bgwindow/2)+2*self.twidth )):\n# for i in xrange((bgwindow/2)+self.twidth, len(self.data)-( (bgwindow/2)+self.twidth ), max(1,self.twidth)): # leaves gaps in data\n diff = self.tracksub(i, bgwindow=bgwindow)\n\n if len(n.shape(diff)) == 1: # no track\n continue\n\n diffmean = n.mean(diff, axis=2) # if all zeros, just make mean # bit of a hack\n\n for trip in xrange(len(self.triples)):\n ij, jk, ki = self.triples[trip]\n self.bispectra[i, trip] = bisp(diffmean, ij, jk, ki).mean(axis=0) # Stokes I bispectrum. Note we are averaging after forming bispectrum, so not technically a Stokes I bispectrum.", "def causDspectra(uxmax, uymax, ax, ay, dso, dsl, dm, m, n, N):\n \n ymin = -m*uxmax + n\n ymax = m*uxmax + n\n if ymin < -uymax:\n xmin = (-uymax - n)/m\n ymin = m*xmin + n\n else:\n xmin = -uxmax\n if ymax > uymax:\n xmax = (uymax - n)/m\n ymax = m*xmax + n\n else:\n xmax = uxmax\n \n dlo = dso - dsl\n coeff = dsl*dlo*re*dm/(2*pi*dso)\n \n rx = np.linspace(xmin - 5., xmax + 5., 500)\n ry = np.linspace(ymin - 5., ymax + 5., 500)\n uvec = np.meshgrid(rx, ry)\n A, B, C, D, E = causticFreqHelp(uvec, ax, ay, m, n)\n upxvec = np.linspace(xmin, xmax, N)\n freqcaus = []\n for upx in upxvec:\n eq1 = A*upx**2 + B*upx + C\n eq2 = D*upx + E\n evcaus = np.array([eq1, eq2])\n roots = polishedRootsBulk(evcaus, causEqFreq, rx, ry, args = (upx, ax, ay, m, n))\n for root in roots:\n ux, uy = root\n arg = coeff*lensg(ux, uy)[0]/(ux - upx)\n # print(arg)\n if arg > 0:\n freq = c*np.sqrt(arg)/(ax*GHz)\n if freq > 0.01:\n freqcaus.append([upx, freq])\n # print(freqcaus)\n freqcaus = np.asarray(freqcaus).T\n # plt.scatter(freqcaus[0], freqcaus[1], marker = '.', color = 'black', s = 3.)\n # plt.xlim(xmin, xmax)\n # plt.ylim(0., max(freqcaus[1]) + 0.5)\n # plt.xlabel(r\"$u'_x$\", fontsize = 16)\n # plt.ylabel(r'$\\nu$ (GHz)', fontsize = 16)\n # plt.grid()\n # plt.show()\n return freqcaus", "def apply_transmission(self, slamb, sflux):\n _wavelength = self._get_filter_in_units_of(slamb)\n _lamb = _drop_units(slamb)\n ifT = np.interp(_lamb, _wavelength, self.transmit, left=0., right=0.)\n return ifT * sflux", "def _concatenate_pulses(\n self, pulse_instructions, scheduled_start_time, num_controls):\n # Concatenate tlist and coeffs for each control pulses\n compiled_tlist = [[] for tmp in range(num_controls)]\n compiled_coeffs = [[] for tmp in range(num_controls)]\n for pulse_ind in range(num_controls):\n last_pulse_time = 0.\n for start_time, tlist, coeff in pulse_instructions[pulse_ind]:\n # compute the gate time, step size and coeffs\n # according to different pulse mode\n gate_tlist, coeffs, step_size, pulse_mode = \\\n self._process_gate_pulse(start_time, tlist, coeff)\n\n if abs(last_pulse_time) < step_size * 1.0e-6: # if first pulse\n compiled_tlist[pulse_ind].append([0.]) \n if pulse_mode == \"continuous\":\n compiled_coeffs[pulse_ind].append([0.])\n # for discrete pulse len(coeffs) = len(tlist) - 1\n\n # If there is idling time between the last pulse and\n # the current one, we need to add zeros in between.\n if np.abs(start_time - last_pulse_time) > step_size * 1.0e-6:\n idling_tlist = self._process_idling_tlist(\n pulse_mode, start_time, last_pulse_time, step_size)\n compiled_tlist[pulse_ind].append(idling_tlist)\n compiled_coeffs[pulse_ind].append(np.zeros(len(idling_tlist)))\n\n # Add the gate time and coeffs to the list.\n execution_time = gate_tlist + start_time\n last_pulse_time = 
execution_time[-1]\n compiled_tlist[pulse_ind].append(execution_time)\n compiled_coeffs[pulse_ind].append(coeffs)\n\n for i in range(num_controls):\n if not compiled_coeffs[i]:\n compiled_tlist[i] = None\n compiled_coeffs[i] = None\n else:\n compiled_tlist[i] = np.concatenate(compiled_tlist[i])\n compiled_coeffs[i] = np.concatenate(compiled_coeffs[i])\n return compiled_tlist, compiled_coeffs", "def b_mix(s, p, phase='x'): # (Validated)\n b_mix = 0.0\n for i in range(1, p.m['n']+1):\n b_mix += s.c[i][phase]*s.c[i]['b']\n return b_mix", "def pru_cal(variables, values, reduce_factor):\n names = [variable.name for variable in variables]\n values = np.array(values)\n values = [np.transpose(value) for value in values]\n\n kernel_index = []\n for name in names:\n if name.find(\"kernel\") != -1 or name.find(\"weights\") != -1:\n kernel_index.append(names.index(name))\n\n # The definition of redundancy\n channel_to_delete_pack = []\n pruning_number_pack = []\n for i in kernel_index:\n layer = values[i]\n M = np.sum(abs(layer)) / np.prod(np.shape(layer))\n channel = np.shape(layer)[0]\n S = np.zeros(channel)\n for j in range(channel):\n kernel = layer[j]\n s = np.sum(abs(kernel) < M) / np.prod(np.shape(kernel))\n S[j] = s\n index = np.argsort(S)\n channel_to_delete_pack.append(index)\n pruning_number = int(channel * reduce_factor)\n pruning_number_pack.append(pruning_number)\n\n return kernel_index, channel_to_delete_pack, pruning_number_pack", "def compute_partials(self, inputs, partials):\n t = inputs['t']\n\n nn = self.options['num_nodes']\n\n omega = self.options['omega']\n gha = self.options['gha0'] + omega * t\n\n partials['q_IE', 't'][:nn] = -np.sin(gha) * omega\n partials['q_IE', 't'][nn:2*nn] = -np.cos(gha) * omega", "def brentq(x1, b, U, gamma, idens, ixmom, iymom, iener,\n TOL=1.e-6, ITMAX=100):\n\n # initialize variables\n a = x1\n c = 0.0\n d = 0.0\n fa = f(a, U, gamma, idens, ixmom, iymom, iener)\n fb = f(b, U, gamma, idens, ixmom, iymom, iener)\n fc = 0.0\n\n # root found\n if fa * fb >= 0.0:\n return x1\n\n # switch variables\n if abs(fa) < abs(fb):\n a, b = b, a\n fa, fb = fb, fa\n\n c = a\n fc = fa\n\n mflag = True\n\n for _ in range(ITMAX):\n if fa != fc and fb != fc: # pylint: disable=consider-using-in\n s = a*fb*fc / ((fa-fb) * (fa-fc)) + b*fa*fc / ((fb-fa)*(fb-fc)) + \\\n c*fa*fb / ((fc-fa)*(fc-fb))\n else:\n s = b - fb * (b-a) / (fb-fa)\n\n # test conditions and store in con1-con5\n con1 = False\n\n if 0.25 * (3.0 * a + b) < b:\n if s < 0.25 * (3.0 * a + b) or s > b:\n con1 = True\n elif s < b or s > 0.25 * (3.0 * a + b):\n con1 = True\n\n con2 = mflag and abs(s-b) >= 0.5 * abs(b-c)\n\n con3 = (not mflag) and abs(s-b) >= 0.5 * abs(c-d)\n\n con4 = mflag and abs(b-c) < TOL\n\n con5 = (not mflag) and abs(c-d) < TOL\n\n if con1 or con2 or con3 or con4 or con5:\n s = 0.5 * (a + b)\n mflag = True\n else:\n mflag = False\n\n # evaluate at midpoint and set new limits\n fs = f(s, U, gamma, idens, ixmom, iymom, iener)\n\n if abs(fa) < abs(fb):\n a, b = b, a\n fa, fb = fb, fa\n\n d = c\n c = b\n fc = fb\n\n if fa * fs < 0.0:\n b = s\n fb = fs\n else:\n a = s\n fa = fs\n\n # found solution to required tolerance\n if fb == 0.0 or fs == 0.0 or abs(b-a) < TOL:\n return b\n\n return x1", "def _solve_freq_rb(self, d, v, a, force, freqw, freqw2, incrb, unc):\n if self.rbsize and incrb:\n rb = self.rb\n if self.m is not None:\n if unc:\n a_rb = self.invm[self._rb] * force[rb]\n else:\n a_rb = la.lu_solve(self.imrb, force[rb], check_finite=False)\n else:\n a_rb = force[rb]\n if \"d\" in incrb or 
\"v\" in incrb:\n pvnz = freqw != 0\n if \"v\" in incrb:\n v[rb, pvnz] = (-1j / freqw[pvnz]) * a_rb[:, pvnz]\n if \"d\" in incrb:\n d[rb, pvnz] = (-1.0 / freqw2[pvnz]) * a_rb[:, pvnz]\n if \"a\" in incrb:\n a[rb] = a_rb", "def fluxes_increments_to_actual(example_dict):\n\n edge_heights_m_agl = get_grid_cell_edges(example_dict[HEIGHTS_KEY])\n grid_cell_widths_metres = get_grid_cell_widths(edge_heights_m_agl)\n\n num_examples = len(example_dict[VALID_TIMES_KEY])\n num_heights = len(example_dict[HEIGHTS_KEY])\n\n grid_cell_width_matrix_metres = numpy.reshape(\n grid_cell_widths_metres, (1, num_heights)\n )\n grid_cell_width_matrix_metres = numpy.repeat(\n grid_cell_width_matrix_metres, repeats=num_examples, axis=0\n )\n\n down_flux_increment_matrix_w_m03 = get_field_from_dict(\n example_dict=example_dict, field_name=SHORTWAVE_DOWN_FLUX_INC_NAME\n )\n up_flux_increment_matrix_w_m03 = get_field_from_dict(\n example_dict=example_dict, field_name=SHORTWAVE_UP_FLUX_INC_NAME\n )\n\n down_flux_matrix_w_m02 = numpy.cumsum(\n down_flux_increment_matrix_w_m03 * grid_cell_width_matrix_metres,\n axis=1\n )\n up_flux_matrix_w_m02 = numpy.cumsum(\n up_flux_increment_matrix_w_m03 * grid_cell_width_matrix_metres,\n axis=1\n )\n\n down_flux_matrix_w_m02 = numpy.maximum(down_flux_matrix_w_m02, 0.)\n up_flux_matrix_w_m02 = numpy.maximum(up_flux_matrix_w_m02, 0.)\n\n vector_target_names = example_dict[VECTOR_TARGET_NAMES_KEY]\n found_down_flux = SHORTWAVE_DOWN_FLUX_NAME in vector_target_names\n found_up_flux = SHORTWAVE_UP_FLUX_NAME in vector_target_names\n\n if not found_down_flux:\n vector_target_names.append(SHORTWAVE_DOWN_FLUX_NAME)\n if not found_up_flux:\n vector_target_names.append(SHORTWAVE_UP_FLUX_NAME)\n\n down_flux_index = vector_target_names.index(SHORTWAVE_DOWN_FLUX_NAME)\n up_flux_index = vector_target_names.index(SHORTWAVE_UP_FLUX_NAME)\n example_dict[VECTOR_TARGET_NAMES_KEY] = vector_target_names\n\n if found_down_flux:\n example_dict[VECTOR_TARGET_VALS_KEY][..., down_flux_index] = (\n down_flux_matrix_w_m02\n )\n else:\n example_dict[VECTOR_TARGET_VALS_KEY] = numpy.insert(\n example_dict[VECTOR_TARGET_VALS_KEY],\n obj=down_flux_index, values=down_flux_matrix_w_m02, axis=-1\n )\n\n if found_up_flux:\n example_dict[VECTOR_TARGET_VALS_KEY][..., up_flux_index] = (\n up_flux_matrix_w_m02\n )\n else:\n example_dict[VECTOR_TARGET_VALS_KEY] = numpy.insert(\n example_dict[VECTOR_TARGET_VALS_KEY],\n obj=up_flux_index, values=up_flux_matrix_w_m02, axis=-1\n )\n\n return example_dict", "def reconstruct_pu(self, receivers, compute_uxy = True):\n # Initialize\n self.p_recon = np.zeros((receivers.coord.shape[0], len(self.controls.k0)), dtype=complex)\n self.uz_recon = np.zeros((receivers.coord.shape[0], len(self.controls.k0)), dtype=complex)\n if compute_uxy:\n self.ux_recon = np.zeros((receivers.coord.shape[0], len(self.controls.k0)), dtype=complex)\n self.uy_recon = np.zeros((receivers.coord.shape[0], len(self.controls.k0)), dtype=complex)\n # Loop over frequency\n bar = tqdm(total = len(self.controls.k0), desc = 'Reconstructing sound field...')\n for jf, k0 in enumerate(self.controls.k0):\n # get the scaled version of the propagating directions\n k_p = k0 * self.dir\n # Form the new sensing matrix\n h_mtx = np.exp(1j*receivers.coord @ k_p.T)\n # compute P and U\n self.p_recon[:,jf] = h_mtx @ self.pk[:,jf]\n self.uz_recon[:,jf] = -((np.divide(k_p[:,2], k0)) * h_mtx) @ self.pk[:,jf]\n if compute_uxy:\n self.ux_recon[:,jf] = -((np.divide(k_p[:,0], k0)) * h_mtx) @ self.pk[:,jf]\n self.uy_recon[:,jf] = 
-((np.divide(k_p[:,1], k0)) * h_mtx) @ self.pk[:,jf]\n bar.update(1)\n bar.close()", "def compute_partials(self, inputs, partials):\n I_bat = inputs['I_bat']\n SOC = inputs['SOC']\n\n self.dCh_dg, self.dCh_drho = KSfunction.derivatives(I_bat - self.Imax, self.rho)\n self.dDs_dg, self.dDs_drho = KSfunction.derivatives(self.Imin - I_bat, self.rho)\n self.dS0_dg, self.dS0_drho = KSfunction.derivatives(self.SOC0 - SOC, self.rho)\n self.dS1_dg, self.dS1_drho = KSfunction.derivatives(SOC - self.SOC1, self.rho)\n\n self.dCh_dg = self.dCh_dg.flatten()\n self.dCh_drho = self.dCh_drho.flatten()\n self.dDs_dg = self.dDs_dg.flatten()\n self.dDs_drho = self.dDs_drho.flatten()\n self.dS0_dg = self.dS0_dg.flatten()\n self.dS0_drho = self.dS0_drho.flatten()\n self.dS1_dg = self.dS1_dg.flatten()\n self.dS1_drho = self.dS1_drho.flatten()", "def obtain_Bios(self, X):\n i,s,o = 0, 1, 2\n pS = self.obtain_statedist(X)\n\n b = np.einsum(self.O, [i,s,o], pS, [s], [i,s,o], optimize=self.opti)\n bsum = b.sum(axis=1, keepdims=True)\n bsum = bsum + (bsum == 0) # to avoid dividing by zero\n Biso = b /bsum\n Bios = np.swapaxes(Biso, 1,-1)\n \n return Bios", "def get_brdugates(brdu, x_brdu=None, plotting=False):\n if x_brdu is None:\n mx = np.max(brdu.tolist())+0.01\n x_brdu = np.arange(-0.01, mx, 1)\n f_brdu = findpeaks.get_kde(brdu, x_brdu) # brdu should be an array\n peak_amp, peak_loc, peak_width = findpeaks.findpeaks(\n f_brdu.tolist(), npeaks=1)\n\n # choose BRDU cutoff based on half-proximal width and\n # right trough of peak\n width_2p5 = int((peak_loc + 2.5 * peak_width[0])[0])\n width_5 = int((peak_loc + 5 * peak_width[0])[0])\n\n # Find location of minimun on the right\n f_neg = [-x for x in f_brdu[width_2p5:width_5]]\n _, trough_loc, _ = findpeaks.findpeaks(f_neg, npeaks=1)\n if np.any(trough_loc):\n trough_loc = trough_loc[0] + peak_loc[0] - 1\n else:\n trough_loc = width_2p5\n brdu_cutoff = x_brdu[trough_loc]\n if plotting:\n plt.plot(x_brdu, f_brdu)\n plt.plot([brdu_cutoff, brdu_cutoff],\n [0, 0.5 * peak_amp])\n return brdu_cutoff", "def motors_update(t, x, u, params={}):\n tm = params['motor']['tm'] # Motor torque constant\n cr = params['motor']['cr'] # Motor speed constant\n wb = params['motor']['wb'] # Motor base speed\n\n u = np.clip(u / 199999, 0, 1)\n accel = [(cr * throttle + wb - speed) / tm for throttle, speed in zip(u, x)]\n\n return accel", "def fluid_func(self):\n residual = []\n for fluid, x in self.inl[0].fluid.val.items():\n res = x * self.inl[0].m.val_SI\n for o in self.outl:\n res -= o.fluid.val[fluid] * o.m.val_SI\n residual += [res]\n return residual", "def make_albedo(self, x_sol, unc, nbands=7):\n\n n_doys = x_sol.shape[0] / 3\n n_bands = x_sol.shape[1]\n bhr_spectral = np.zeros((n_doys, n_bands))\n bhr_spectral_unc = np.zeros((n_doys, n_bands))\n bhr_spectral_nbar = np.zeros((n_doys, n_bands))\n bhr_spectral_nbar_unc = np.zeros((n_doys, n_bands))\n bhr_bb = np.zeros((n_doys, 3))\n bhr_bb_unc = np.zeros((n_doys, 3))\n to_vis = np.array([0.3265, 0., 0.4364, 0.2366, 0, 0, 0])\n a_to_vis = -0.0019\n to_nir = np.array([0., 0.5447, 0, 0, 0.1363, 0.0469, 0.2536])\n a_to_nir = -0.0068\n to_sw = np.array([0.3973, 0.2382, 0.3489, -0.2655, 0.1604, -0.0138, 0.0682])\n a_to_sw = 0.0036\n for band in xrange(n_bands):\n u1 = np.sqrt(unc[band].diagonal()[:n_doys])\n u2 = np.sqrt(unc[band].diagonal()[n_doys:(n_doys * 2)])\n u3 = np.sqrt(unc[band].diagonal()[2 * n_doys:])\n\n bhr_spectral[:, band] = (x_sol[:(n_doys), band] +\n 0.189184 * x_sol[(n_doys):(2 * n_doys), band] +\n 1.377622 * x_sol[(2 * 
n_doys):, band])\n bhr_spectral_unc[:, band] = (u1 + 0.189184 * u2 + 1.377622 * u3)\n\n bhr_bb[:, 0] = np.sum(bhr_spectral * to_vis, axis=1) + a_to_vis\n bhr_bb[:, 1] = np.sum(bhr_spectral * to_nir, axis=1) + a_to_nir\n bhr_bb[:, 2] = np.sum(bhr_spectral * to_sw, axis=1) + a_to_sw\n bhr_bb_unc[:, 0] = np.sum(bhr_spectral_unc * to_vis, axis=1) + a_to_vis\n bhr_bb_unc[:, 1] = np.sum(bhr_spectral_unc * to_nir, axis=1) + a_to_nir\n bhr_bb_unc[:, 2] = np.sum(bhr_spectral_unc * to_sw, axis=1) + a_to_sw\n\n kr = Kernels(0, 20, 0, \\\n LiType='Sparse', doIntegrals=False, \\\n normalise=1, RecipFlag=True, RossHS=False, MODISSPARSE=True, \\\n RossType='Thick')\n\n # n_doys = x_sol.shape[0] / 3\n for band in xrange(self.nbands):\n bhr_spectral_nbar[:, band] = (x_sol[:(n_doys), band] +\n kr.Ross[0] * x_sol[(n_doys):(2 * n_doys), band] +\n kr.Li[0] * x_sol[(2 * n_doys):, band])\n\n u1 = np.sqrt(unc[band].diagonal()[:n_doys])\n u2 = np.sqrt(unc[band].diagonal()[n_doys:(n_doys * 2)])\n u3 = np.sqrt(unc[band].diagonal()[2 * n_doys:])\n bhr_spectral_nbar_unc[:, band] = (u1 + kr.Ross[0] * u2 + kr.Li[0] * u3)\n\n return bhr_spectral[self.min_doy:self.max_doy], bhr_spectral_unc[self.min_doy:self.max_doy],\\\n bhr_bb[self.min_doy:self.max_doy], bhr_bb_unc[self.min_doy:self.max_doy],\\\n bhr_spectral_nbar[self.min_doy:self.max_doy], bhr_spectral_nbar_unc[self.min_doy:self.max_doy]" ]
[ "0.5621422", "0.5476572", "0.5280086", "0.5161614", "0.5074294", "0.49908358", "0.49579656", "0.48839134", "0.48619267", "0.48579392", "0.48389107", "0.48320952", "0.4826791", "0.48213837", "0.48160282", "0.4811936", "0.48002857", "0.4795177", "0.47753403", "0.4768853", "0.4751825", "0.4745213", "0.4737876", "0.47342184", "0.4717476", "0.47160357", "0.46291357", "0.46277666", "0.4620215", "0.46052235", "0.45996413", "0.4580219", "0.45801565", "0.45792916", "0.4575378", "0.4563875", "0.4557071", "0.45351028", "0.45301428", "0.45225385", "0.45188004", "0.44975865", "0.44973728", "0.44943017", "0.44938013", "0.44860634", "0.4484488", "0.4482116", "0.44753674", "0.4473905", "0.44642842", "0.44590378", "0.44578245", "0.44569644", "0.44309273", "0.44282514", "0.44252902", "0.4424874", "0.4421663", "0.44210878", "0.44177487", "0.44169763", "0.44117856", "0.44103912", "0.44071603", "0.4405864", "0.4402495", "0.44024423", "0.44014168", "0.43976223", "0.43902594", "0.43879277", "0.4384373", "0.43745828", "0.43741116", "0.4373826", "0.437301", "0.43717447", "0.4371045", "0.43628824", "0.4358929", "0.43562365", "0.4347466", "0.43467298", "0.4341081", "0.43397507", "0.43250293", "0.43240768", "0.4320951", "0.43204424", "0.43166795", "0.43160868", "0.43151963", "0.43130916", "0.43130204", "0.43123543", "0.43119422", "0.43117526", "0.42987648", "0.42971176" ]
0.76751333
0
Generator over all subclasses of a given class, in depth first order.
def itersubclasses(cls, _seen=None): if not isinstance(cls, type): raise TypeError('itersubclasses must be called with ' 'new-style classes, not %.100r' % cls) if _seen is None: _seen = set() try: subs = cls.__subclasses__() except TypeError: # fails only when cls is type subs = cls.__subclasses__(cls) for sub in subs: if sub not in _seen: _seen.add(sub) yield sub for sub in itersubclasses(sub, _seen): yield sub
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def all_subclasses(cls):\n for subclass in cls.__subclasses__():\n yield subclass\n for subc in all_subclasses(subclass):\n yield subc", "def class_hierarchy(clslist):\n for cls in clslist:\n subclass_list = cls.__subclasses__()\n if subclass_list:\n for subcls in class_hierarchy(subclass_list):\n yield subcls\n else:\n yield cls", "def all_subclasses(cls):\r\n for s in cls.__subclasses__():\r\n yield s\r\n for c in s.all_subclasses():\r\n yield c", "def subclass_iterator(cls, _seen=None):\n\n if not isinstance(cls, type):\n raise TypeError('_subclass_iterator must be called with '\n 'new-style classes, not %.100r' % cls)\n\n _seen = _seen or set()\n\n try:\n subs = cls.__subclasses__()\n except TypeError: # fails only when cls is type\n subs = cls.__subclasses__(cls)\n for sub in subs:\n if sub not in _seen:\n _seen.add(sub)\n yield sub\n for sub in subclass_iterator(sub, _seen):\n yield sub", "def subclass_iterator(cls, _seen=None):\r\n\r\n if not isinstance(cls, type):\r\n raise TypeError('_subclass_iterator must be called with '\r\n 'new-style classes, not %.100r' % cls)\r\n\r\n _seen = _seen or set()\r\n\r\n try:\r\n subs = cls.__subclasses__()\r\n except TypeError: # fails only when cls is type\r\n subs = cls.__subclasses__(cls)\r\n for sub in subs:\r\n if sub not in _seen:\r\n _seen.add(sub)\r\n yield sub\r\n for sub in subclass_iterator(sub, _seen):\r\n yield sub", "def subclasses(cls) -> Iterator:\n for subclass in cls.__subclasses__():\n if subclass._type_definition.description: # type: ignore\n yield subclass\n yield from subclass.subclasses()", "def iter_subclasses(cls, _seen=None, template_classes=[]):\n if not isinstance(cls, type):\n raise TypeError(\n \"itersubclasses must be called with \"\n \"new-style classes, not %.100r\" % cls\n )\n if _seen is None:\n _seen = set()\n try:\n subs = cls.__subclasses__()\n except TypeError: # fails only when cls is type\n subs = cls.__subclasses__(cls)\n for sub in subs:\n if sub not in _seen and sub.__name__ not in template_classes:\n _seen.add(sub)\n # we do not want to yield the templates, but we do want to\n # recurse on them\n yield sub\n for sub in iter_subclasses(sub, _seen, template_classes):\n yield sub", "def itersubclasses(cls, _seen=None):\n if not isinstance(cls, type):\n raise TypeError('itersubclasses must be called with '\n 'new-style classes, not %.100r' % cls)\n if _seen is None:\n _seen = set()\n try:\n subs = cls.__subclasses__()\n except TypeError: # fails only when cls is type\n subs = cls.__subclasses__(cls)\n for sub in subs:\n if sub not in _seen:\n _seen.add(sub)\n yield sub\n for sub in itersubclasses(sub, _seen):\n yield sub", "def __iter__(self):\n for tree in self._tree.subTrees():\n yield self.__class__(tree)", "def base_subclasses(cls):\n for subclass in cls.__subclasses__():\n yield from subclass.base_subclasses()\n if isinstance(subclass.base, type):\n yield subclass", "def get_flattened_subclasses(cls):\n classes = cls.__subclasses__()\n return list(itertools.chain(classes, *map(lambda x: get_flattened_subclasses(x), classes)))", "def _classes_(cls):\n for base_cls in cls.__bases__:\n # Avoid infinite loop\n if base_cls == Sandbox:\n continue\n\n yield base_cls", "def get_subclasses(module, clazz):\n for subclazz_name, subclazz in inspect.getmembers(module):\n if hasattr(subclazz, '__bases__') and clazz in subclazz.__bases__:\n yield (subclazz_name, subclazz)", "def descendants(cls):\n return cls.__subclasses__() + \\\n [g for s in cls.__subclasses__() for g in s.descendants()]", "def 
iter_classes(base_class, *modules, class_filter=None):\n for root_module in modules:\n try:\n module_repo = walk_modules(root_module)\n except:\n continue\n for module in module_repo:\n for obj in vars(module).values():\n if inspect.isclass(obj) and issubclass(obj, base_class) and obj.__module__ == module.__name__:\n if not class_filter or class_filter(obj):\n yield obj", "def get_subclasses(self, klass: TypeInfo) -> OrderedSet[TypeInfo]:\n if klass not in self._graph:\n return OrderedSet([klass])\n result: OrderedSet[TypeInfo] = OrderedSet(nx.descendants(self._graph, klass))\n result.add(klass)\n return result", "def iter_cls(*classes, blacklist=tuple()):\n for bases in permutations(classes):\n if bases not in blacklist:\n yield type('_'.join(c.__name__ for c in bases), bases, {})", "def get_subclasses(self, t):\n if isinstance(t, pytd.ClassType):\n subclasses = self.direct_subclasses.get(t, [])\n return sum((self.get_subclasses(pytd.ClassType(c.name, c))\n for c in subclasses), [t])\n else:\n raise NotImplementedError(f\"Can't extract subclasses from {type(t)}\")", "def get_subclasses(mod, cls):\n for name, obj in inspect.getmembers(mod):\n if hasattr(obj, \"__bases__\") and cls in obj.__bases__:\n yield obj", "def get_subclasses(classes, level=0):\n # for convenience, only one class can can be accepted as argument\n # converting to list if this is the case\n if not isinstance(classes, list):\n classes = [classes]\n\n if level < len(classes):\n classes += classes[level].__subclasses__()\n return get_subclasses(classes, level+1)\n else:\n return classes", "def get_all_subclasses(python_class):\n python_class.__subclasses__()\n\n subclasses = set()\n check_these = [python_class]\n\n while check_these:\n parent = check_these.pop()\n for child in parent.__subclasses__():\n if child not in subclasses:\n subclasses.add(child)\n check_these.append(child)\n\n return sorted(subclasses, key=lambda x: x.__name__)", "def classes(self) -> Iterable[GDScriptClass]:\n for item in self._classes_by_type_id.values():\n yield item", "def walktree(classes, children, parent):\r\n results = []\r\n classes.sort(key=attrgetter('__module__', '__name__'))\r\n for c in classes:\r\n results.append((c, c.__bases__))\r\n if c in children:\r\n results.append(walktree(children[c], children, c))\r\n return results", "def getleafsubclasses(cls):\n scls = itersubclasses(cls)\n return [s for s in scls if not s.__subclasses__()]", "def __subkinds__(cls) -> typing.Iterable[type['dsl.Any']]:\n\n def scan(subs: typing.Iterable[type['dsl.Any']]) -> typing.Iterable[type['dsl.Any']]:\n \"\"\"Scan the class subtree of the given types.\n\n Args:\n subs: Iterable of classes to descend from.\n\n Returns:\n Iterable of all subclasses.\n \"\"\"\n return (s for c in subs for s in (c, *scan(c.__subclasses__())))\n\n return {k for k in scan(cls.__subclasses__()) if not inspect.isabstract(k)}", "def get_subclasses(base_class: callable) -> list:\n all_subclasses = []\n\n for subclass in base_class.__subclasses__():\n if not inspect.isabstract(subclass):\n all_subclasses.append(subclass)\n all_subclasses.extend(get_subclasses(subclass))\n\n return all_subclasses", "def Subclass_finder(cls):\n\n subclasses = [] # Create a list to deposit subclasses\n\n for subclass in cls.__subclasses__():\n subclasses.append(subclass) # Add founded subclass\n subclasses.extend(Subclass_finder(subclass)) # Check if there is a subclass\n # of a subclass.\n\n Output_types = [] # Create a list to deposit final strings\n for i in range(len(subclasses)): \n 
instance = subclasses[i]() # Create an instance for the \n Output_types.append(instance.kind) # Add them to the output list\n \n return Output_types", "def all_subclasses(cls):\n return cls.__subclasses__() + [g for s in cls.__subclasses__()\n for g in all_subclasses(s)]", "def get_subclasses(self, class_name):\n return class_name.__subclasses__()", "def GetScaffolderClasses(cls) -> Iterator[Type[interface.Scaffolder]]:\n for scaffolder_class in cls._scaffolder_classes.values():\n yield scaffolder_class", "def node_subclasses(root, abstract = False):\r\n classes = []\r\n for c in utils.subclass_iterator(root):\r\n try:\r\n info = get_node_info(c)\r\n\r\n node_type = info.get(\"type\")\r\n if node_type != \"abstract\":\r\n classes.append(c)\r\n except AttributeError:\r\n pass\r\n\r\n return classes", "def inheritors(cls):\n subclasses = set()\n work = [cls]\n while work:\n parent = work.pop()\n for child in parent.__subclasses__():\n if child not in subclasses:\n subclasses.add(child)\n work.append(child)\n return subclasses", "def get_all_superclasses(cls):\n classes = []\n for superclass in cls.__bases__:\n for c in get_all_superclasses(superclass):\n if c is not object and c not in classes:\n classes.append(c)\n for superclass in cls.__bases__:\n if superclass is not object and superclass not in classes:\n classes.append(superclass)\n\n return classes", "def getclasstree(classes, unique=0):\r\n children = {}\r\n roots = []\r\n for c in classes:\r\n if c.__bases__:\r\n for parent in c.__bases__:\r\n if not parent in children:\r\n children[parent] = []\r\n children[parent].append(c)\r\n if unique and parent in classes: break\r\n elif c not in roots:\r\n roots.append(c)\r\n for parent in children:\r\n if parent not in classes:\r\n roots.append(parent)\r\n return walktree(roots, children, None)", "def get_all_subclasses(asts):\n hierarchy = {}\n for ast in asts:\n hierarchy.update(ast.Visit(visitors.ExtractSuperClasses()))\n def filter_superclasses(superclasses):\n return [superclass for superclass in superclasses\n if is_complete(superclass)]\n hierarchy = {cls: filter_superclasses(superclasses)\n for cls, superclasses in hierarchy.items() if is_complete(cls)}\n # typically this is a fairly short list, e.g.:\n # [ClassType(basestring), ClassType(int), ClassType(object)]\n return utils.invert_dict(hierarchy)", "def get_all_subclasses(the_class, check_meta_abstract=True, top=True):\n kids = the_class.__subclasses__()\n\n result = set(kids).union(\n [s for c in kids for s in get_all_subclasses(c, check_meta_abstract, False)])\n\n if top:\n if check_meta_abstract:\n non_abstract_result = []\n for k in result:\n if not k._meta.abstract:\n non_abstract_result.append(k)\n result = non_abstract_result\n return sorted(result)", "def get_superclasses(self, klass: TypeInfo) -> OrderedSet[TypeInfo]:\n if klass not in self._graph:\n return OrderedSet([klass])\n result: OrderedSet[TypeInfo] = OrderedSet(nx.ancestors(self._graph, klass))\n result.add(klass)\n return result", "def discover_classes(\n package,\n cls_match_func=trivial,\n module_match_func=trivial,\n):\n for module in discover_modules(package, module_match_func):\n # Check all the classes in that module\n for _, imported_class in inspect.getmembers(module, inspect.isclass):\n # Don't include things that are only there due to a side-effect of\n # importing\n if imported_class.__module__ != module.__name__:\n continue\n\n if cls_match_func(imported_class):\n yield imported_class", "def subclasses(cls, instantiate=True, **kw):\n\n lcls = 
cls.__subclasses__()\n rcls = lcls + list(chain.from_iterable([c.__subclasses__() for c in lcls]))\n for c in rcls:\n assert isinstance(c.priority, int), \\\n \"type(%s . priority) = %s != int\" % (repr(c), type(c.priority.name))\n\n clss = sorted(rcls, key=lambda c: c.priority, reverse=True)\n\n if not instantiate:\n return [C for C in clss\n if not C.__name__.startswith(\"_\")]\n\n return [C(**kw) for C in clss\n if not C.__name__.startswith(\"_\")]", "def inheritors(klass):\n subclasses = set()\n work = [klass]\n while work:\n parent = work.pop()\n for child in parent.__subclasses__():\n if child not in subclasses:\n subclasses.add(child)\n work.append(child)\n return subclasses", "def inheritors(klass):\n subclasses = set()\n work = [klass]\n while work:\n parent = work.pop()\n for child in parent.__subclasses__():\n if child not in subclasses:\n subclasses.add(child)\n work.append(child)\n return subclasses", "def find_all(m, cls):\n return [node for node in ast.walk(m) if isinstance(node, cls)]", "def FindChilds(self,cls):\n childs = []\n for i in self.classes:\n if self.InheritsFrom(cls,i):\n childs.append(i)\n if childs:\n return childs\n else:\n return False", "def get_subclasses(cls, include_parents=False):\n subclasses = dict()\n for child in cls.__subclasses__():\n grandchildren = child.get_subclasses(include_parents)\n subclasses.update(grandchildren)\n if include_parents or not grandchildren:\n subclasses[child.__name__] = child\n\n return subclasses", "def descendant_classes(self):\n response = check_defined(self, inspect.stack()[0][3])\n if not response:\n return response\n descendants = nx.descendants(self.se.full_class_only_graph,\n self.uri)\n result = restructure_output(self,\n descendants,\n inspect.stack()[0][3],\n self.output_type)\n return result", "def expand_classes_glob(classes, salt_data):\n all_classes = []\n expanded_classes = []\n saltclass_path = salt_data[\"path\"]\n\n for _class in classes:\n all_classes.extend(match_class_glob(_class, saltclass_path))\n\n for _class in all_classes:\n if _class not in expanded_classes:\n expanded_classes.append(_class)\n\n return expanded_classes", "def __iter__(self):\n for classresult in self.classresults:\n yield classresult", "def _classesToCheck(self, cls):\r\n yield cls\r\n yield from inspect.getmro(cls)", "def classes(self):\n return list(self._classes_generator())", "def get_all_subclasses(parent: type, include_private_sublcasses=False) -> t.List[type]:\r\n\r\n # I refuse to use recursion in Python. 
I am afraid that I would melt the CPU.\r\n # => do it iteratively\r\n i: int = 0 # Index of last class that has been checked for subclasses\r\n all_subs = parent.__subclasses__()\r\n while i < len(all_subs):\r\n\r\n # Check if the subclass I found is already on the list, becaus OF COURSE Python supports multiple inheritance.\r\n # Am I not glad I did not use recursion?\r\n\r\n new_subs = (all_subs[i]).__subclasses__()\r\n for sub in new_subs:\r\n if not sub in all_subs:\r\n all_subs.append(sub)\r\n i += 1\r\n\r\n if include_private_sublcasses:\r\n return all_subs\r\n else:\r\n return [sub for sub in all_subs if not sub.__name__[0] == '_']", "def find_classes(cls, cutoff_class=None):\n cutoff_class = cutoff_class or Interface\n module = sys.modules[__name__]\n for ni, vi in inspect.getmembers(module, inspect.isclass):\n if issubclass(vi, cutoff_class) and vi is not cutoff_class:\n yield vi", "def __iter__(self):\n for benchclass in sorted(self.classes.values()):\n yield benchclass", "def instantiate_all_classes(self, node):\n self.instantiate_classes(node)\n\n for cls in node.classes:\n self.instantiate_classes(cls)\n\n for ns in node.namespaces:\n self.instantiate_all_classes(ns)", "def get_superclasses(self, t):\n if isinstance(t, pytd.ClassType):\n return sum((self.get_superclasses(c) for c in t.cls.bases), [t])\n elif isinstance(t, pytd.AnythingType):\n # All types, even \"?\", inherit from object.\n return [pytd.NamedType(\"builtins.object\")]\n elif isinstance(t, pytd.GenericType):\n return self.get_superclasses(t.base_type)\n else:\n log.warning(\"Can't extract superclasses from %s\", type(t))\n return [pytd.NamedType(\"builtins.object\")]", "def get_all_subclasses(base_class: Type[T]) -> Set[Type[T]]:\n return set(base_class.__subclasses__()).union(s for c in base_class.__subclasses__() for s in get_all_subclasses(c))", "def get_all_classes_defined_in_module(module):\n for _cls in inspect.getmembers(module, inspect.isclass):\n if module.__name__ == _cls[1].__module__:\n yield _cls", "def _traverse_tree(self):\n if not self.children:\n yield self\n for child in self.children:\n yield from child._traverse_tree()", "def _get_all_bases(class_or_name: Union[str, Type]) -> List[str]:\n if isinstance(class_or_name, str):\n return [class_or_name]\n\n classes = [class_or_name.__name__]\n for base in class_or_name.__bases__:\n classes.extend(_get_all_bases(base))\n\n return deduplicate(classes)", "def subtrees(self):\n yield from subtrees(self)", "def iter_tree(self):\n yield self\n for c in self.children:\n for ci in c.iter_tree:\n yield ci", "def walk(self):\n yield self\n for child in self.children:\n for descendant in child.walk():\n yield descendant", "def get_intermediate_classes(cls, baseclass):\n classes = inspect.getmro(cls)\n classes = [c for c in classes if issubclass(c, baseclass)]\n return classes", "def HierarchyIterator(obj):\n while obj:\n yield obj\n for opChild in SplineInputGeneratorHelper.HierarchyIterator(obj.GetDown()):\n yield opChild\n obj = obj.GetNext()", "def all_subclasses(classes: Iterable[Type]) -> Set[Type]:\n return reduce(set.union, [{cls}.union(\n [s for c in cls.__subclasses__() for s in all_subclasses([c])])\n for cls in classes], set())", "def iter_spider_classes(module):\n ...", "def collect_subclasses(parent, suffix=None):\n\n subclasses = {}\n for c in subclass_iterator(parent):\n if hasattr(c, \"_ns_object_name\"):\n name = getattr(c, \"_ns_object_name\")\n else:\n name = to_identifier(decamelize(c.__name__))\n\n if suffix and name.endswith(suffix):\n 
name = name[:-len(suffix)]\n subclasses[name] = c\n\n return subclasses", "def all_subtrees(self):\n yield self\n for subtree in self:\n for subsubtree in subtree.all_subtrees():\n yield subsubtree", "def descendants(self):\n yield self\n for child in self.children:\n if isinstance(child, ParentBox):\n for grand_child in child.descendants():\n yield grand_child\n else:\n yield child", "def all_named_subclasses(cls):\n\n subclasses = set()\n\n # Search each of our subclasses...\n for subclass in cls.__subclasses__():\n # If the current class is named, add it to our list...\n if subclass.UI_NAME:\n subclasses.add(subclass)\n\n # ... and explore all of its subclasses.\n subclasses.update(subclass.all_named_subclasses())\n\n return subclasses", "def __iter__(self):\n for child in self.children:\n yield child", "def at_depth(self, depth):\n\n for child in list(self.children):\n if depth == 0:\n yield child\n else:\n for grandchild in child.at_depth(depth - 1):\n yield grandchild", "def get_all_class_symbols_in_program(program=None, class_path=None):\n if program is None:\n program = getState().getCurrentProgram()\n\n iterator_list = []\n if class_path is None:\n iterator_list.append(program.getSymbolTable().getDefinedSymbols())\n else:\n class_path = ghidra_utils.SymbolDescriptor(class_path).to_ghidra()\n namespaces = ghidra.app.util.NamespaceUtils.getNamespaces( class_path, None, program)\n if namespaces is not None:\n iterator_list.append(program.getSymbolTable().getSymbols(namespaces[0]))\n \n while iterator_list:\n symit = iterator_list.pop()\n for symbol in symit:\n if symbol.getSymbolType() == ghidra.program.model.symbol.SymbolType.CLASS:\n yield symbol\n elif symbol.getSymbolType() == ghidra.program.model.symbol.SymbolType.NAMESPACE:\n # if it's a namespace, get an iterator over it and add that to the list of iterators we're working through\n # ghidra namespacesymbols aren't namespaces, so gross conversion :/\n namespace = ghidra.app.util.NamespaceUtils.getNamespaces( symbol.getName(True), None, program)\n if namespace is not None:\n iterator_list.append(program.getSymbolTable().getSymbols(namespace[0]))", "def get_subclasses(cls) -> dict:\n return dict(cls._subclasses)", "def _getChildrenOfType(self, elementClass):\n method = getattr(self.__class__, \"_getChildrenOfType\" + elementClass.__name__)\n return method(self)", "def list_all_classes(self):\n classes = list(self.extended_class_only_graph.nodes())\n classes = [SchemaClass(_cls, self) for _cls in classes]\n return classes", "def flatten(class_dict):\r\n for cls, fields_list in class_dict.items():\r\n for fields in fields_list:\r\n yield (cls, fields)", "def all(cls, package=None):\n # Determine modules that may contain extensions.\n packages = get_packages()\n if package is None:\n modules = packages.modules\n elif isinstance(package, Package):\n modules = package.modules\n else:\n modules = packages[package].modules\n # Find all subclasses of `cls`.\n subclasses = [cls]\n # Used to weed out duplicates (due to diamond inheritance).\n seen = set([cls])\n idx = 0\n while idx < len(subclasses):\n base = subclasses[idx]\n # Allow subclasses to override `all()`.\n for subclass in (base.__subclasses__()\n if base.all.__func__ is cls.all.__func__\n else base.all(package)):\n if subclass not in seen:\n subclasses.append(subclass)\n seen.add(subclass)\n idx += 1\n # Find disabled implementations.\n disabled = set()\n for key in cls.disable_map:\n interface, module = key\n if module in packages.modules and issubclass(interface, cls):\n 
disabled.update(cls.disable_map[key])\n # Filter out abstract classes, disabled implementations and\n # implementations not included with the active application.\n implementations = []\n for subclass in subclasses:\n if subclass.__module__ not in modules:\n continue\n if disabled:\n matches = [subclass]\n matches.append(subclass.__name__)\n matches.append(\n \"%s.%s\"\n % (subclass.__module__, subclass.__class__.__name__))\n if isinstance(subclass.priority, str):\n matches.append(subclass.priority)\n if isinstance(subclass.priority, list):\n for priority in subclass.priority:\n if isinstance(priority, str):\n matches.append(priority)\n if subclass.signature.__func__ is not \\\n Extension.signature.__func__:\n matches.append(subclass.signature())\n if any([match in matches for match in disabled]):\n continue\n implementations.append(subclass)\n return [implementation\n for implementation in implementations\n if implementation.enabled()]", "def derived_classes(self, what: Union[GDScriptClass, str, int]):\n base_cls: Optional[GDScriptClass] = None\n if isinstance(what, GDScriptClass):\n base_cls = what\n else:\n base_cls = self.get_class(what)\n\n for cls in self._classes_by_type_id.values():\n if cls.base == base_cls:\n yield cls", "def find(self, type_cls=None, name=None, parent=None,\n recurse=True, **kwargs):\n for child in self.children:\n yield_child = True\n if type_cls is not None and not isinstance(child, type_cls):\n yield_child = False\n if name is not None and child.name != name:\n yield_child = False\n if parent is not None and child.parent != parent:\n yield_child = False\n for key in kwargs:\n child_value = getattr(child, key, DOESNOTEXIST)\n if child_value == DOESNOTEXIST or child_value != kwargs[key]:\n yield_child = False\n break\n if yield_child:\n yield child\n if recurse:\n # yield from FTW! 
But we want to support older Python\n # versions too...\n for cchild in child.find(type_cls=type_cls, name=name,\n parent=parent, recurse=recurse,\n **kwargs):\n yield cchild", "def getClasses(self):\n self._process()\n return self._sets", "def __next__(self):\n for child in self.children:\n yield child", "def find_all_classes(module_path: Union[str, ModuleType], cls: type) -> List[type]:\n if isinstance(module_path, ModuleType):\n mod = module_path\n else:\n mod = importlib.import_module(module_path)\n\n cls_list = []\n\n def _append_cls(obj):\n # Leverage the closure trick to reuse code\n if isinstance(obj, type) and issubclass(obj, cls) and cls not in cls_list:\n cls_list.append(obj)\n\n for attr in dir(mod):\n _append_cls(getattr(mod, attr))\n\n if hasattr(mod, \"__path__\"):\n # if the model is a package\n for _, modname, _ in pkgutil.iter_modules(mod.__path__):\n sub_mod = importlib.import_module(f\"{mod.__package__}.{modname}\")\n for m_cls in find_all_classes(sub_mod, cls):\n _append_cls(m_cls)\n return cls_list", "def match_class_glob(_class, saltclass_path):\n straight, sub_init, sub_straight = get_class_paths(_class, saltclass_path)\n classes = []\n matches = []\n matches.extend(glob.glob(straight))\n matches.extend(glob.glob(sub_straight))\n matches.extend(glob.glob(sub_init))\n if not matches:\n log.warning(\"%s: Class globbing did not yield any results\", _class)\n for match in matches:\n classes.append(get_class_from_file(match, saltclass_path))\n return classes", "def derive_class_hierarchy():\n logger.info('Deriving class hierarchy ...')\n data = statistics.get_json_data('classes')\n\n hierarchy = defaultdict(dict)\n keys = ['i', 's', 'ai', 'as', 'sc', 'sb', 'r']\n\n for cid in data:\n for key in keys:\n if key in data[cid] and data[cid][key]:\n hierarchy[cid][key] = data[cid][key]\n\n statistics.update_json_data('classes/hierarchy', hierarchy)\n statistics.update_split_json_data('classes/hierarchy', hierarchy, 1000)", "def child_classes(self):\n response = check_defined(self, inspect.stack()[0][3])\n if not response:\n return response\n children = self.se.full_class_only_graph.successors(self.uri)\n result = restructure_output(self,\n children,\n inspect.stack()[0][3],\n self.output_type)\n return result", "def registered_subclasses_by_name(cls, name) -> RegisteredSubclassList:\n return [\n subclass\n for subclass in cls.__registered_subclasses__.get(name, [])\n if issubclass(subclass, cls) and subclass is not cls\n ]", "def descendants(self):\n for a in self._related(set(), 'children'):\n yield a", "def _get_ancestor_types(itype):\n\n for i in range(interrogate_type_number_of_derivations(itype)):\n ibase = interrogate_type_get_derivation(itype, i)\n yield ibase\n yield from _get_ancestor_types(ibase)", "def iter_ancestors(self, xsd_classes: ComponentClassType = None)\\\n -> Iterator['XsdComponent']:\n ancestor = self\n while True:\n if ancestor.parent is None:\n break\n ancestor = ancestor.parent\n if xsd_classes is not None and not isinstance(ancestor, xsd_classes):\n break\n yield ancestor", "def permutations(self, key):\n for class_ in inspect.getmro(key):\n yield class_\n if class_ is not object:\n yield object", "def childs(cls, forceLoad: bool = True) -> list:\n if forceLoad:\n ModuleLoader.loadModules(cls.__module__)\n\n return type.__subclasses__(cls)", "def get_class_list(self):\n t = []\n for cls in self.classes:\n if not self.is_opaque(cls.classobj):\n t.append(cls)\n elif cls.parents or cls.childs:\n t.append(cls)\n \n return t", "def parse_bases(self, node, 
clsobj):\n bases = []\n for b in node.bases:\n if not (isinstance(b, Name) and b.id == KW_PROCESS_DEF):\n self.current_context = Read(clsobj)\n bases.append(self.visit(b))\n if isinstance(clsobj, dast.Process):\n # try to resolve the base classes:\n for b in bases:\n try:\n pd = self.resolver.find_process_definiton(b)\n clsobj.merge_scope(pd)\n except ResolverException as e:\n self.warn('unable to resolve base class spec, '\n 'compilation may be incomplete: {}.'\n .format(e.reason), e.node if e.node else b)\n return bases", "def get_instances(cls, recursive=False):\n instances = list(cls._instances)\n if recursive:\n for Child in cls.__subclasses__():\n instances += Child.get_instances(recursive=recursive)\n\n # Remove duplicates from multiple inheritance.\n return list(set(instances))", "def subclass_steps(cls):\n steps = []\n for attrname in dir(cls):\n obj = getattr(cls, attrname)\n if isinstance(obj, type) and issubclass(obj, StepBaseAbs):\n steps.append(obj)\n return steps", "def segms_by_class(cls):\n\tcls = norm(cls)\n\tfor n in xrange(idaapi.get_segm_qty()):\n\t\tseg = idaapi.getnseg(n)\n\t\tif seg and not seg.empty():\n\t\t\tsegcls = norm(idaapi.get_segm_class(seg))\n\t\t\tif segcls == cls:\n\t\t\t\tyield seg", "def __iter__(self):\n\n for i in self._children:\n yield i", "def iter(self):\n for elem in self:\n if isinstance(elem, Tree):\n for elem2 in elem.iter:\n yield elem2\n else:\n yield elem", "def get_inheritable_types(self, node):\n\n assert isinstance(node, ast.Class)\n\n parent_chain = self.get_class_hierarchy(node)\n types = []\n for p in parent_chain:\n types += [self._transformer.lookup_typenode(t) for t in p.interfaces]\n types += [t for t in parent_chain if t is not node]\n return types", "def descendants(self):\n def recurse(node, stack):\n stack.append(node)\n for child in node.children:\n stack = recurse(child, stack)\n return stack\n\n descendants = []\n for child in self.children:\n descendants = recurse(child, descendants)\n return descendants" ]
[ "0.7962573", "0.79427534", "0.7785052", "0.75617856", "0.7538503", "0.75220305", "0.7414212", "0.7337148", "0.7327652", "0.7236801", "0.70275396", "0.7026855", "0.7007763", "0.6963463", "0.6906831", "0.688978", "0.688137", "0.6830467", "0.68002534", "0.67773235", "0.6738663", "0.6710605", "0.6710558", "0.6689011", "0.66405755", "0.66321194", "0.66100526", "0.6550558", "0.6533134", "0.6518949", "0.64872503", "0.6425355", "0.63673586", "0.63312787", "0.6317968", "0.63165504", "0.62845147", "0.627982", "0.62788916", "0.62723553", "0.62723553", "0.62535757", "0.62234974", "0.62154764", "0.61804825", "0.6174773", "0.6161379", "0.61526334", "0.61366343", "0.610071", "0.60870653", "0.6085961", "0.6075192", "0.60632026", "0.6052786", "0.6028963", "0.60070086", "0.60021687", "0.6001715", "0.6001303", "0.59881836", "0.5958615", "0.59496766", "0.593215", "0.5896938", "0.588169", "0.5852754", "0.58250684", "0.58095825", "0.5791615", "0.5788536", "0.5767791", "0.5764674", "0.57608443", "0.57481766", "0.57360595", "0.5733437", "0.57149696", "0.5714961", "0.5709901", "0.5707134", "0.57034135", "0.56986874", "0.5692803", "0.5657516", "0.56457585", "0.5644317", "0.5642726", "0.5615219", "0.5594909", "0.5593895", "0.5585853", "0.5567829", "0.55594623", "0.5557068", "0.5550372", "0.55184937", "0.55086637", "0.550352", "0.5482369" ]
0.7340427
7
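A minimal usage sketch for the itersubclasses generator shown in the record above; the Base/A/A1/B hierarchy is invented purely for illustration and is not part of the dataset.

class Base(object):
    pass

class A(Base):
    pass

class A1(A):
    pass

class B(Base):
    pass

# Depth-first: A's own subclass tree is exhausted before B is visited,
# so the order is A, A1, B rather than the breadth-first A, B, A1.
print([sub.__name__ for sub in itersubclasses(Base)])  # ['A', 'A1', 'B']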
Might be a useful helper
def _get_belt(self, new_score): for score in reversed(scores): if new_score >= score: return BELTS[score].capitalize() return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def regular(self):", "def sth():", "def substantiate():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def __call__():", "def exo2():", "def support(self):", "def __call__(self) -> None:", "def common(self):", "def fn():", "def func():", "def _prepare(self):", "def _prepare(self):", "def result(self):", "def result(self):", "def check():", "def use(self):", "def degibber(self):", "def __call__(self):\n\t\treturn", "def __call__(object):", "def firstFunction(self):", "def apply(self) -> None:", "def apply(self) -> None:", "def _regr_basic():", "def falcon():", "def pick_up(self):", "def items():", "def decide():", "def lookup():", "def mezclar_bolsa(self):", "def call(self):", "def simple():", "def simple():", "def apply(self):", "def preprocess(self):", "def parameters(self):", "def base():", "def __call__(self):\n pass", "def __call__(self):\n pass", "def check(self) -> None:", "def util():\n pass", "def util():\n pass", "def parse(self):", "def map():", "def access():", "def identifier(self):", "def test_solareclipses_get(self):\n pass", "def think(s):", "def _hook(self):", "def one(self):", "def first(self):", "def process(self):", "def process(self):", "def process(self):", "def __call__(self):\n raise NotImplementedError", "def transform(self):", "def test_get_parts(self):\n pass", "def value(self):", "def test_get_part(self):\n pass", "def cx():", "def exercise_b2_53():\r\n pass", "def __call__( self ):\n pass", "def task4_1(self):\n\n pass", "def present(self):", "def test_03_visit_special(self):", "def values():", "def code():", "def test_4_4_1_1(self):\n pass", "def object(self):", "def variable(self):", "def _test(self):", "def _test(self):", "def _test(self):", "def _test(self):", "def _test(self):", "def nulltest():", "def __call__(value):", "def g():", "def __init__():", "def task4(self):\n\n pass", "def validate(ob):", "def __call__(self):", "def __call__(self):", "def function(self):\n raise NotImplementedError", "def exercise_b2_106():\r\n pass", "def post_processor(self):", "def method(self):", "def target(self):", "def exercise_b2_107():\r\n pass", "def get_name():", "def __int__(self):\n pass", "def elems(self):", "def healthcare():", "def query3() :", "def two(self):", "def items(self):", "def _build(self):", "def _build(self):" ]
[ "0.61169916", "0.5881199", "0.5851093", "0.5847512", "0.5847512", "0.5847512", "0.5847512", "0.5847512", "0.5819249", "0.5811447", "0.579026", "0.5758938", "0.57119006", "0.56060624", "0.55889446", "0.55889446", "0.5582659", "0.5582659", "0.5576801", "0.55566376", "0.55192375", "0.55081147", "0.5499162", "0.54773784", "0.5464789", "0.5464789", "0.54518104", "0.5397731", "0.5381497", "0.5381035", "0.5364671", "0.53493", "0.5327608", "0.53140336", "0.5300975", "0.5300975", "0.52786255", "0.5264117", "0.52597964", "0.5250824", "0.5245809", "0.5245809", "0.5236801", "0.5226242", "0.5226242", "0.52042747", "0.519055", "0.5190235", "0.51854545", "0.5185089", "0.5160705", "0.5155613", "0.5152819", "0.51487845", "0.51447576", "0.51447576", "0.51447576", "0.51391274", "0.51355267", "0.51162446", "0.511462", "0.5106076", "0.51033473", "0.5098755", "0.50984275", "0.5091965", "0.50901717", "0.5087694", "0.5077844", "0.507646", "0.50731397", "0.5070356", "0.50648034", "0.5062259", "0.5062259", "0.5062259", "0.5062259", "0.5062259", "0.505019", "0.50442576", "0.50407946", "0.50313276", "0.5018286", "0.5014541", "0.5004786", "0.5004786", "0.500101", "0.49936882", "0.4976244", "0.49726713", "0.49660456", "0.49657726", "0.49640313", "0.49616367", "0.4961562", "0.49568987", "0.49560076", "0.49535406", "0.4953161", "0.49494362", "0.49494362" ]
0.0
-1
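The _get_belt helper in the record above reads two module-level names, scores and BELTS, that the snippet does not define. The sketch below shows one plausible shape for them; the concrete thresholds and belt names are assumptions for illustration only, not taken from the source.

scores = [10, 50, 100]                            # ascending point thresholds (assumed)
BELTS = {10: "white", 50: "green", 100: "black"}  # threshold -> belt name (assumed)

# reversed(scores) walks the thresholds from highest to lowest, so the first
# threshold the new score reaches decides the belt; self is unused, so a
# dummy value is enough to exercise the helper directly.
print(_get_belt(None, 120))   # Black
print(_get_belt(None, 55))    # Green
print(_get_belt(None, 3))     # None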
Method counting photos and creating a list of subfolders with images.
def my_root_listdir(root_dir): root_listdir = [ images_dir for images_dir in os.listdir(root_dir) if not any( characters in images_dir for characters in [".", "test", "train", "valid"] ) ] summ = 0 for images_dir in root_listdir: summ += len(os.listdir(root_dir + "/" + images_dir)) / 2 - 2 print("Sum of images in directories: ", int(summ)) return root_listdir
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def appendpics(pathofimg, w_sub, h_sub, step):\n num = 0\n dirlist = []\n images = [] # images in each folder\n for root, dirs, fileswer in os.walk(pathofimg):\n if len(dirs)!= 0:\n for dir in dirs:\n dirlist.append(dir)\n for rooert, dirwerwes, files in os.walk(pathofimg+'/'+dir):\n for file in files:\n if(file.endswith('.png')):\n images.append(Image.open(pathofimg+'/'+dir+'/'+file))\n if(len(images)==81):\n break\n target = montage(images, w_sub, h_sub, step)\n target.save(pathofimg +'/'+ dir + '.png', quality=100)\n else:\n dir = 'Generated'\n for file in fileswer:\n if (file.endswith('.png')):\n images.append(Image.open(pathofimg +'/'+ file))\n target1 = montage(images, w_sub, h_sub, step)\n savepath = pathofimg +'/'+ 'generated'\n os.makedirs(savepath)\n target1.save(savepath +'/'+ dir + '.png', quality=100)", "def _queue_photos(self):\n global filtering_queue\n global number_of_pictures\n\n number_of_pictures = 0\n\n for root, dirs, files in walk(curdir):\n for file in files:\n if dirs == \"thumb\" or dirs == \"filtered\":\n pass\n else:\n file_path = join(root, file)\n filtering_data = [file_path, curdir]\n filtering_queue.put(filtering_data)\n number_of_pictures += 1\n print(\"Queued:\", file_path)\n\n try:\n mkdir(join(curdir, \"thumb\"))\n except FileExistsError:\n pass\n try:\n mkdir(join(curdir, \"thumb\", \"Alexander\"))\n except FileExistsError:\n pass\n try:\n mkdir(join(curdir, \"thumb\", \"Bjarke\"))\n except FileExistsError:\n pass\n try:\n mkdir(join(curdir, \"thumb\", \"Gabrielle\"))\n except FileExistsError:\n pass\n try:\n mkdir(join(curdir, \"thumb\", \"Monica\"))\n except FileExistsError:\n pass\n try:\n mkdir(join(curdir, \"thumb\", \"Wenche\"))\n except FileExistsError:\n pass\n try:\n mkdir(join(curdir, \"filtered\"))\n except FileExistsError:\n pass", "def create_image_lists(image_dir):\n if not gfile.Exists(image_dir):\n print(\"Image directory '\" + image_dir + \"' not found.\")\n return None\n result = {}\n sub_dirs = [x[0] for x in os.walk(image_dir)]\n # The root directory comes first, so skip it.\n is_root_dir = True\n for sub_dir in sub_dirs:\n print('in sub loop')\n extensions = ['jpg', 'jpeg', 'JPG', 'JPEG']\n file_list = []\n dir_name = os.path.basename(image_dir)\n print(\"Looking for images in '\" + image_dir + \"'\")\n for extension in extensions:\n file_glob = os.path.join(image_dir, dir_name, '*.' + extension)\n file_list.extend(glob.glob(file_glob))\n if not file_list:\n print('No files found')\n continue\n if len(file_list) < 20:\n print('WARNING: Folder has less than 20 images, which may cause issues.')\n label_name = re.sub(r'[^a-z0-9]+', ' ', dir_name.lower())\n testing_images = []\n for file_name in file_list:\n base_name = os.path.basename(file_name)\n # We want to ignore anything after '_nohash_' in the file name when\n # deciding which set to put an image in, the data set creator has a way of\n # grouping photos that are close variations of each other. 
For example\n # this is used in the plant disease data set to group multiple pictures of\n # the same leaf.\n hash_name = re.sub(r'_nohash_.*$', '', file_name)\n # This looks a bit magical, but we need to decide whether this file should\n # go into the training, testing, or validation sets, and we want to keep\n # existing files in the same set even if more files are subsequently\n # added.\n # To do that, we need a stable way of deciding based on just the file name\n # itself, so we do a hash of that and then use that to generate a\n # probability value that we use to assign it.\n testing_images.append(base_name)\n return testing_images", "def __init__(self, data_dir, file_prefix, num_images):\n self.file_prefix = file_prefix\n self.files = [os.path.join(data_dir, '%s%03d.jpg' % (file_prefix, i + 1)) for i in range(num_images)]\n self.files = list(filter(os.path.exists, self.files))\n self.panoramas = None\n self.homographies = None\n print('found %d images' % len(self.files))", "def __init__(self, data_dir, file_prefix, num_images):\n print(file_prefix)\n self.file_prefix = file_prefix\n self.files = [os.path.join(data_dir, '%s%03d.jpg' % (file_prefix, i + 1)) for i in range(num_images)]\n self.files = list(filter(os.path.exists, self.files))\n self.panoramas = None\n self.homographies = None\n print('found %d images' % len(self.files))", "def move_images_and_list(path, final_path):\n #Lists all created folders\n directories = os.listdir(path)\n #Array that stores the path to each image\n lists = []\n #This variable will be used to give a unique name to each image\n tot_images = 0\n #Creates the path where will be stored all files\n if not os.path.exists(final_path):\n os.mkdir(final_path)\n #Iterates over each folder\n for ph in directories:\n #Iterates over each line of the generated file images.lst\n for img in open(os.path.join(path, ph, \"images.lst\")).readlines():\n \"\"\"Images are stored with a name, how many objects have and\n where it is, like this '01_0252_0067_0139_0222.jpg 1 252 67 139 222'\n so these five lines under changes the first part before '_', because\n in some cases, the command opencv_createsamples creates a same name\n to different positive images, this ensures a different name to each\n image\"\"\"\n split_space = img.split()\n split_underscore = split_space[0].split(\"_\")\n split_underscore[0] = str(tot_images)\n join_underscore = \"_\".join(split_underscore)\n join_space = \" \".join([join_underscore, *split_space[1:]])\n #Appends the new image's name to the list\n lists.append(join_space)\n #Moves each image in the folder to the final path, with a new name\n move(os.path.join(path, ph, split_space[0]),\n os.path.join(final_path, join_space.split()[0]))\n tot_images += 1\n #Writes a file withe the name of all images in the folder\n with open(os.path.join(final_path, \"images.lst\"), \"w+\") as f:\n for i in lists:\n f.write(\"\".join([i, '\\n']))\n #Removes the temporary path\n rmtree(os.path.abspath(path))\n #Name of the created file\n return \"images.lst\"", "def scan_images(self):\n rtn = 0\n mime_list = self.db.get_mime_list()\n (results,count) = datastore.find({})\n for f in results:\n dict = f.get_metadata().get_dictionary()\n if dict[\"mime_type\"] in mime_list:\n #record the id, file size, file date, in_ds\n self.db.create_picture_record(f.object_id, f.get_file_path())\n rtn += 1\n f.destroy()\n self.db.commit()\n _logger.debug('%s entries found in journal. 
Number of pictures %s'%(count,rtn,))\n return rtn", "def get_num_of_images(self):", "def get_photos_counts(self):\n return GoodsPhotos.objects.filter(good=self).count()", "def process_images(image_folder: Path) -> List[Dict]:\n images = []\n files = image_folder.glob(\"*.jpg\")\n\n for file_path in files:\n file_name = file_path.name\n file_id = file_name.split(\".jpg\")[0]\n file_id = file_id.split(\"in\")[-1]\n file_id = int(file_id)\n file_id = f\"{file_path.parent.parent.name}_{str(file_id)}\"\n\n width, height = imagesize.get(str(file_path))\n\n image_data = {\"id\": file_id,\n \"width\": width,\n \"height\": height,\n \"filename\": str(file_path)}\n images.append(image_data)\n\n return images", "def create_images(jsons_info, image_dir, photos_info_dict, num_images=200):\n for item in jsons_info:\n json_info = json.load(open(item, \"r\"))\n category_dir = os.path.join(image_dir, os.path.splitext(os.path.basename(item))[0])\n print(\"Downloading in -- \", category_dir)\n if not os.path.exists(category_dir):\n os.makedirs(category_dir)\n count = 0\n i = 0\n while count < num_images:\n photo_id = json_info[i][\"photo\"]\n link = photos_info_dict[f'{photo_id:09}']\n try:\n urllib.request.urlretrieve(link, f\"{category_dir}/{count}.jpg\")\n count = count + 1\n i = i + 1\n except:\n i = i + 1\n print(\"Image - Downloaded\")", "def __init__(self, data_dir, file_prefix, num_images):\n self.file_prefix = file_prefix\n self.files = [os.path.join(data_dir, '%s%03d.jpg' % (file_prefix, i + 1)) for i in range(num_images)]\n self.files = list(filter(os.path.exists, self.files))\n self.panoramas = None\n self.homographies = None\n self.images = []\n self.display_match = False\n self.useBlending = False\n print('found %d images' % len(self.files))", "def preprocessfolder(self):\n imgs, _ = getFilesAndHdf(str(self.in_directory.text()))\n self.img_list = sorted(imgs)\n self.updateImageGroups()", "def process_images():\n create_dirs()\n for root, dirs, files in os.walk(IN):\n for name in files:\n if name[0] == '.':\n continue\n process_image(name)", "def photos():\n cwd = os.getcwd()\n db_path = os.path.join(cwd, CLI_PHOTOS_DB)\n return PhotosDB(db_path).photos(intrash=True)", "def filelist_create(self, directory=\".\"):\n # Get data from ls -lh and parse it correctly\n files = listdir_wrapper(directory, self.show_hidden)\n self.filesize = {}\n for fil in files:\n # Number of images in directory as filesize\n if os.path.isdir(fil):\n try:\n subfiles = listdir_wrapper(fil, self.show_hidden)\n # Necessary to keep acceptable speed in library\n many = False\n if len(subfiles) > self.file_check_amount:\n many = True\n subfiles = [subfile\n for subfile in subfiles[:self.file_check_amount]\n if is_image(os.path.join(fil, subfile))]\n amount = str(len(subfiles))\n if subfiles and many:\n amount += \"+\"\n self.filesize[fil] = amount\n except:\n self.filesize[fil] = \"N/A\"\n else:\n self.filesize[fil] = sizeof_fmt(os.path.getsize(fil))\n\n return files", "def get_images(self, page_number):", "def get_images(path_list):\n images = []\n labels = []\n names = []\n i = 0\n for path in path_list:\n for fruit_dir_path in glob.glob(path):\n fruit_label = fruit_dir_path.split(\"/\")[-1]\n for image_path in glob.glob(os.path.join(fruit_dir_path, \"*.jpg\")):\n image = cv2.imread(image_path, cv2.IMREAD_COLOR)\n\n image = cv2.resize(image, (45, 45))\n image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)\n\n images.append(image)\n names.append(fruit_label)\n labels.append(i)\n i += 1\n\n images = np.array(images)\n 
print(images.shape)\n # add a new dimension here\n with np.nditer(images, op_flags=['readwrite']) as it:\n for x in it:\n x = np.expand_dims(x, axis=0)\n labels = np.array(labels)\n return images, labels, i", "def _showphotos(self, btn):\n global layout\n global curdir\n\n # Create the layouts.\n layout = GridLayout(cols=5, padding=0, spacing=0, size_hint=(1, None))\n layout.bind(minimum_height=layout.setter(\"height\"))\n\n foldername = btn\n\n # Args is combined with \"curdir\" to load the thumbnails, and add them to the Gridlayout.\n if foldername == \"\":\n pass\n else:\n for filename in sorted(glob(join(curdir, \"thumb\", foldername, \"*\"))):\n try:\n canvas = BoxLayout(size_hint=(1, None))\n im = Image(source=filename)\n canvas.add_widget(im)\n layout.add_widget(canvas)\n\n except Exception:\n print(\"Pictures: Unable to load <%s>\" % filename)\n\n return layout", "def __count_media_files(self, CurrentDir):\n self.__counter_lock.acquire()\n self.media_counter = [0, 0]\n self.__counter_lock.release()\n if os.path.exists(CurrentDir):\n for i in os.listdir(CurrentDir):\n MediaType = self.thumb_filter(CurrentDir,i)\n if MediaType == TYPE_PHOTO:\n self.__counter_lock.acquire()\n self.media_counter[0] += 1\n self.__counter_lock.release()\n elif MediaType == TYPE_VIDEO:\n self.__counter_lock.acquire()\n self.media_counter[1] += 1 \n self.__counter_lock.release()", "def __init__(self, data_dir, pairs_filepath, img_ext, num_random_images_per_folder):\n self.data_dir = data_dir\n self.pairs_filepath = pairs_filepath\n self.img_ext = img_ext\n self.num_random_images_per_folder = num_random_images_per_folder\n\n if os.name == 'nt':\n self.separator = \"\\\\\"\n else:\n self.separator = \"/\"\n\n self.remaining = []\n for name in os.listdir(self.data_dir):\n if os.path.isdir(os.path.join(self.data_dir, name)):\n self.remaining.append(name)", "def get_recent_images(num_images=30):\n folder = app.config['UPLOAD_FOLDER']\n\n init_image_info()\n\n # get list of last modified images - ignore .json file and files start with .\n files = ['/'.join((folder, file)) \\\n for file in os.listdir(folder) if ('json' not in file) \\\n and not (file.startswith('.')) ]\n\n # list of tuples (file_path, timestamp)\n last_modified_files = [(file, os.path.getmtime(file)) for file in files]\n print(last_modified_files)\n last_modified_files = sorted(last_modified_files,\n key=lambda t: t[1], reverse=True)\n num_stored_images = len(last_modified_files)\n\n # build a list of image information\n image_stats = []\n\n print(\"THE NUMBER OF STORED IMAGES IS: {}\".format(num_stored_images))\n\n if num_stored_images != 0:\n\n # read in image info\n with open(IMAGE_INFO_JSON, 'r') as f:\n info = json.load(f)\n\n for i, f in enumerate(last_modified_files):\n # set limit for rendering pictures\n if i > num_images: break\n\n path, filename = f[0], f[0].replace(folder, '').replace('/', '')\n cur_image_info = info.get(filename, {})\n\n print(\"CURRENT IMAGE INFO IS: {}\".format(cur_image_info))\n\n img = {\n 'path': path,\n 'labels': cur_image_info\n }\n print(\"CURRENT IMG LABEL DATA IS: {}\".format(img['labels']))\n image_stats.append(img)\n\n return image_stats, num_stored_images", "def list_images(self):\n raise NotImplementedError()", "def loadimages(root):\n imgs = []\n\n def add_json_files(path,):\n for imgpath in glob.glob(path+\"/*.png\"):\n if exists(imgpath) and exists(imgpath.replace('png',\"json\")):\n imgs.append((imgpath,imgpath.replace(path,\"\").replace(\"/\",\"\"),\n imgpath.replace('png',\"json\")))\n for imgpath 
in glob.glob(path+\"/*.jpg\"):\n if exists(imgpath) and exists(imgpath.replace('jpg',\"json\")):\n imgs.append((imgpath,imgpath.replace(path,\"\").replace(\"/\",\"\"),\n imgpath.replace('jpg',\"json\")))\n\n def explore(path):\n if not os.path.isdir(path):\n return\n folders = [os.path.join(path, o) for o in os.listdir(path) \n if os.path.isdir(os.path.join(path,o))]\n if len(folders)>0:\n for path_entry in folders: \n explore(path_entry)\n else:\n add_json_files(path)\n\n explore(root)\n\n return imgs", "def feed(self, reset=True): \n if self.reuse:\n image_subdirs = get_random_image_sample(IMAGE_PACKAGE_SIZE, self.image_location, [])\n else:\n image_subdirs = get_random_image_sample(IMAGE_PACKAGE_SIZE, self.image_location, self.used_images)\n if reset:\n reset_directory(self.feed_location, self.image_location)\n images = self.move_images(image_subdirs, self.feed_location, folders=True)\n self.used_images.extend(images)\n return image_subdirs", "def main_one(string_path_to_folder, destination_folder):\n # .jpg and .JPG are the same\n # photos = glob.glob(\"C:/Personal/pp2_photo/dataBase/*.JPG\") # Examples of location format\n # pho = glob.glob(\"C:/Personal/pp2_photo/dataBase/*.jpg\")\n photos = glob.glob(string_path_to_folder+\"/*.JPG\")\n print(\"Number of files: \", len(photos))\n for k in photos:\n print(get_photo_date(k))\n process_all(k, destination_folder)", "def list_photos(username):\n user = Flickr.Person.findByUserName(username)\n photos = Flickr.Walker(user.getPhotos)\n for photo in photos:\n print u\"{0} - {1}\".format(photo.id, photo.title)\n\n print(\"Number of total photos: %s\" % user.getPhotos().info.total)", "def process():\n config = read_config()\n \n\n img_dir = config['DEFAULT']['images_directory']\n results_dict = {}\n images = list(get_image_files(img_dir))\n for image in tqdm.tqdm(images):\n info = hash_file(image)\n if info == 0:\n continue\n\n hash_value = info['hash']\n\n if hash_value not in results_dict:\n file_name = os.path.basename(info['_id'])\n results_dict[hash_value] = [file_name, 1]\n else:\n results_dict[hash_value][1] += 1\n\n count = list(results_dict.values())\n sorted_count = sorted(count, key=lambda x: x[1], reverse=True)\n \n with ImagesDB(IMG_INFO_DB_FILENAME) as imgDb: \n imgDb.insert_batch(sorted_count)", "def count_images(self):\n\t\treturn self.session.query(Image.id).count()", "def create_preset_images(self):\n for f in sorted(self.get_files_from_data()):\n photoInstances = {}\n for preset in self.generator.settings[\"GALLERY_PRESETS\"]:\n preset_dir = \"%s%s%s\" % (self.absolute_output_path,\n os.sep, \n preset[\"name\"])\n photoInstances[preset[\"name\"]] = Photo(self, f, preset_dir, preset)\n \n self.photos.append(photoInstances)", "def create_list(self):\n for _ in range(self.count):\n id_ = random.randint(10000, 99999)\n self.ids.append(id_)\n self.img_paths.append(f\"{self.save_path}{self.name}/images/{id_}.png\")\n if hasattr(self, \"masks\"):\n self.masks.append(f\"{self.save_path}{self.name}/masks/{id_}.png\")", "def rotate_images(data_folder, rots_per_pic):\n\n\tprint \"Rotating images...\"\n\n\t#search for images in folder iteratively\n\told_paths = []\n\tfor folder, subs, files in os.walk(data_folder):\n\t\tfor filename in files:\n\t\t\tif filename.endswith('.png') or filename.endswith('.jpg'):\n\t\t\t\told_paths.append(os.path.join(folder, filename))\n\t#sorts the paths obtained\n\told_paths.sort()\n\n\told_paths_with_sums = {}\n\n\tfor filename in old_paths:\n\t\told_paths_with_sums[filename] = 0\n\n\t#counts how many 
times the images were already processed \n\tnew_paths = []\n\tall_files_sum = 0\n\talready_processed_sum = 0\n\tfor filename in old_paths:\n\t\tif \"processed\" not in filename:\n\t\t\tall_files_sum = all_files_sum + 1\n\t\t\tnew_paths.append(filename)\n\t\t\tprint('File found:')\n\t\t\tprint filename\n\t\telse:\n\t\t\talready_processed_sum = already_processed_sum + 1\n\t\t\tmatching = [s for s in new_paths if ((filename.partition(\"_processed_\")[0]+\".png\")==s or (filename.partition(\"_processed_\")[0]+\".jpg\")==s)]\n\t\t\tfor i in matching:\n\t\t\t\told_paths_with_sums[i] = old_paths_with_sums[i] + 1\n\t\t\t\tif old_paths_with_sums[i] >= rots_per_pic:\n\t\t\t\t\tnew_paths.remove(i)\n\t\t\t\t\tprint('File already processed '+str(old_paths_with_sums[i])+' time(s):')\n\t\t\t\t\tprint(i)\n\t\t\t\telse:\n\t\t\t\t\tprint('File processed '+str(old_paths_with_sums[i])+' time(s):')\n\t\t\t\t\tprint(i)\n\n\tprocessed_sum = 0\n\ttoo_big_angles_sum = 0\n\tno_desc_found_sum = 0\n\tmarkers_out_of_mesh = 0\n\n\tfor current_path in new_paths:\n\t\t#rotates image as many times as needed to achieve the desired number of rotations\n\t\tfor i in range(int(rots_per_pic) - old_paths_with_sums[current_path]):\n\t\t\tpath = current_path\n\t\t\t\n\t\t\t#loads files generated by Zface if they exist and are not empty\n\t\t\tif (os.path.isfile(path+'.mesh3D') and\n\t\t\t\tos.path.isfile(path+'.mesh2D') and\n\t\t\t\tos.path.isfile(path+'.ctrl2D') and\n\t\t\t\tos.path.isfile(path+'.pars') and\n\t\t\t\tos.stat(path+'.mesh3D').st_size != 0 and\n\t\t\t\tos.stat(path+'.mesh2D').st_size != 0 and\n\t\t\t\tos.stat(path+'.ctrl2D').st_size != 0 and\n\t\t\t\tos.stat(path+'.pars').st_size != 0):\n\t\t\t\tsrc3 = np.loadtxt(path+'.mesh3D')\n\t\t\t\tsrc2 = np.loadtxt(path+'.mesh2D')\n\t\t\t\tctrl2 = np.loadtxt(path+'.ctrl2D')\n\t\t\t\tscale = np.loadtxt(path+'.pars')[0]\n\t\t\t\ttranslx = np.loadtxt(path+'.pars')[1]\n\t\t\t\ttransly = np.loadtxt(path+'.pars')[2]\n\t\t\t\tpitch = np.loadtxt(path+'.pars')[3]\n\t\t\t\tyaw = np.loadtxt(path+'.pars')[4]\n\t\t\t\troll = np.loadtxt(path+'.pars')[5]\n\n\t\t\t\t#tests wether or not initial rotation is too large\n\t\t\t\tif (abs(yaw)<radians(30) and abs(pitch)<radians(15)):\n\n\t\t\t\t\timage = data.load(path)\n\t\t\t\t\trows, cols = image.shape[0], image.shape[1]\n\n\t\t\t\t\tx = src3[:,0]\n\t\t\t\t\ty = src3[:,1]\n\t\t\t\t\tz = src3[:,2]\n\n\t\t\t\t\t#transform 3D mesh from normalized space and rotation to actual space and rotation\n\t\t\t\t\tx = x*cos(roll)+y*-sin(roll)\n\t\t\t\t\ty = x*sin(roll)+y*cos(roll)\n\t\t\t\t\tz = z\n\n\t\t\t\t\tx = x*cos(yaw)+z*sin(yaw)\n\t\t\t\t\ty = y\n\t\t\t\t\tz = x*-sin(yaw)+z*cos(yaw)\n\n\t\t\t\t\tx = x\n\t\t\t\t\ty = y*cos(pitch)+z*-sin(pitch)\n\t\t\t\t\tz = y*sin(pitch)+z*cos(pitch)\n\n\t\t\t\t\tx = x*scale+translx\n\t\t\t\t\ty = y*scale+transly\n\n\t\t\t\t\t#ortographically projects the 3D mesh to 2D (this will be our source for the Piecewise Affine Transform)\n\t\t\t\t\tsrc_cols = x\n\t\t\t\t\tsrc_rows = y\n\n\t\t\t\t\tsrc_rows, src_cols = np.meshgrid(src_rows, src_cols, sparse=True)\n\t\t\t\t\tsrc = np.dstack([src_cols.flat, src_rows.flat])[0]\n\n\t\t\t\t\t#transforms it back to normalized space\n\t\t\t\t\tx = (x-translx)/scale\n\t\t\t\t\ty = (y-transly)/scale\n\n\t\t\t\t\t#rotates it back to 0 rotation\n\t\t\t\t\tyaw = -yaw\n\t\t\t\t\tpitch = -pitch\n\t\t\t\t\troll = -roll\n\n\t\t\t\t\t#adds random rotation\n\t\t\t\t\treal_yaw = radians(random.uniform(-30, 30))\n\t\t\t\t\treal_pitch = radians(random.uniform(-15, 15))\n\t\t\t\t\treal_roll = 
0\n\n\t\t\t\t\tyaw = yaw + real_yaw\n\t\t\t\t\tpitch = pitch + real_pitch\n\t\t\t\t\troll = roll + real_roll\n\n\t\t\t\t\tx = x*cos(roll)+y*-sin(roll)\n\t\t\t\t\ty = x*sin(roll)+y*cos(roll)\n\t\t\t\t\tz = z\n\n\t\t\t\t\tx = x*cos(yaw)+z*sin(yaw)\n\t\t\t\t\ty = y\n\t\t\t\t\tz = x*-sin(yaw)+z*cos(yaw)\n\n\t\t\t\t\tx = x\n\t\t\t\t\ty = y*cos(pitch)+z*-sin(pitch)\n\t\t\t\t\tz = y*sin(pitch)+z*cos(pitch)\n\n\t\t\t\t\t#transforms it back to real space\n\t\t\t\t\tx = x*scale+translx\n\t\t\t\t\ty = y*scale+transly\n\n\t\t\t\t\t#orthographic projection of new coordinates will be the destination for PiecewiseAffineTransform\n\t\t\t\t\tdst_cols = x\n\t\t\t\t\tdst_rows = y\n\t\t\t\t\tdst = np.vstack([dst_cols, dst_rows]).T\n\n\t\t\t\t\tout_rows = rows\n\t\t\t\t\tout_cols = cols\n\n\t\t\t\t\t#looks for triangles formed by Delaunay triangularion, extracts the ones associated with each facial keypoint marker\n\t\t\t\t\ttform = PiecewiseAffineTransform()\n\t\t\t\t\tsrc_triangles, dst_triangles = tform.estimate(src[:,0:2], dst)\n\t\t\t\t\tctrl2_transforms = []\n\t\t\t\t\tfor current_ctrl2 in ctrl2:\n\t\t\t\t\t\tfor i in range(len(src_triangles)):\n\t\t\t\t\t\t\ttriangle = polygon.Path(src_triangles[i])\n\t\t\t\t\t\t\tif triangle.contains_point(current_ctrl2):\n\t\t\t\t\t\t\t\tctrl2_transforms.append(tform.affines[i])\n\t\t\t\t\t\t\t\tbreak\n\t\t\t\t\tif len(ctrl2_transforms)!=49:\n\t\t\t\t\t\tmarkers_out_of_mesh = markers_out_of_mesh + 1\n\t\t\t\t\t\tprint \"didn't process image, because can't find all shape parameters:\"\n\t\t\t\t\t\tprint path\n\t\t\t\t\t\tcontinue\n\t\t\t\t\tout_ctrl2 = []\n\t\t\t\t\tfor i in range(len(ctrl2_transforms)):\n\t\t\t\t\t\t\t#performs transformation on marker\n\t\t\t\t\t\t\tout_ctrl2.append(ctrl2_transforms[i](ctrl2[i]))\n\t\t\t\t\tout_ctrl2 = np.transpose((np.transpose(out_ctrl2)[0],np.transpose(out_ctrl2)[1]))\n\t\t\t\t\tout_ctrl2 = np.squeeze(out_ctrl2)\n\n\t\t\t\t\t#transforms image to the new surface triangle by triangle using Delaunay triangulation, then interpolation to smooth it out\n\t\t\t\t\ttform = PiecewiseAffineTransform()\n\t\t\t\t\ttform.estimate(dst, src[:,0:2])\n\t\t\t\t\tout_image = warp(image, tform, output_shape=(out_rows, out_cols))\n\n\t\t\t\t\tout_path = path[:-4]+'_processed'+'_yaw_'+str(real_yaw)+'_pitch_'+str(real_pitch)+'_roll_'+str(real_roll)+path[-4:]\n\n\t\t\t\t\t#saves image and marker points\n\t\t\t\t\timsave(out_path, out_image)\n\n\t\t\t\t\tnp.savetxt(out_path+'_0.txt', out_ctrl2)\n\n\t\t\t\t\tprocessed_sum = processed_sum + 1\n\t\t\t\t\tprint(str(processed_sum)+'. 
file processed:')\n\t\t\t\t\tprint(path)\n\t\t\t\telse:\n\t\t\t\t\ttoo_big_angles_sum = too_big_angles_sum + 1\n\t\t\t\t\tprint(\"didn't process image, because of too big original rotation:\")\n\t\t\t\t\tprint(path)\n\t\t\telse:\n\t\t\t\tno_desc_found_sum = no_desc_found_sum + 1\n\t\t\t\tprint(\"didn't process image, beacuse descriptor documents not found:\")\n\t\t\t\tprint(path)\n\n\tout_paths = []\n\tfor folder, subs, files in os.walk(data_folder):\n\t\tfor filename in files:\n\t\t\tif filename.endswith('.png') or filename.endswith('.jpg'):\n\t\t\t\tif \"processed\" in filename:\n\t\t\t\t\tout_path = os.path.join(folder, filename).replace(data_folder, \"\")\n\t\t\t\t\tout_paths.append(out_path)\n\n\t#writes paths of generated images into contents\n\tfilename = data_folder+'/contents'\n\n\twith open(filename, 'w') as f:\n\t\tf.write('\\n'.join(out_paths))\n\n\tprint \"Shuffling contents...\"\n\t#shuffles contents\n\tshuffle_contents(filename)\n\n\n\t#prints some statistics about the process on the screen\n\tprint\n\tprint(\"Statistics:\")\n\tprint(\"-----------\")\n\tprint(\"Files found: \"+str(all_files_sum))\n\tif all_files_sum != 0:\n\t\tprint(\"Already processed: \"+str(already_processed_sum))\n\t\tprint(\"Got processed now: \"+str(processed_sum))\n\t\tprint(\"All processed: \"+str((processed_sum+already_processed_sum)*100/all_files_sum)+\"%\")\n\t\tprint(\"Can't be processed because of too big angles: \"+str(too_big_angles_sum*100/all_files_sum)+\"%\")\n\t\tprint(\"Can't be processed because of no decriptors: \"+str(no_desc_found_sum*100/all_files_sum)+\"%\")\n\t\tprint(\"Can't be processed because of markers outside of mesh: \"+str(markers_out_of_mesh*100/all_files_sum)+\"%\")", "def numberOfImages(self):\n return len(self.imageList)", "def index(request):\n photos = Image.objects.all().order_by('-created')\n no_of_photos = Image.objects.all().count()\n context = {\"photos\": photos, 'no_of_photos': no_of_photos}\n return render(request, 'photo/index.html', context=context)", "def fileCounter(directory):", "def get_ins_photo_list(self):\n photo_fn_list = get_file_list(self.ins_folder)\n # print(self.home_folder+self.ins_folder)\n # print(len(photo_list), photo_list[:10])\n if len(photo_fn_list) == 0:\n logging.error(\"The Ins folder is empty.\")\n\n return photo_fn_list", "def list_image_names(write_folder, user_name, image_size):\n image_dir = f'{write_folder}/{user_name}/{image_size}'\n # print('image_dir',image_dir)\n return os.listdir(image_dir)", "def get_subdir_filenum(super_path):\n \"\"\"获取所有子目录下的文件个数\"\"\"\n if not os.path.exists(super_path):\n return 0\n cnt = 0\n file_list =[]\n for r, dirs, files in os.walk(super_path):\n print(dirs)\n for dr in dirs:\n print(\"nothing\")\n cnt += len(glob.glob(os.path.join(r, dr + \"/*\")))\n return cnt", "def loadimages(root):\n imgs = []\n\n def add_json_files(path, ):\n for imgpath in glob.glob(path + \"/*.png\"):\n if exists(imgpath) and exists(imgpath.replace('png', \"json\")):\n imgs.append((imgpath, imgpath.replace(path, \"\").replace(\"/\", \"\"),\n imgpath.replace('png', \"json\")))\n for imgpath in glob.glob(path + \"/*.jpg\"):\n if exists(imgpath) and exists(imgpath.replace('jpg', \"json\")):\n imgs.append((imgpath, imgpath.replace(path, \"\").replace(\"/\", \"\"),\n imgpath.replace('jpg', \"json\")))\n\n def explore(path):\n if not os.path.isdir(path):\n return\n folders = [os.path.join(path, o) for o in os.listdir(path)\n if os.path.isdir(os.path.join(path, o))]\n if len(folders) > 0:\n for path_entry in folders:\n 
explore(path_entry)\n else:\n add_json_files(path)\n\n explore(root)\n\n return imgs", "def getimagelist(folder):\n imagefolder = Path(folder) \n imagelist = imagefolder.glob(\"**/*.png\") \n return list(imagelist)", "def photos(self):\n return self._photos", "def folders():\n\n os.makedirs('Images/')\n os.makedirs('Seg/')\n\n return", "def all_image_paths(self):\n self.labels = [i for i in (self.get_immediate_subdirectories(self.root_dir))\n if not i.startswith('.')]\n\n for root, subFolders, files in os.walk(self.root_dir):\n files = [i for i in files if not i.startswith('.')]\n files = files[:self.img_num] # hard coded - will not read in\n for i in files:\n self.all_files.append(os.path.abspath(root) + '/'.join(subFolders) + '/' + i)", "def make_imgs_list(self, imgs_dir, imgs_list):\n \n empty_list = []\n \n for img in imgs_list:\n \n img_dir = imgs_dir + '/' + img\n \n empty_list.append(img_dir)\n \n return empty_list", "def organize_my_photos(path, locale, extension):\n # Set locale\n if locale:\n locale.setlocale(category=locale.LC_ALL, locale=locale)\n # Set extensions\n extensions = EXTENSIONS\n if extension:\n extensions = (extension,)\n # Get all photos\n for root, dirs, files in os.walk(path):\n for file in files:\n if file.lower().endswith(extensions):\n # Get path file\n origin = os.path.join(root, file)\n # Get date\n date_created = get_date_created(os.path.join(root, file))\n date_list = date_created.split('/')\n date_created_day = date_list[0]\n date_created_month = date_list[1]\n date_created_year = date_list[2]\n # Make folder: format year/month day -> 2018/abr 23/photo.jpg\n dest_folder = os.path.join(date_created_year, f'{calendar.month_name[int(date_created_month)]} {date_created_day}')\n dest = os.path.join(date_created_year, f'{calendar.month_name[int(date_created_month)]} {date_created_day}', file)\n if not os.path.exists(dest_folder):\n os.makedirs(dest_folder)\n # Move photo\n shutil.move(origin, dest)", "def get_gallery():\r\n to_segment = os.listdir(TO_SEGMENT)\r\n print(to_segment)\r\n return render_template(\"gallery.html\",\r\n image_names=to_segment,\r\n next_page_text=\"Segment Images! 
- (might take a couple mins)\",\r\n next_page=\"get_segmented_gallery\"\r\n )", "def save_unique_image():\r\n global folder_name\r\n filelist = [file for file in os.listdir('temp') if file.endswith('.png')]\r\n\r\n if filelist:\r\n for image_path in filelist:\r\n found = 0\r\n img_to_del = Image.open(\"temp/\" + image_path)\r\n if not get_immediate_subdirectories():\r\n found = 1\r\n os.makedirs('detected_faces/1/')\r\n img_to_del.save('detected_faces/1/'+ image_path)\r\n os.remove(os.path.join(temp_path, image_path))\r\n folder_name = 1\r\n else:\r\n for folder in get_immediate_subdirectories():\r\n folder_filelist = [file for file in os.listdir(\"detected_faces/\" + folder) if\r\n file.endswith('.png')]\r\n count = len(folder_filelist)\r\n file = folder_filelist[0]\r\n img_to_compare = Image.open(\"detected_faces/\" + folder + \"/\" + file)\r\n if img_to_del.size > img_to_compare.size:\r\n temp_image_resized = img_to_del.resize(img_to_compare.size, Image.ANTIALIAS)\r\n index = get_ssim(temp_image_resized, img_to_compare)\r\n elif img_to_del.size < img_to_compare.size:\r\n img_to_compare = img_to_compare.resize(img_to_del.size, Image.ANTIALIAS)\r\n index = get_ssim(img_to_del, img_to_compare)\r\n else:\r\n index = get_ssim(img_to_del, img_to_compare)\r\n if index > min_ssim_index_val:\r\n found = 1\r\n if count < 5:\r\n img_to_del.save(pathname + \"/\" + folder + \"/\" + image_path)\r\n print image_path\r\n if os.path.isfile(os.path.join(temp_path, image_path)):\r\n os.remove(os.path.join(temp_path, image_path))\r\n if found == 0:\r\n folder_name += 1\r\n os.makedirs('detected_faces/' + str(folder_name))\r\n img_to_del.save(pathname + \"/\" + str(folder_name) + \"/\" + image_path)\r\n if os.path.isfile(os.path.join(temp_path, image_path)):\r\n os.remove(os.path.join(temp_path, image_path))", "def get_images(self, ctx, page):\n is_imgur = 'source' in page.meta and page.meta['source'] == 'imgur'\n if 'type' in page.meta and page.meta['type'] == 'album':\n album = page.meta\n images = []\n if is_imgur:\n pp.pprint(page.meta)\n # bind to template via json\n images = self.get_imgur_album_images(page)\n self.albums[album['slug']] = images\n else:\n # get paths of all of the images in the album\n srcs = []\n # get absolute paths of images in album for each file type\n for file_type in FILE_TYPES:\n imgs = glob.glob(\n GALLERY_DIR + album['slug'] + '/*.' 
+ file_type\n )\n\n for img in imgs:\n img_rel_path = (\n REL_GALLERY_DIR +\n album['slug'] + '/' + img.split('/')[-1]\n )\n srcs.append(img_rel_path)\n\n # split full srcs and thumb srcs from srcs into two lists\n images = []\n thumb_srcs = filter(\n lambda src: src.split('/')[-1].startswith(THUMB_PREFIX),\n srcs\n )\n for thumb_src in thumb_srcs:\n src = thumb_src.replace(THUMB_PREFIX, '')\n thumb_width, thumb_height = self.calc_img_hw(thumb_src)\n width, height = self.calc_img_hw(src)\n images.append({\n 'thumb_src': thumb_src,\n 'thumb_width': thumb_width,\n 'thumb_height': thumb_height,\n\n 'src': src,\n 'width': width,\n 'height': height,\n })\n self.albums[album['slug']] = images", "def index_files():\n\n print(\"Indexing files\")\n\n for root, _, files in os.walk(image_directory):\n for item in files:\n for file_type in file_types:\n if file_type in item:\n images_in_directory.append(os.path.join(root, item))\n\n print(f'Finished indexing {len(images_in_directory)} files')\n\n pass", "def setup_image_folder(path_to_images):\n\n print(\"setup images folder...\")\n\n if os.path.isdir(path_to_images):\n print(\"folder already exists: remove...\")\n shutil.rmtree(path_to_images)\n\n os.mkdir(path_to_images)\n print(\"folder created\")", "def get_image_bases(image_root: str) -> list:\n return list(sorted(os.listdir(image_root), key=lambda x: tuple(\n int(x.split('.')[0].split('-')[i]) for i in range(1, len(x.split('-'))))))", "def print_images_in_statistics(self):\n self._print_images_statistics(self._images_in_folder, self._pose_class_names)", "def view_images(request):\n user_root = request.session['user_root']\n search_id = request.session['search_id']\n with open(os.path.join(user_root, search_id, 'info.json')) as f:\n info = json.load(f)\n object_id_list = info['object_id_list']\n image_type_list = info['image_type_list']\n search_pattern = info['search_pattern']\n image_dir = scan_images(user_root, search_id, image_type_list,relative_path=True)\n\n # Add flag for conditional representation.\n flag_scan = False\n flag_classifier=info['flag_classifier']\n if search_pattern == \"scan\":\n flag_scan = True\n bounding_box_dict = scan_bb_images(\n user_root, search_id, folder_name=\"scans\")\n else:\n bounding_box_dict = scan_bb_images(user_root, search_id)\n\n return render(request, 'gallery.html',\n {\"object_id_list\": object_id_list,\n \"image_dir\": image_dir,\n \"bounding_box\": bounding_box_dict,\n \"flag_scan\": flag_scan,\n \"flag_classifier\":flag_classifier,\n \"image_type_list\":image_type_list})", "def test_get_photos_paging(self):\n pass", "def count_dirs_and_files(directory='.'):\n pass", "def test_team_template_folders_count_get(self):\n pass", "def preprocess_images(file_path, new_file_path):\n if not os.path.isdir(new_file_path):\n os.mkdir(new_file_path)\n i = 0\n for dir in listdir(file_path):\n j = 0\n for image_path in listdir(file_path + '/' + dir):\n image = open_image(image_path)\n cv2.imwrite(file_path + '/' + image_path + '/' str(i) + '/' +str(i) + '.jpg', image)\n j += 1\n i += 1", "def count(args):\n path = os.path.abspath(args.path)\n total = 0\n\n if args.recursive:\n if os.path.exists(args.path):\n for item in os.listdir(path):\n little_path = os.path.join(path, item)\n if os.path.isfile(little_path):\n total += parse_file_count(little_path, args)\n else:\n total += count(little_path)\n else:\n print(\"EROARE: <\" + args.path +\n \"> invalid, nu putem ajunge acolo\")\n else:\n if os.path.isfile(args.path):\n total += parse_file_count(args.path, args)\n 
else:\n print(\"EROARE: <\" + args.pattern +\n \"> invalid, nu este fisier\")\n return total", "def image_list(request):\n return render_to_response('wainz/image_list.html', {\"images_and_votes\": ordered_images(0, 30, request.user)}, context_instance = RequestContext(request))", "def create_noobj_folder(\n folder: PathLike, \n img_ext: str = \".jpg\",\n):\n folder = Path(folder).expanduser().resolve()\n images = glob(folder, img_ext)\n \n for image in images:\n filename = image.name\n _folder = image.parent.name\n path = folder / (image.stem + \".xml\")\n img_w, img_h = get_image_size(image)\n\n tree = ET.Element(\"annotation\")\n\n et_folder = ET.SubElement(tree, \"folder\")\n et_folder.text = _folder\n\n et_filename = ET.SubElement(tree, \"filename\")\n et_filename.text = filename\n\n et_path = ET.SubElement(tree, \"path\")\n et_path.text = str(path)\n\n et_img_size = ET.SubElement(tree, \"size\")\n ET.SubElement(et_img_size, \"width\").text = str(img_w)\n ET.SubElement(et_img_size, \"height\").text = str(img_h)\n ET.SubElement(et_img_size, \"depth\").text = \"3\"\n\n content = ET.tostring(tree, encoding=\"unicode\", pretty_print=True)\n try: \n path.write_text(content)\n except KeyboardInterrupt:\n path.write_text(content)\n exit()", "def MergeClipData(clip_im_dir):\r\n\r\n image_list_dict = {'image_id':[]} \r\n\r\n clip_img = os.listdir(clip_im_dir)\r\n\r\n\r\n for folder_clip in clip_img :\r\n\r\n clip_path = os.path.join(clip_im_dir, folder_clip)\r\n # clip_path = clip_im_dir + '/' + folder_clip\r\n clip_list = os.listdir(clip_path)\r\n\r\n \r\n for folder in clip_list:\r\n\r\n images_path = os.path.join(clip_path, folder)\r\n # images_path = clip_path + '/' + folder\r\n image_list = os.listdir(images_path)\r\n\r\n image_list_dict['image_id'] += [os.path.join(images_path, name) for name in image_list]\r\n\r\n return image_list_dict", "def walk_through_dir(dir_path):\n for dirpath, dirnames, filenames in os.walk(dir_path):\n print(f\"There are {len(dirnames)} directories and {len(filenames)} images in '{dirpath}'.\")", "def test_team_template_folders_id_children_count_get(self):\n pass", "def make_gallery(post_name, image_list, config={'gallery_dir': 'galleries'}):\n gallery_name = make_gallery_name_from_post_name(post_name)\n gallery_path = get_gallery_path(gallery_name)\n output_path = os.path.join(gallery_path, \"index.md\")\n with open(output_path, \"w\") as fd:\n fd.write(make_gallery_index(gallery_name, image_list))\n\n copy_images(gallery_path, image_list)\n #make_thumbs\n #make_image_pages", "def read_images(path, image_size=None):\n c = 0\n X = []\n y = []\n folder_names = []\n for dirname, dirnames, filenames in os.walk(path):\n for subdirname in dirnames:\n folder_names.append(subdirname)\n subject_path = os.path.join(dirname, subdirname)\n for filename in os.listdir(subject_path):\n #try:\n im = cv2.imread(os.path.join(subject_path, filename), cv2.IMREAD_GRAYSCALE)\n # resize to given size (if given)\n if (image_size is not None):\n im = cv2.resize(im, image_size)\n X.append(np.asarray(im, dtype=np.uint8))\n y.append(c)\n #except IOError, (errno, strerror):\n # print \"I/O error({0}): {1}\".format(errno, strerror)\n # except:\n # print \"Unexpected error:\", sys.exc_info()[0]\n # raise\n c = c+1\n return [X,y,folder_names]", "def getAllImages(self):\n\n images = list(self._images)\n for s in self._subdirs:\n images += s.getAllImages()\n return images", "def get_existing_images(directory):\n validate_directory(directory)\n directory += '/'\n try:\n return 
listdir(directory)\n except:\n mkdir(directory)\n return []", "def prepare_batch(self, iterator):\n elements = []\n\n for label, album_ids in iterator:\n for album_id in album_ids:\n image_path = os.path.join(self.image_folder, album_id)\n # If path doesn't exist, continue\n if not os.path.exists(image_path):\n continue\n images = [os.path.join(image_path, img_name)\n for img_name in sorted(os.listdir(image_path))]\n # If no photo available, continue\n if len(images) == 0:\n continue\n\n elements.append((label, images))\n\n random.shuffle(elements)\n\n return sorted(elements, key=lambda p: len(p[1]), reverse=True)", "def get_check_folder():\r\n filelist = [file for file in os.listdir('temp') if file.endswith('.png')]\r\n image_count = len(filelist)\r\n if image_count == 0:\r\n print\"No faces detected in image.\"\r\n exit()\r\n print \"Detected \"+str(image_count)+\" faces in the image.\"\r\n if filelist:\r\n for image_path in filelist:\r\n target = cv2.imread(\"temp/\" + image_path)\r\n cv2.imshow(\"detected face\", target)\r\n k = cv2.waitKey(1) & 0xFF\r\n img_to_del = Image.open(\"temp/\" + image_path)\r\n for folder in get_immediate_subdirectories():\r\n count = 0\r\n val = 0\r\n folder_filelist = [file for file in os.listdir(\"detected_faces/\" + folder) if\r\n file.endswith('.png')]\r\n for file in folder_filelist:\r\n img_to_compare = Image.open(\"detected_faces/\" + folder + \"/\" + file)\r\n if img_to_del.size > img_to_compare.size:\r\n temp_image_resized = img_to_del.resize(img_to_compare.size, Image.ANTIALIAS)\r\n index = get_ssim(temp_image_resized, img_to_compare)\r\n elif img_to_del.size < img_to_compare.size:\r\n img_to_compare = img_to_compare.resize(img_to_del.size, Image.ANTIALIAS)\r\n index = get_ssim(img_to_del, img_to_compare)\r\n else:\r\n index = get_ssim(img_to_del, img_to_compare)\r\n val += index\r\n count += 1\r\n if count > 0:\r\n index = val/count\r\n if index > min_ssim_index_val:\r\n print \" Detected a face in DB folder \"+ folder\r\n if os.path.isfile(os.path.join(temp_path, image_path)):\r\n os.remove(os.path.join(temp_path, image_path))", "def num_of_images(self):\n return len(self.data['image_infos'])", "def remove_extra_images(path_to_images: str, number_of_images: int) -> None:\n last_image = 'image' + str(number_of_images) + '.jpg'\n while last_image in listdir(path_to_images):\n last_image_path = path.join(path_to_images, last_image)\n remove(last_image_path)\n print(f\"remove {last_image}\")\n number_of_images += 1\n last_image = 'image' + str(number_of_images) + '.jpg'", "def createAllImageFiles(poly, name) :\n \n for i in range(len(poly.getPaths())):\n fileName = name + \"_\" + str(i) + \".dot\"\n imgName = name + \"_\" + str(i) + \".jpg\"\n \n Command = \"neato -Tjpeg \" + fileName + \" -o \" + imgName\n run(Command, shell=True)", "def getNumberOfImages(self):\n\t\treturn self.numberOfImages", "def walk_through_dir(dir_path):\n for dirpath, dirnames, filenames in os.walk(dir_path):\n print(f\"There are {len(dirnames)} directories and {len(filenames)} images in '{dirpath}'.\")", "def read_files(self):\n files = []\n # if this is test folder then there are no labels\n if 'test' in self.list_path:\n for item in self.img_list:\n image_path = item\n name = os.path.splitext(os.path.basename(image_path[0]))[0]\n files.append({\n \"img\": image_path[0],\n \"name\": name,\n })\n else:\n for item in self.img_list:\n image_path, label_path = item\n name = os.path.splitext(os.path.basename(label_path))[0]\n files.append({\n \"img\": image_path,\n \"label\": 
label_path,\n \"name\": name,\n \"weight\": 1\n })\n return files", "def get_images_of_folder(folder):\n\n Settings.dev_print(\"getting images of folder: {}\".format(folder.get_title()))\n if not folder: return []\n imgs = []\n files = []\n valid_images = [\".jpg\",\".gif\",\".png\",\".tga\",\".jpeg\"]\n for f in os.listdir(folder.get_path()):\n ext = os.path.splitext(f)[1]\n if ext.lower() not in valid_images:\n continue\n file = File()\n setattr(file, \"path\", os.path.join(folder.get_path(),f))\n files.append(file)\n Settings.maybe_print(\"image path: {}\".format(os.path.join(folder.get_path(),f)))\n return files", "def get_photo_list(folder_name, extension='*.png'):\n photo_list = [os.path.basename(f) for f in glob.glob(os.path.join(folder_name, extension))]\n photo_list.sort()\n return photo_list", "def listImageFolder():\n #Note: Ignores files ending in ~ which is a backup/lock file\n return [f for f in os.listdir(imageFolder) if f[-1] is not '~']", "def get_images(self):\r\n if self.images is None:\r\n self.images = {}\r\n for name, img_num in self.images.iteritems():\r\n if isinstance(img_num, int):\r\n yield (name, img_num)", "def __save_to_dir(self, imagelist, prefix, PATH):\n for pair in imagelist:\n directory = os.path.join(PATH, pair[1])\n if not os.path.exists(directory):\n os.mkdir(directory)\n filename = prefix + pair[2]\n pair[0].save(os.path.join(directory, filename))\n print(\"Saved \" + os.path.join(directory, filename))", "def index_subset(subset):\n images = []\n print('Indexing {}...'.format(subset))\n # Quick first pass to find total for tqdm bar\n subset_len = 0\n \n \n for root, folders, files in os.walk(DATA_PATH + '/Omniglot/images_{}/'.format(subset)):\n subset_len += len([f for f in files if f.endswith('.png')])\n\n progress_bar = tqdm(total=subset_len)\n for root, folders, files in os.walk(DATA_PATH + '/Omniglot/images_{}/'.format(subset)):\n if len(files) == 0:\n continue\n\n alphabet = root.split('/')[-2]\n class_name = '{}.{}'.format(alphabet, root.split('/')[-1])\n\n for f in files:\n progress_bar.update(1)\n images.append({\n 'subset': subset, \n 'class_name': class_name,\n 'filepath': os.path.join(root, f)\n })\n\n progress_bar.close()\n return images", "def get_histograms(self, folder_name):\n histograms_folder_name = folder_name + '_histograms'\n\n try:\n print(\"Making dir \" + str(histograms_folder_name) + \" for histograms\")\n os.mkdir(histograms_folder_name)\n except OSError:\n print(\"Folder exists, have you already created these/this??\")\n return\n\n print(\"Writing to folder: \" + str(histograms_folder_name))\n photo_list = self.get_photo_list(folder_name, '*.png')\n for name in photo_list:\n image = cv2.imread(folder_name + '/' + name, cv2.IMREAD_ANYDEPTH)\n plt.hist(image.ravel(), 256, [0, 65535])\n plt.savefig(histograms_folder_name + '/' + name + 'histogram.eps', format='eps')\n plt.clf()\n # plt.show()", "def load_images(self, files, sub_dir):\n\n for f in files:\n self.images.append(Image(f, sub_dir))", "def find_pictures(inDate):\n data = {}\n # Let's do some directory searching!\n day = inDate.day.zfill(2)\n month = inDate.month.zfill(2)\n year = inDate.year\n commandTemplate = constants.findPictures\n command = commandTemplate.format(year, month, day)\n foundDirectories = exec_console_command(command)\n directoriesList = foundDirectories.split('\\n')\n\n if directoriesList:\n # Find all dates + times for all directories\n data = {}\n\n for directory in directoriesList:\n fileList = exec_console_command(\"ls \" + 
directory).split(\"\\n\")\n\n for fileName in fileList:\n if \".NEF\" in fileName:\n # Get filepath for NEF file\n filePath = (directory + \"/\" + fileName)\n # Find timestamp of when photo was taken\n regexSearch = re.search('(?<!\\d)\\d{6}(?!\\d)', filePath)\n fileCreationTime = \"\"\n\n if regexSearch:\n fileCreationTime = regexSearch.group(0)\n fileCreationTime = fileCreationTime[:2] + ':' + fileCreationTime[2:]\n fileCreationTime = fileCreationTime[:5] + ':' + fileCreationTime[5:]\n h, m, s = fileCreationTime.split(':')\n seconds = int(h) * 3600 + int(m) * 60 + int(s)\n offset = calendar.timegm(time.localtime()) - calendar.timegm(\n time.gmtime(time.mktime(time.localtime())))\n fileCreationTimeSeconds = seconds + offset\n fileCreationTimeReadable = time.strftime('%H:%M:%S', time.gmtime(fileCreationTimeSeconds))\n\n data[fileCreationTimeReadable] = filePath\n\n return data", "def _printout_images_info(design_path):\r\n _max_pic_number = 8\r\n images = dict()\r\n for foo in os.listdir(design_path):\r\n abs_foo = os.path.join(design_path, foo)\r\n if os.path.isfile(abs_foo):\r\n continue\r\n if foo.endswith(\"Images\"):\r\n images.setdefault(foo, list())\r\n for bar in os.listdir(abs_foo):\r\n if bar.endswith(\".png\"):\r\n images[foo].append(bar)\r\n if images:\r\n for k, v in list(images.items()):\r\n v.sort(key=sort_by_num, reverse=True)\r\n nine_images = dict()\r\n images_number = 0\r\n for i in range(0, 10):\r\n if images_number > _max_pic_number:\r\n break\r\n for k, v in list(images.items()):\r\n nine_images.setdefault(k, list())\r\n try:\r\n nine_images[k].append(v[i])\r\n images_number += 1\r\n if images_number > _max_pic_number:\r\n break\r\n except IndexError:\r\n continue\r\n say_it(\"\")\r\n say_it(\"Images Number: {}\".format(images_number))\r\n ii = 1\r\n for kk, vv in list(nine_images.items()):\r\n for foo in vv:\r\n say_it(\"-PNG{}: {}/{}\".format(ii, kk, foo))\r\n ii += 1", "def fetch_photos(n):\n\n # This is the list we will use the pass back the photo information.\n data = []\n\n # First, we search for photos taken in Manchester.\n response = requests.get(f'https://api.flickr.com/services/rest/?method=flickr.photos.search&api_key={FLICKR_API_KEY}&lat=53.48&lon=-2.23&radius=10&radius_units=km&format=json&nojsoncallback=1')\n\n # Now loop through the photos.\n for photo in sample(response.json()['photos']['photo'], n):\n\n # We will search with the photo ID.\n id = photo['id']\n\n # Get the photo details. 
We can get the URL to the photo from here.\n response = requests.get(f'https://api.flickr.com/services/rest/?method=flickr.photos.getSizes&api_key={FLICKR_API_KEY}&photo_id={id}&format=json&nojsoncallback=1')\n\n # Extract the photo URL from the response.\n url = response.json()['sizes']['size'][-1]['source']\n\n # Store our photo ID and URL.\n data.append({\n 'title': photo['title'],\n 'id': photo['id'],\n 'url': url,\n })\n\n # Send back our list of photos.\n return data", "def get_iss_photos(lista,size=\"small\"):\n photos = []\n lista=asciitable.read(lista)\n lista=lista.ID\n pattern_s_L=[]\n pattern_b_L=[]\n link_L=[]\n idiss=[] \n for i in lista:\n pattern_s = \"http://eol.jsc.nasa.gov/DatabaseImages/ESC/%s/%s/%s-E-%s.JPG\" % (\n size,\n i[0:6],\n i[0:6],\n i[9:])\n pattern_b = \"http://eol.jsc.nasa.gov/DatabaseImages/ESC/%s/%s/%s-E-%s.JPG\" % (\n 'large',\n i[0:6],\n i[0:6],\n i[9:])\n link = \"http://eol.jsc.nasa.gov/SearchPhotos/photo.pl?mission=%s&roll=E&frame=%s\" % (\n i[0:6],\n i[9:])\n idISS = \"%s-E-%s\" % (\n i[0:6],\n i[9:])\n pattern_s_L.append(pattern_s)\n pattern_b_L.append(pattern_b)\n link_L.append(link)\n idiss.append(idISS)\n\n tmp = dict(link_small=pattern_s,\n link_big=pattern_b,\n link=link,\n idISS=idISS\n )\n\n photos.append(tmp)\n return photos,pattern_s_L,pattern_b_L,link_L,idiss", "def __init__(self, directory='Wang_Data'):\n self.directory = directory\n self._image_names = []\n for i in range(1000):\n self._image_names.append(str(i) + '.jpg')", "def list_images():\n resource_route = \"/static/img/\"\n file_request_path = request.base_url[:request.base_url.rfind('/')] + resource_route\n path_to_current_file = os.path.dirname(os.path.abspath(__file__))\n images_path = os.path.join(path_to_current_file, 'static', 'img')\n directory_list = os.listdir(images_path)\n image_files = [f for f in directory_list if os.path.isfile(os.path.join(images_path, f))]\n image_files.sort()\n if '.gitignore' in image_files:\n image_files.remove('.gitignore')\n full_image_paths = [file_request_path + f for f in image_files]\n response_code = 200\n return make_response(jsonify({'files': full_image_paths}), response_code)", "def test_list_summary_directories_by_pagination(self):\n summary_base_dir = tempfile.mkdtemp()\n file_count = 10\n directory_count = 10\n gen_directories_and_files(summary_base_dir, file_count, directory_count)\n\n summary_watcher = SummaryWatcher()\n total, directories = summary_watcher.list_summary_directories_by_pagination(\n summary_base_dir, offset=0, limit=10)\n\n if (file_count + 1) * directory_count + file_count >= SummaryWatcher.MAX_SCAN_COUNT:\n expected_directory_count = math.ceil((SummaryWatcher.MAX_SCAN_COUNT - file_count) / (file_count + 1) + 1)\n assert total == len(directories) == expected_directory_count\n else:\n expected_directory_count = directory_count + 1\n assert total == min(expected_directory_count, SummaryWatcher.MAX_SUMMARY_DIR_COUNT)\n\n shutil.rmtree(summary_base_dir)", "def getImages(self,Project=\"\"):\n #images = [\"image1.jpg\",\"image2.jpg\",\"image3.jpg\"]\n \n os.chdir(self.dataDir)\n images = glob.glob(\"*.png\")\n \n return images", "def add_images(self, images_list, show_pbar=False):\n filenames = []\n if show_pbar:\n images_list = tqdm(images_list)\n for image in images_list:\n filenames.append(self.add_image(image))\n return filenames", "def count(self):\n \n return len(self.img_lst)", "def create_thumbnails():\n bucket = BASE_BUCKET + ARG.MANIFOLD\n result = S3_CLIENT.list_objects(Bucket=bucket, Prefix=PREFIX + \"/\", 
Delimiter=\"/\")\n lev1 = result.get('CommonPrefixes')\n for lev1pre in tqdm(lev1, desc=\"Prefixes\"):\n bpre = lev1pre.get('Prefix').split(\"/\")[-2]\n COUNT[\"Prefixes\"] += 1\n #result2 = S3_CLIENT.list_objects(Bucket=bucket, Prefix=\"/\".join([PREFIX, bpre]) + \"/\",\n # Delimiter=\"/\")\n paginator = S3_CLIENT.get_paginator(\"list_objects\")\n pages = paginator.paginate(Bucket=bucket, Prefix=\"/\".join([PREFIX, bpre]) + \"/\",\n Delimiter=\"/\")\n for page in pages:\n COUNT[\"Pages\"] += 1\n lev2 = page.get('CommonPrefixes')\n for lev2pre in lev2:\n body = lev2pre.get('Prefix').split(\"/\")[-2]\n COUNT[\"Body IDs\"] += 1\n if ARG.WRITE:\n invoke_lambda(bucket, body)\n else:\n LOGGER.debug(\"/\".join([bucket, bpre, body]))\n print(COUNT)", "def get_photos_by_category(self, category_id, count = 30, page = 1):\n uri = 'categories/' + category_id + '/photos'\n options = { 'per_page': count, 'page': page }\n return self.make_request(uri, options)", "def get_albums(self, ctx, page, templ_vars):\n if 'type' in page.meta and page.meta['type'] == 'index':\n album_pages = sorted(\n templ_vars['site']['categories']['gallery'],\n key=lambda album: album['datetime'],\n )\n albums = {}\n for album_page in album_pages:\n image_list = []\n images = map(\n lambda i: i['thumb_src'],\n self.albums[album_page['slug']]\n )\n image_list += images[:PREVIEW_IMGS_NUM]\n albums[album_page['slug']] = image_list\n templ_vars['site']['albums'] = albums", "def collect_image_files():\n negs = [] # Non image files found\n for filename in os.listdir('.'):\n if filename.lower().endswith('.jpg') or filename.lower().\\\n endswith('.jpeg'):\n jpg_files.append(filename)\n elif filename.lower().endswith('.gif'):\n gif_files.append(filename)\n elif filename.lower().endswith('.png'):\n png_files.append(filename)\n else:\n negs.append(filename)\n return negs", "def extract_images(dimension = (_HEIGHT, _WIDTH), n = 100, color = True, include = ['00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '10', '11', '12', '13']):\n # establish directory routes\n origin = '/Users/jaoming/Active Projects/Shopee Challenge/shopee-product-detection-dataset'\n main_train_folder = '/Users/jaoming/Active Projects/Shopee Challenge/shopee-product-detection-dataset/train/train'\n os.chdir(main_train_folder)\n if color:\n imread_color = cv2.IMREAD_COLOR\n else:\n imread_color = cv2.IMREAD_GRAYSCALE\n\n # setting up the variables \n data, labels = [], []\n for name in include:\n os.chdir(name)\n image_namelist = os.listdir()\n if '.DS_Store' in image_namelist: # removing unnecessary files\n image_namelist.remove('.DS_Store')\n count = 0\n while count < n:\n data.append(cv2.resize(\n cv2.imread(image_namelist[count], imread_color),\n dimension,\n interpolation = cv2.INTER_CUBIC\n ))\n labels.append(int(name))\n count += 1\n os.chdir(main_train_folder)\n\n os.chdir(origin)\n return data, labels", "def CreateDirs(self):\n# First, create a list of directories.\n dnames = []\n tags = ['', '_m', '_mf']\n for entry in self.info.keys():\n if self.info[entry]['type'] == 'epi':\n for tag in tags:\n fname = self.info[entry].get('imgfile%s' % tag, None)\n if fname is not None:\n dnames.append(os.path.dirname(fname))\n else:\n if self.info[entry].get('outdir',None) is not None:\n dnames.append(self.info[entry]['outdir'])\n\n# Create them if they don't already exist.\n for dname in dnames:\n if not os.path.exists(dname):\n self.MakeDir(dname)\n if self.verbose:\n print 'mkdir %s' % dname", "def _get_filenames_and_classes(dataset_dir):\n # print 'DATASET 
DIR:', dataset_dir\n # print 'subdir:', [name for name in os.listdir(dataset_dir)]\n # dataset_main_folder_list = []\n # for name in os.listdir(dataset_dir):\n # \tif os.path.isdir(name):\n # \t\tdataset_main_folder_list.append(name)\n dataset_main_folder_list = [name for name in os.listdir(dataset_dir) if os.path.isdir(os.path.join(dataset_dir,name))]\n dataset_root = os.path.join(dataset_dir, dataset_main_folder_list[0])\n directories = []\n class_names = []\n for filename in os.listdir(dataset_root):\n path = os.path.join(dataset_root, filename)\n if os.path.isdir(path):\n directories.append(path)\n class_names.append(filename)\n \n count = 0\n #print(directories)\n for directory in directories:\n #print(directory)\n #continue\n for filename in os.listdir(directory):\n print(filename)\n path = os.path.join(directory, filename)\n\n im = Image.open(path)\n imResize = im.resize((28,28), Image.ANTIALIAS)\n imResize.save(path, 'bmp')\n print(count)\n count = count + 1\n \n\n\n \n return" ]
[ "0.69187784", "0.6491537", "0.6410648", "0.63387346", "0.63301575", "0.60764897", "0.6062333", "0.6053507", "0.60286975", "0.6016352", "0.59886175", "0.58583134", "0.58411384", "0.5840281", "0.5804456", "0.57886666", "0.57884914", "0.5774842", "0.57680684", "0.5757683", "0.57132035", "0.5690047", "0.56581336", "0.56508267", "0.5646583", "0.5643855", "0.5633463", "0.56324136", "0.5629087", "0.56202716", "0.5584638", "0.557275", "0.5544078", "0.5538867", "0.5533944", "0.5533375", "0.5525765", "0.55251354", "0.55160576", "0.5515343", "0.55114174", "0.5510107", "0.5500884", "0.5500316", "0.5484674", "0.5483084", "0.54828763", "0.54668313", "0.54660773", "0.5457693", "0.54542834", "0.54542327", "0.54490286", "0.5447824", "0.5443793", "0.5441895", "0.5440358", "0.54397976", "0.54192823", "0.5410818", "0.54084605", "0.5407439", "0.53951", "0.5393318", "0.53909767", "0.53875697", "0.5386681", "0.53834", "0.53778183", "0.53654313", "0.5356159", "0.5350965", "0.5341321", "0.533685", "0.5333143", "0.5326544", "0.5322532", "0.5314432", "0.5311877", "0.53086185", "0.5305487", "0.53028935", "0.5300332", "0.5295278", "0.5295068", "0.52932906", "0.5288887", "0.5280881", "0.5280756", "0.5278832", "0.5276379", "0.52645344", "0.52634794", "0.52634305", "0.5261275", "0.52581835", "0.5257924", "0.525781", "0.5256439", "0.5254556" ]
0.5316852
77
Method opening all images to test their validity.
def verify_images(root_dir, root_listdir):\n    counter = 0\n    for index, image_dir in enumerate(root_listdir):\n        images_listdir = os.listdir(root_dir + "/" + image_dir)\n        list_of_images_indices = [\n            image_index for image_index in range(3, len(images_listdir) - 1)\n            if image_index % 2 == 0\n        ]\n        for image_ind in list_of_images_indices:\n            filename = root_dir + "/" + image_dir + "/" + images_listdir[image_ind]\n            try:\n                im = Image.open(filename)\n                im.verify()\n                im.close()\n            except (OSError, ValueError):\n                counter += 1\n    print("%d files caused error due to OSError and ValueError." % counter)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_all_images(self):\n self.roses.save_image()\n all_images = Images.get_all_images()\n self.assertTrue(len(all_images)<1)", "def load_from_images(self):\n logging.debug(\"load_from_images called\")\n return True", "def images_exist(self):\n pass", "def test_read(self):\n for line in TESTIMAGES.split('\\n'):\n vals = line.strip().split()\n name = vals[0]\n logger.debug(\"Testing file %s\" % name)\n dim1, dim2 = [int(x) for x in vals[1:3]]\n mini, maxi, mean, stddev = [float(x) for x in vals[3:]]\n obj = raxisimage()\n obj.read(os.path.join(os.path.dirname(self.mar), name))\n\n self.assertAlmostEqual(mini, obj.getmin(), 2, \"getmin [%s,%s]\" % (mini, obj.getmin()))\n self.assertAlmostEqual(maxi, obj.getmax(), 2, \"getmax [%s,%s]\" % (maxi, obj.getmax()))\n self.assertAlmostEqual(mean, obj.getmean(), 2, \"getmean [%s,%s]\" % (mean, obj.getmean()))\n self.assertAlmostEqual(stddev, obj.getstddev(), 2, \"getstddev [%s,%s]\" % (stddev, obj.getstddev()))\n self.assertEqual(dim1, obj.dim1, \"dim1\")\n self.assertEqual(dim2, obj.dim2, \"dim2\")\n self.assertNotEqual(obj.dim1, obj.dim2, \"dim2!=dim1\")", "def check_files(self):\n print('checking files')\n for f in self.filenames:\n img = cv2.imread(f, int(self.color))\n if img is None:\n os.remove(f)", "def check_files(self):\n print('checking files')\n for f in tqdm(self.filenames):\n img = cv2.imread(f, int(self.color))\n if img is None:\n os.remove(f)", "def testImagesPresent(self):\n\n result = self.app.get('/')\n\n images = result.html.find_all('img')\n\n # expect to find three images\n self.assertEqual(3, len(images), \"Wrong number of images found\")\n\n flowtows = result.html.find_all(class_='flowtow')\n\n image_list = self.images\n\n self.assertEqual(3, len(flowtows))\n\n # each contains the image, date, author and likes\n for index in range(3):\n div = flowtows[index]\n (path, date, user, likes) = image_list[index]\n\n self.assertIn(date, div.text)\n self.assertIn(user, div.text)\n # look for the number of likes\n self.assertIn(str(len(likes)+1), div.text, \"expected to find %d likes mentioned in:\\n\\n%s\" % (len(likes), div))\n\n # look for just one image\n img = div.find_all('img')\n self.assertEqual(1, len(img))", "def check_files(self):\n for f in self.filenames:\n img = cv2.imread(f, int(self.color))\n if img is None:\n os.remove(f)", "def check_images():\n saved_stdout, saved_stderr = sys.stdout, sys.stderr\n\n out, err = StringIO(), StringIO()\n try:\n sys.stdout, sys.stderr = out, err\n check_images_main()\n except SystemExit:\n pass\n finally:\n stdout, stderr = out.getvalue().strip(), err.getvalue().strip()\n sys.stdout, sys.stderr = saved_stdout, saved_stderr\n\n return stdout, stderr", "def test_is_image(self):\n os.chdir(\"testimages/\")\n self.assertTrue(fileactions.is_image(\"arch_001.jpg\"))\n self.assertFalse(fileactions.is_image(\"not_an_image.jpg\"))", "def test_read(self):\n for line in TESTIMAGES.split(\"\\n\"):\n vals = line.split()\n name = vals[0]\n dim1, dim2 = [int(x) for x in vals[1:3]]\n mini, maxi, mean, stddev = [float(x) for x in vals[3:]]\n obj = marccdimage()\n obj.read(self.fn[name])\n self.assertAlmostEqual(mini, obj.getmin(), 2, \"getmin\")\n self.assertAlmostEqual(maxi, obj.getmax(), 2, \"getmax\")\n self.assertAlmostEqual(mean, obj.getmean(), 2, \"getmean\")\n self.assertAlmostEqual(stddev, obj.getstddev(), 2, \"getstddev\")\n self.assertEqual(dim1, obj.dim1, \"dim1\")\n self.assertEqual(dim2, obj.dim2, \"dim2\")", "def number_of_images_a_valid():\r\n counter = 0\r\n with 
os.scandir(os.path.join(dir_path, \"inputs\", \"type_a\")) as filepaths:\r\n for path in filepaths:\r\n extension = os.path.splitext(path)[1].lower()\r\n if extension == \".png\" or extension == \".jpg\":\r\n counter += 1\r\n if counter >= int(number_of_images_a.get()):\r\n return True\r\n else:\r\n messagebox.showwarning(\"Invalid Image Inputs\", (\r\n \"Not enough images of type a to create \"\r\n \"requested grid.\"))\r\n return False", "def get_image_list(source_dir):\n\n dir_list = os.path.os.listdir(source_dir)\n# print(dir_list)\n image_list = []\n os.chdir(source_dir)\n for file in dir_list:\n print(\"Inspecting.... : {}\".format(file))\n\n try:\n if Image.open(file).format:\n image_list.append(file)\n print(\"{} : is an image\".format(file))\n except Exception as e:\n print(\"{} : failed the imageness test.i \\n {}\".format(file, e))\n continue\n\n# print(image_list)\n return image_list", "def _iter_images(self):\n raise NotImplementedError", "def check_image_dimensions(image_paths, image_height, image_width):\n logging.info('Using image height, width %s', str((image_height, image_width)))\n\n bad_images = []\n\n for path in image_paths:\n logging.info('Trying to read image %s', path)\n image = microscopeimagequality.dataset_creation.read_16_bit_greyscale(path)\n\n if image.shape[0] < image_height or image.shape[1] < image_width:\n bad_images.append(path)\n logging.info('Image %s dimension %s is too small.', path, str(image.shape))\n\n logging.info('Done checking images')\n\n logging.info('Found %d bad images.', len(bad_images))\n\n if bad_images:\n raise ValueError('Found %d bad images! \\n %s' % (len(bad_images), '\\n'.join(bad_images)))", "def checksImages(self):\n metadata=[]\n for image in self.meta['sources']:\n with rasterio.open(image) as src:\n metaData=src.meta\n \n assert metaData['driver'] == 'GTiff', \"Driver is not supported: {0}\".format(metaData['driver'])\n assert metaData['count'] == len(self.meta['bandNames']), \"Nbands incorrect, expected: {0}, {1} provided\".format(metaData['count'],len(self.meta['bandNames']))\n \n metadata.append({'dtype': metaData['dtype'], 'driver': metaData['driver'], 'nodata': metaData['nodata'], 'nBands': metaData['count'],'crs': src.crs.to_string()})\n \n assert len(set([item['dtype'] for item in metadata])) == 1, \"Images list dtypes aren't compatibles. Expected: 1, {1} provided\".format(metaData['count'],len(set([item['dtype'] for item in metadata])))\n assert len(set([item['driver'] for item in metadata])) == 1, \"Images list drivers aren't compatibles. Expected: 1, 1 provided\".format(metaData['count'],len(set([item['driver'] for item in metadata])))\n assert len(set([item['nodata'] for item in metadata])) == 1, \"Images list nodata values aren't compatibles. Expected: 1, {1} provided\".format(metaData['count'],len(set([item['nodata'] for item in metadata])))\n assert len(set([item['nBands'] for item in metadata])) == 1, \"Images list nBands number aren't compatibles. Expected: 1, {1} provided\".format(metaData['count'],len(set([item['nBands'] for item in metadata])))\n assert len(set([item['crs'] for item in metadata])) == 1, \"Images list crs aren't compatibles. 
Expected: 1, {1} provided\".format(metaData['count'],len(set([item['crs'] for item in metadata]))) \n return metadata[0]", "def test_read_image(self):\n pass", "def test_image_links(self):\r\n print('\\nTest image links: ', end='', flush=True)\r\n driver = self.driver\r\n driver.get(MY_URL)\r\n all_images = driver.find_elements_by_tag_name('img')\r\n for image in all_images:\r\n src = image.get_attribute('src')\r\n alt = image.get_attribute('alt')\r\n r = requests.get(src)\r\n assert r.status_code == 200, 'Bad http status (%d) for %s' % (r.status_code, src)\r\n assert len(alt) > 0, 'Missing or empty alt tag for %s' % (src)\r\n print('.', end=\"\", flush=True)\r\n if DEBUG:\r\n print ('Src=%s' % src)", "def scan_images(self):\n rtn = 0\n mime_list = self.db.get_mime_list()\n (results,count) = datastore.find({})\n for f in results:\n dict = f.get_metadata().get_dictionary()\n if dict[\"mime_type\"] in mime_list:\n #record the id, file size, file date, in_ds\n self.db.create_picture_record(f.object_id, f.get_file_path())\n rtn += 1\n f.destroy()\n self.db.commit()\n _logger.debug('%s entries found in journal. Number of pictures %s'%(count,rtn,))\n return rtn", "def _open_images(training_filenames, path):\n imagePaths=[os.path.join(path,f) for f in training_filenames]\n faces=[]\n for i, imagePath in enumerate(imagePaths):\n faceImg=Image.open(imagePath).convert('L')\n faceNp=np.array(faceImg,'uint8')\n faces.append(faceNp)\n return faces", "def main():\n base_dir = '/home/sjimenez/imagenes_prueba'\n out_dir = '/home/sjimenez/easy_analysis'\n for _, _, files in os.walk(base_dir, topdown=False):\n for f in files:\n print('--------- {} ---------'.format(f))\n act_dir = osp.join(base_dir, f)\n act_im = cv2.imread(act_dir)\n if act_im is not None:\n get_image_stats(act_im, out_dir, f)\n else:\n print('Not able to open the image')", "def __loadImage(self, parameters):\n # self.localConfigured = Settings.instance().readValue( key = 'Common/local-repo' )\n for pr in parameters:\n if pr['type'] == 'image':\n if pr['value'].startswith('undefined:/'):\n fileName = pr['value'].split('undefined:/')[1]\n if not os.path.exists( fileName ):\n raise Exception(\"the following image file is missing: %s \" % fileName)\n\n file = QFile(fileName)\n if not file.open(QIODevice.ReadOnly):\n raise Exception(\"error opening image file %s\" % fileName )\n else:\n imageData= file.readAll()\n pr['value'] = \"undefined:/%s\" % base64.b64encode(imageData)\n elif pr['value'].startswith('local-tests:/'):\n fileName = pr['value'].split('local-tests:/')[1]\n\n if not os.path.exists( fileName ):\n raise Exception(\"the following image file is missing: %s \" % fileName)\n \n file = QFile(fileName)\n if not file.open(QIODevice.ReadOnly):\n raise Exception(\"error opening image file %s\" % fileName )\n else:\n imageData= file.readAll()\n pr['value'] = \"local-tests:/%s\" % base64.b64encode(imageData)\n else:\n pass", "def test_list_image(self):\n pass", "def test_image(self):\n browser = self.layer.get_web_browser(smi_settings)\n\n image = self.layer.get_fixture('torvald.jpg')\n browser.login(self.username, self.username)\n self.assertEqual(browser.open('/root/edit'), 200)\n browser.macros.create(\n 'Silva Image', id='image', title='Torvald', file=image)\n self.assertEqual(\n browser.inspect.folder_listing, ['index', 'image'])\n\n # The user should by the last author on the content and container.\n self.assertEqual(\n self.root.sec_get_last_author_info().userid(),\n self.username)\n self.assertEqual(\n 
self.root.image.sec_get_last_author_info().userid(),\n self.username)\n\n # Visit the edit page\n self.assertEqual(\n browser.inspect.folder_listing['image'].click(),\n 200)\n self.assertEqual(browser.location, '/root/image/edit/tab_edit')\n\n # Change title\n form = browser.get_form('silvaObjects')\n self.assertEqual(\n form.get_control('field_image_title').value,\n 'Torvald')\n form.get_control('field_image_title').value = u'Picture of Torvald'\n form.get_control('submit:method').click()\n self.assertEqual(browser.inspect.feedback, ['Changes saved.'])\n\n # Change format\n form = browser.get_form('editform.scaling')\n self.assertEqual(form.get_control('field_web_format').value, 'JPEG')\n form.get_control('field_web_format').value = 'PNG'\n form.get_control('scale_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Scaling and/or format changed.'])\n\n # Change scaling\n form = browser.get_form('editform.scaling')\n form.get_control('field_web_scaling').value = '100x200'\n form.get_control('scale_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Scaling and/or format changed.'])\n\n # Change image\n form = browser.get_form('editform.upload')\n form.get_control('field_file').value = image\n form.get_control('upload_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Image updated.'])\n\n self.assertEqual(\n browser.inspect.breadcrumbs,\n ['root', 'Picture of Torvald'])\n browser.inspect.breadcrumbs['root'].click()\n browser.macros.delete('image')", "def __init__(self, data_dir, file_prefix, num_images):\n self.file_prefix = file_prefix\n self.files = [os.path.join(data_dir, '%s%03d.jpg' % (file_prefix, i + 1)) for i in range(num_images)]\n self.files = list(filter(os.path.exists, self.files))\n self.panoramas = None\n self.homographies = None\n self.images = []\n self.display_match = False\n self.useBlending = False\n print('found %d images' % len(self.files))", "def _compare_images(self, ax, filename, tol=10):\n assert isinstance(ax, Artist)\n if GENERATE_BASELINE:\n savefig(os.path.join(BASELINE_DIR, filename))\n savefig(os.path.join(self.tempdir, filename))\n err = compare_images(os.path.join(BASELINE_DIR, filename),\n os.path.join(self.tempdir, filename),\n tol, in_decorator=True)\n if err:\n raise ImageComparisonFailure('images not close: %(actual)s '\n 'vs. 
%(expected)s '\n '(RMS %(rms).3f)' % err)", "def initImages(self):\n pass", "def initImages(self):\n pass", "def initImages(self):\n pass", "def check_images():\n\n print(f'Looking for duplicate images...')\n\n for image in images_in_directory:\n duplicate = check_image_for_duplicates(image)\n\n if (duplicate):\n print(f'Found {duplicate} to be a duplicate image of: {image}')\n remove_image(duplicate)\n pass", "def __init__(self, data_dir, file_prefix, num_images):\n self.file_prefix = file_prefix\n self.files = [os.path.join(data_dir, '%s%03d.jpg' % (file_prefix, i + 1)) for i in range(num_images)]\n self.files = list(filter(os.path.exists, self.files))\n self.panoramas = None\n self.homographies = None\n print('found %d images' % len(self.files))", "def readImages(self):\r\n\r\n #Read the file camera.csv for the image file name\r\n lines = [line.strip() for line in open(self.cameraFile)]\r\n i = 0;\r\n\tself.centers = []\r\n\tself.lefts = []\r\n\tself.rights = []\r\n\r\n for line in lines:\r\n info = line.split(',')\r\n \r\n\r\n if info[0] == 'seq':\r\n i += 1\r\n continue\r\n \r\n if info[4] == 'left_camera':\r\n self.lefts.append(info)\r\n if info[4] == 'center_camera':\r\n self.centers.append(info)\r\n if info[4] == 'right_camera':\r\n self.rights.append(info)\r\n i += 1\r\n\r\n print \"Total Frames: %d \" % (len(self.centers))", "def number_of_images_valid():\r\n if number_of_images_a_valid() and number_of_images_b_valid():\r\n return True\r\n else:\r\n return False", "def main():\n \n # for inserting other images, add tem to /input folder and list them here\n images = (\n 'image-0',\n 'image-1',\n 'image-2'\n )\n\n for image_name in images:\n print(image_name, \"image:\")\n\n image = open_image(image_name)\n display_image(image, \"Original input \" + image_name)\n\n grayscale_v = transform_colors(image)\n display_image(grayscale_v[:,:,0], \"Grayscale \" + image_name)\n save_image(image_name + \"-grayscale\", grayscale_v[:,:,0])\n\n contours_v, contours = get_contours(grayscale_v)\n display_image(contours_v, \"Contours \" + image_name)\n save_image(image_name + \"-contours\", contours_v)\n\n labeled_img, areas = get_measures(image, contours[1:])\n display_image(labeled_img, \"Labeled \" + image_name)\n save_image(image_name + \"-labeled\", labeled_img)\n\n areas_histogram(areas, image_name)", "def images_are_present(file_info):\n currentdir = os.path.join(WORKDIR, file_info['folder'])\n if not os.path.exists(currentdir):\n return False\n count = len([x for x in os.listdir(currentdir) if x.endswith('.png')])\n if count != file_info['size']:\n print([x for x in os.listdir(currentdir) if x.endswith('.png')])\n print('Count does not match')\n print(count)\n print(file_info['size'])\n return False\n return True", "def openFile(self):\r\n from SXM import FileIO,Data\r\n fname = str(QFileDialog.getOpenFileName(self.widget,self.tr(\"Open File\"), \\\r\n \".\",FileIO.getFilterString(types=(Data.Image,))))\r\n if len(fname) > 0:\r\n root, ext = os.path.splitext(fname)\r\n self.statusBar().showMessage(self.tr(\"Loading data: %1\").arg(fname),2000)\r\n image = FileIO.fromFile(fname)\r\n image.load()\r\n imwin = ImageWindow(self,image)\r\n self.Images.append(imwin)\r\n self.updateImageList()\r\n imwin.windowModality = False\r\n imwin.show()", "def test_check_color_and_image_input(self):\n\n from m3_save_images.m3_save_images import check_color_and_image_input\n valid_path = \"../img\"\n invalid_path = \"../imgfab7841\"\n valid_image = \"citrony.jpg\"\n invalid_image = \"citrony87465.jpg\"\n 
valid_image_color = \"White\"\n invalid_image_color = \"White45781\"\n\n # self.assertTrue(check_path_and_img-_input(valid_path, valid_image) is None)\n with self.assertRaises(Exception):\n check_color_and_image_input(invalid_path, valid_image, valid_image_color)\n with self.assertRaises(SystemExit):\n check_color_and_image_input(valid_path, invalid_image, valid_image_color)\n with self.assertRaises(SystemExit):\n check_color_and_image_input(valid_path, valid_image, invalid_image_color)\n self.assertTrue(check_color_and_image_input(valid_path, valid_image, valid_image_color) is None)", "def test_full_resize(self):\n number_of_pixels = 300\n destination = base_path +'/test_data/rendering_tests/resized_images/'\n source_folder = base_path + '/test_data/rendering_tests/filter_database/'\n\n\n for the_file in os.listdir(destination):\n file_path = os.path.join(destination, the_file)\n if os.path.isfile(file_path):\n os.unlink(file_path)\n\n\n self.assertEqual(0, len(os.listdir(destination)))\n rb.find_all_files(number_of_pixels,source_folder, destination)\n self.assertEqual(6, len(os.listdir(destination)))\n for the_file in os.listdir(destination):\n file_path = os.path.join(destination,the_file)\n with Image.open(file_path) as f:\n self.assertNotEqual(number_of_pixels+5, f.size[0])\n self.assertNotEqual(number_of_pixels+5, f.size[1])\n # the above checks that the size does not vary as needed\n # probably not necessary\n self.assertEqual(number_of_pixels, f.size[0])\n self.assertEqual(number_of_pixels, f.size[1])", "def load(self, dirname):\n loaded_filenames = set()\n ini_filename = os.path.join(dirname, \"xpresser.ini\")\n if os.path.exists(ini_filename):\n config = ConfigParser.ConfigParser()\n config.read(ini_filename)\n for section_name in config.sections():\n if section_name.startswith(\"image \"):\n image_name = section_name.split(None, 1)[1]\n try:\n image_filename = config.get(section_name, \"filename\")\n except ConfigParser.NoOptionError:\n raise ImageDirError(\"Image %s missing filename option\"\n % image_name)\n image_filename = os.path.join(dirname, image_filename)\n if not os.path.exists(image_filename):\n raise ImageDirError(\"Image %s file not found: %s\" %\n (image_name, image_filename))\n try:\n image_similarity = config.getfloat(section_name,\n \"similarity\")\n except ConfigParser.NoOptionError:\n image_similarity = None\n except ValueError:\n value = config.get(section_name, \"similarity\")\n raise ImageDirError(\"Image %s has bad similarity: %s\"\n % (image_name, value))\n \n try:\n value = config.get(section_name, \"focus_delta\")\n match = CLICK_POSITION_RE.match(value)\n if not match:\n raise ImageDirError(\"Image %s has invalid click \"\n \"position: %s\" %\n (image_name, value))\n image_focus_delta = (int(match.group(\"x\")),\n int(match.group(\"y\")))\n except ConfigParser.NoOptionError:\n image_focus_delta = None\n image = Image(name=image_name,\n filename=image_filename,\n similarity=image_similarity,\n focus_delta=image_focus_delta)\n self._images[image_name] = image\n loaded_filenames.add(image_filename)\n\n # Load any other images implicitly with the default arguments.\n for basename in os.listdir(dirname):\n filename = os.path.join(dirname, basename)\n if filename not in loaded_filenames:\n ftype, fencoding = mimetypes.guess_type(filename)\n if ftype and ftype.startswith(\"image/\"):\n image_name = os.path.splitext(basename)[0]\n self._images[image_name] = Image(name=image_name,\n filename=filename)", "def load_images(self, image_paths):\n \n fill_list = []\n 
\n for idx in tqdm(range(len(image_paths))):\n path = image_paths[idx]\n yield cv2.imread(path)", "def _verify(self) -> None:\n # Check if the files already exist\n if os.path.exists(os.path.join(self.root, self.image_root)):\n return\n\n # Check if .zip files already exists (if so extract)\n exists = []\n for filename, md5 in zip(self.filenames, self.md5s):\n filepath = os.path.join(self.root, filename)\n if os.path.isfile(filepath):\n if self.checksum and not check_integrity(filepath, md5):\n raise RuntimeError(\"Dataset found, but corrupted.\")\n exists.append(True)\n extract_archive(filepath)\n else:\n exists.append(False)\n\n if all(exists):\n return\n\n # Check if the user requested to download the dataset\n raise RuntimeError(\n \"Dataset not found in `root` directory, either specify a different\"\n + \" `root` directory or manually download the dataset to this directory.\"\n )", "def open_images_in(directory):\n\n files = [\n filename\n for filename in os.listdir(directory)\n if \"_\" in filename and not filename.startswith(\"joined\")\n ]\n tiles = []\n if len(files) > 0:\n i = 0\n for file in files:\n pos = get_image_column_row(file)\n im = Image.open(os.path.join(directory, file))\n\n position_xy = [0, 0]\n count = 0\n for a, b in zip(pos, im.size):\n position_xy[count] = a * b\n count = count + 1\n tiles.append(\n Tile(\n image=im,\n position=pos,\n number=i + 1,\n coords=position_xy,\n filename=file,\n )\n )\n i = i + 1\n return tiles", "def test_read(self):\n for root, dirs, files in os.walk(os.path.join(self.test_dir, 'files')):\n for filename in files:\n if filename.endswith('.bin'):\n d = Dataset(os.path.join(root, filename))\n data = d.as_dict()\n for freq_dict in data['frequencies']:\n x = freq_dict['easting']\n y = freq_dict['northing']\n image = freq_dict['intensity']\n self.assertIsInstance(x, np.ndarray)\n self.assertIsInstance(y, np.ndarray)\n self.assertIsInstance(image, np.ndarray)", "def load_image(self, image):\n if isinstance(image, Image.Image):\n return image\n\n img = None\n for folder in self.reference_folders:\n try:\n return Image.open(os.path.join(folder, image))\n except IOError:\n pass\n\n self.error_handler.report_error(\"Not opened image {} in [{}]\".format(image, \", \".join(self.reference_folders)))\n raise errors.CanNotOpenImageException(image)", "def test_image(self):\r\n self.testdata = open(TESTDATA_FILENAME).read()", "def number_of_images_b_valid():\r\n counter = 0\r\n with os.scandir(os.path.join(dir_path, \"inputs\", \"type_b\")) as filepaths:\r\n for path in filepaths:\r\n extension = os.path.splitext(path)[1].lower()\r\n if extension == \".png\" or extension == \".jpg\":\r\n counter += 1\r\n if ((number_of_images_b.get() == \"\") or\r\n (counter >= int(number_of_images_b.get()))):\r\n return True\r\n else:\r\n messagebox.showwarning(\"Invalid Image Inputs\", (\r\n \"Not enough images of type b to create \"\r\n \"requested grid.\"))\r\n return False", "def errorChecks(self):\n stop_calculation = False\n found_error = False\n errors = {\"Info\": [], \"Critical\": []}\n error_types = []\n ori_images = 0\n of_images = 0\n depth_images = 0\n back_of_images = 0\n\n if os.path.exists(self.savePathJoin(\"Images\")):\n ori_images = len(\n listDirectory(self.savePathJoin(\"Images\"), extension=\"png\")\n )\n # Check image folder\n if self.img_exist and not os.path.exists(self.savePathJoin(\"Images\")):\n if os.path.exists(self.user[\"Video\"]):\n errors[\"Info\"].append(\n \"Images folder {0} doesn't exist -> Recreate it and recalculate optical flow 
and depth estimations\".format(\n self.savePathJoin(\"Images\")\n )\n )\n error_types.append(\"NoImages\")\n else:\n stop_calculation = True\n errors[\"Critical\"].append(\n (\n \"Images folder {0} and video file {1} don't exist -> Stopping run\".format(\n self.savePathJoin(\"Images\"), self.user[\"Video\"]\n )\n )\n )\n elif self.img_exist and os.path.exists(self.user[\"Video\"]):\n errors[\"Info\"].append(\n \"Both the video {0} and Images folder {1} exist -> using Images folder by default\".format(\n self.user[\"Video\"], self.savePathJoin(\"Images\")\n )\n )\n elif not self.img_exist and not os.path.isfile(self.user[\"Video\"]):\n stop_calculation = True\n errors[\"Critical\"].append(\n (\n \"Images folder {0} and video file {1} don't exist -> Stopping run\".format(\n self.savePathJoin(\"Images\"), self.user[\"Video\"]\n )\n )\n )\n\n # Check video file\n if self.user[\"Video\"] != \"\" and not os.path.isfile(self.user[\"Video\"]):\n if os.path.exists(self.savePathJoin(\"Images\")):\n errors[\"Info\"].append(\n (\n \"Video file {0} doesn't exist -> Using images in the Images folder instead\".format(\n self.user[\"Video\"]\n )\n )\n )\n else:\n stop_calculation = True\n errors[\"Critical\"].append(\n (\n \"Images folder {0} and video file {1} don't exist -> Stopping run\".format(\n self.savePathJoin(\"Images\"), self.user[\"Video\"]\n )\n )\n )\n elif os.path.isfile(self.user[\"Video\"]) and os.path.exists(\n self.savePathJoin(\"Images\")\n ):\n pass\n\n # Check optical flow\n if self.of_exist and not os.path.exists(self.savePathJoin(\"Of\")):\n errors[\"Info\"].append(\n (\n \"Optical flow folder {0} doesn't exist -> Recalculating optical flow\".format(\n self.savePathJoin(\"Of\")\n )\n )\n )\n error_types.append(\"NoOf\")\n elif self.of_exist:\n of_images = len(listDirectory(self.savePathJoin(\"Of\"), extension=\"png\"))\n if of_images != ori_images - 1 and ori_images != 0:\n errors[\"Info\"].append(\n (\n \"Optical flow image number {0} doesn't match video image number {1} - 1 -> Recalculating optical flow\".format(\n of_images, ori_images\n )\n )\n )\n error_types.append(\"NoOf\")\n\n # Check backward optical flow\n if self.back_of_exist and not os.path.exists(self.savePathJoin(\"Back_Of\")):\n errors[\"Info\"].append(\n (\n \"Backward optical flow folder {0} doesn't exist -> Recalculating backward optical flow\".format(\n self.savePathJoin(\"Back_Of\")\n )\n )\n )\n error_types.append(\"NoOf\")\n elif self.back_of_exist:\n back_of_images = len(\n listDirectory(self.savePathJoin(\"Back_Of\"), extension=\"png\")\n )\n if back_of_images != of_images:\n errors[\"Info\"].append(\n (\n \"Backward optical flow image number {0} doesn't match optical flow image number {1} -> Recalculating backward optical flow\".format(\n back_of_images, of_images\n )\n )\n )\n error_types.append(\"NoOf\")\n\n # Check depth estimation\n if self.depth_exist and not os.path.exists(self.savePathJoin(\"Depth\")):\n errors[\"Info\"].append(\n (\n \"Depth folder {0} doesn't exist -> Recalculating depth\".format(\n self.savePathJoin(\"Depth\")\n )\n )\n )\n error_types.append(\"NoDepth\")\n elif self.depth_exist:\n depth_images = len(\n listDirectory(self.savePathJoin(\"Depth\"), extension=\"png\")\n )\n if depth_images != ori_images and ori_images != 0:\n errors[\"Info\"].append(\n (\n \"Depth image number {0} doesn't match video image number {1} -> Recalculating depth\".format(\n depth_images, ori_images\n )\n )\n )\n error_types.append(\"NoDepth\")\n\n # Check ground truth\n if self.gt_exist and not 
os.path.isfile(self.user[\"GT\"]):\n errors[\"Info\"].append(\n (\n \"Ground Truth file {0} doesn't exist -> File won't be used\".format(\n self.user[\"GT\"]\n )\n )\n )\n error_types.append(\"NoGT\")\n\n # Check super pixel labels\n if (\n self.super_pixel_method != \"\"\n and os.path.exists(\n os.path.join(self.savePathJoin(\"Super_Pixel\"), self.super_pixel_method)\n )\n and ori_images != 0\n and len(\n listDirectory(\n os.path.join(\n self.savePathJoin(\"Super_Pixel\"), self.super_pixel_method\n ),\n extension=\".npy\",\n )\n )\n != ori_images\n ):\n errors[\"Info\"].append(\n (\n \"Super pixel label number {0} doesn't match image number {1} -> Recalculating super pixel labels\".format(\n len(\n listDirectory(\n os.path.join(\n self.savePathJoin(\"Super_Pixel\"),\n self.super_pixel_method,\n ),\n extension=\".npy\",\n )\n ),\n ori_images,\n )\n )\n )\n error_types.append(\"LabelError\")\n\n # Check object detection\n if self.ui.c_object_detection.isChecked() and os.path.exists(\n self.savePathJoin(\"ObjectDetection\")\n ):\n if (\n len(\n listDirectory(\n self.savePathJoin(\"ObjectDetection\"), extension=\".png\"\n )\n )\n != ori_images\n ):\n errors[\"Info\"].append(\n \"Object Detection image number {0} doesn't match image number of video {1} -> Recalculating object detection\".format(\n len(\n listDirectory(\n self.savePathJoin(\"ObjectDetection\"), extension=\".png\"\n )\n ),\n ori_images,\n )\n )\n error_types.append(\"ObDetError\")\n elif (\n len(\n listDirectory(\n self.savePathJoin(\"ObjectDetection\"), extension=\".npy\"\n )\n )\n != ori_images\n ):\n errors[\"Info\"].append(\n \"Object Detection numpy array number {0} doesn't match image number of video {1} -> Recalculating object detection\".format(\n len(\n listDirectory(\n self.savePathJoin(\"ObjectDetection\"), extension=\".npy\"\n )\n ),\n ori_images,\n )\n )\n error_types.append(\"ObDetError\")\n\n answer = \"\"\n if len(errors[\"Info\"]) > 0 and len(errors[\"Critical\"]) == 0:\n msg = QMessageBox()\n msg.setIcon(QMessageBox.Information)\n msg.setText(\n \"Some calculations might not run the way you expect them.\\nIn show details check the right side of the arrows to see what will happen.\"\n )\n msg.setWindowTitle(\"Information\")\n all_info = \"\"\n for info in errors[\"Info\"]:\n all_info += info + \"\\n\\n\"\n msg.setDetailedText(all_info)\n msg.setStandardButtons(QMessageBox.Ok | QMessageBox.Abort)\n answer = msg.exec_()\n elif len(errors[\"Critical\"]) > 0:\n msg = QMessageBox()\n msg.setIcon(QMessageBox.Critical)\n msg.setText(\n \"Found critical error\\nCouldn't start run, see show details for more information\"\n )\n msg.setWindowTitle(\"Critical Error\")\n all_info = \"\"\n for info in errors[\"Critical\"]:\n all_info += info + \"\\n\"\n msg.setDetailedText(all_info)\n msg.setStandardButtons(QMessageBox.Abort)\n answer = msg.exec_()\n\n if answer != int(\"0x00040000\", 16):\n for ty in error_types:\n logging.info(\"Solve error: {0}\".format(ty))\n if ty == \"NoImage\":\n self.img_exist = False\n self.of_exist = False\n self.back_of_exist = False\n self.depth_exist = False\n elif ty == \"NoOf\":\n self.of_exist = False\n self.back_of_exist = False\n elif ty == \"NoDepth\":\n self.depth_exist = False\n elif ty == \"NoGT\":\n self.gt_exist = False\n self.user[\"GT\"] = \"\"\n elif ty == \"LabelError\":\n self.create_super_pixel_label = True\n shutil.rmtree(\n os.path.join(\n self.savePathJoin(\"Super_Pixel\"), self.super_pixel_method\n )\n )\n elif ty == \"ObDetError\":\n self.object_detection_dir_exist = False\n 
shutil.rmtree(self.savePathJoin(\"ObjectDetection\"))\n\n return answer == int(\"0x00040000\", 16) or stop_calculation", "def load_images(self, filename):\n\n self.images = self.load(filename)\n self.length = len(self.images)\n self.create_teacher()", "def onclick_open_image(self):\n filename = select_file(\n \"Select Image\",\n \"../\",\n \"Image Files (*.jpeg *.jpg *.png *.gif *.bmg)\")\n if filename:\n param_name = select_file(\n \"Select Parameter\", \"../\", \"Parameter Files (*.json)\")\n if param_name:\n self.moildev = Moildev(param_name)\n self.image = read_image(filename)\n self.h, self.w = self.image.shape[:2]\n self.show_to_window()", "def execute_file(self, event=None):\n file_list = self.get_path_list()\n print(file_list)\n if not file_list:\n return\n # merge image\n # 修复内存泄露的bug,由于没有清除之前打开的图片,第二次打开的图片仍然为之前的图片\n try:\n self.photos.destroy()\n except:\n pass\n self.photos.imgs = file_list \n merged_photo = self.photos.merge_photos()\n\n # show image\n try:\n window.destroy()\n except:\n import traceback\n traceback.print_exc()\n window.build_img_canvas()\n window.show_img_in_canvas(merged_photo)", "def load_images(self):\n for image in self.gltf.images:\n self.images.append(image.load(self.path.parent))", "def load_images(self):\n self.img_paths = sorted(glob(self.img_pattern))\n self.imgs = []\n for idx, this_path in enumerate(self.img_paths):\n try:\n this_img = cv2.imread(this_path)\n if self.downscale > 1:\n this_img = cv2.resize(this_img, (0, 0),\n fx=1/float(self.downscale),\n fy=1/float(self.downscale),\n interpolation=cv2.INTER_LINEAR)\n except Exception as e:\n print(\"error loading img: %s\" % (this_path))\n if this_img is not None:\n self.imgs.append(this_img)\n print(\"loaded img %d size=(%d,%d): %s\" %\n (idx, this_img.shape[0], this_img.shape[1], this_path))\n print(\"loaded %d images\" % (len(self.imgs)))", "def checkFiles(self):\n if self.user[\"Save\"] != \"\":\n self.of_exist = os.path.exists(os.path.join(self.user[\"Save\"], \"Of\"))\n self.back_of_exist = os.path.exists(\n os.path.join(self.user[\"Save\"], \"Back_Of\")\n )\n self.img_exist = os.path.exists(os.path.join(self.user[\"Save\"], \"Images\"))\n self.depth_exist = os.path.exists(os.path.join(self.user[\"Save\"], \"Depth\"))\n\n self.object_detection_dir_exist = os.path.exists(\n os.path.join(self.user[\"Save\"], \"ObjectDetection\")\n )\n\n self.gt_exist = self.user[\"GT\"] != \"\"\n\n self.create_super_pixel_label = (\n self.super_pixel_method != \"\"\n and not os.path.exists(\n os.path.join(self.savePathJoin(\"Super_Pixel\"), self.super_pixel_method)\n )\n )\n\n self.ui.c_crash_plot_video.setEnabled(self.ui.c_crash_plot.isChecked())\n self.ui.t_low.setEnabled(not self.ui.c_optimize.isChecked())\n self.ui.t_high.setEnabled(not self.ui.c_optimize.isChecked())\n self.ui.c_optimize.setEnabled(self.gt_exist)\n self.ui.c_error_plot.setEnabled(self.gt_exist)\n self.ui.c_error_plot_video.setEnabled(self.ui.c_error_plot.isChecked())\n self.ui.c_speed_plot_video.setEnabled(self.ui.c_speed_plot.isChecked())\n self.ui.c_super_pixel_video.setEnabled(\n self.ui.combo_superpixel.currentIndex() != 0\n )\n self.ui.c_csv.setEnabled(self.ui.c_error_plot.isChecked())\n\n if self.runRequirements():\n self.ui.b_run.setEnabled(True)\n else:\n self.ui.b_run.setEnabled(False)", "def check_image(df, fname_col, img_dir):\n\n\tfor filename in df[fname_col].values[0:4]:\n\n\t\tif not os.path.isfile(img_dir+filename):\n\t\t\tlogger.error(\"path {} does not exit\".format(img_dir+filename))\n\t\t\tsuccess = 
False\n\t\telse:\n\t\t\ttry:\n\t\t\t\timg = mpimg.imread(img_dir + filename)\n\t\t\t\tsuccess = True\n\t\t\texcept OSError:\n\t\t\t\tsuccess = False\n\t\t\t\tlogger.error(\"image is {} corrupted/missing\".\n\t\t\t\t\t\t\t\t\t\t\t\t\tformat(filename))\n\t\t\t\t\n\treturn success", "def checkImageDimensions(self, filenames):\n\t\ts = None\n\t\thashStr = filenames[:]\n\t\thashStr.sort()\n\t\thashStr = str(hashStr)\n\t\t# check to see if there's already a result of the check for these filenames in the cache\n\t\tif hashStr in self.dimensionCheck:\n\t\t\tLogging.info(\"Using cached result for dimensions check: %s\"%(str(self.dimensionCheck[hashStr])))\n\t\t\treturn self.dimensionCheck[hashStr]\n\t\t\t\n\t\tfor file in filenames:\n\t\t\tif file not in self.imageDims:\n\t\t\t\tprint \"Trying to open\",type(file)\n\t\t\t\ttry:\n\t\t\t\t\tself.ext = file.split(\".\")[-1].upper()\n\t\t\t\t\tif self.ext == \"TIF\":\n\t\t\t\t\t\tself.ext = \"TIFF\"\n\t\t\t\t\tif self.ext == \"JPG\":\n\t\t\t\t\t\tself.ext = \"JPEG\"\n\n\t\t\t\t\tif self.ext == \"VTI\":\n\t\t\t\t\t\treader = vtk.vtkXMLImageReader()\n\t\t\t\t\telse:\n\t\t\t\t\t\treader = eval(\"vtk.vtk%sReader()\"%self.ext)\n\t\t\t\t\treader.SetFileName(file)\n\t\t\t\t\treader.UpdateInformation()\n\t\t\t\texcept IOError, ex:\n\t\t\t\t\ttraceback.print_exc()\n\t\t\t\t\traise Logging.GUIError(\"Cannot open image file\", \"Cannot open image file %s\" % file)\n\n\t\t\t\textent = reader.GetDataExtent()\n\t\t\t\tfSize = (extent[1],extent[3])\n\t\t\t\tself.imageDims[file] = fSize\n\t\t\telse:\n\t\t\t\tfSize = self.imageDims[file]\n\t\t\tif s and fSize != s:\n\t\t\t\tx0, y0 = s\n\t\t\t\tx1, y1 = fSize\n\t\t\t\tself.dimensionCheck[hashStr] = False\n\t\t\t\treturn 0\n\t\t\ts = fSize \n\t\t\tfn = file\n\t\tself.dimensionCheck[hashStr] = True\n\t\treturn 1", "def load_images(image_filename):\n\n # Write code here to loop over image data and populate DB.", "def gather_images():\n # Import an empty image\n null_img = Image.open('assests/null/null.png')\n null_img = ImageTk.PhotoImage(null_img.resize((100,100), Image.ANTIALIAS))\n\n # Import image and icon for X\n X_img = Image.open('assests/X_Assets/X.png')\n X_icon = ImageTk.PhotoImage(X_img.resize((15, 12), Image.ANTIALIAS))\n X_img = ImageTk.PhotoImage(X_img.resize((95, 80), Image.ANTIALIAS))\n\n # Import horizontally striked X\n X_hor = Image.open('assests/X_Assets/X_hor.png')\n X_hor = ImageTk.PhotoImage(X_hor.resize((95, 80), Image.ANTIALIAS))\n\n # Import vertically striked X\n X_vert = Image.open('assests/X_Assets/X_vert.png')\n X_vert = ImageTk.PhotoImage(X_vert.resize((95, 80), Image.ANTIALIAS))\n\n # Import diagonally strikedX\n X_diag = Image.open('assests/X_Assets/X_diag.png')\n X_diag = ImageTk.PhotoImage(X_diag.resize((95, 80), Image.ANTIALIAS))\n\n # Import another diagonally striked X\n X_diag2 = Image.open('assests/X_Assets/X_diag2.png')\n X_diag2 = ImageTk.PhotoImage(X_diag2.resize((95, 80), Image.ANTIALIAS))\n\n # Import image and icon for O\n O_img = Image.open('assests/O_Assets/O.png')\n O_icon = ImageTk.PhotoImage(O_img.resize((14, 14), Image.ANTIALIAS))\n O_img = ImageTk.PhotoImage(O_img.resize((90, 90), Image.ANTIALIAS))\n\n # Import horizontally striked O\n O_hor = Image.open('assests/O_Assets/O_hor2.png')\n O_hor = ImageTk.PhotoImage(O_hor.resize((90, 90), Image.ANTIALIAS))\n\n # Import vertically striked O\n O_vert = Image.open('assests/O_Assets/O_vert2.png')\n O_vert = ImageTk.PhotoImage(O_vert.resize((90, 90), Image.ANTIALIAS))\n\n # Import diagonally striked O\n O_diag = 
Image.open('assests/O_Assets/O_diag.png')\n O_diag = ImageTk.PhotoImage(O_diag.resize((90, 90), Image.ANTIALIAS))\n\n # Import another diagonally striked O\n O_diag2 = Image.open('assests/O_Assets/O_diag2.png')\n O_diag2 = ImageTk.PhotoImage(O_diag2.resize((90, 90), Image.ANTIALIAS))\n\n return (null_img, X_icon, X_img, X_hor, X_vert, X_diag, X_diag2, O_icon, O_img, O_hor, O_vert, O_diag, O_diag2)", "def show_files(file_locations):\n for file_loc in file_locations:\n show_image(file_loc)", "def open_files(self):\n if not self.unbalanced:\n if not self.validation:\n datas={}\n for var in self.variables:\n datas[var]=xr.open_dataset(\n f'/{self.dlfile_directory}/{self.climate}_{self.variable_translate(var).lower()}_{self.mask_str}_dldata_traintest.nc')\n return datas\n if self.validation:\n datas={}\n for var in self.variables:\n datas[var]=xr.open_dataset(\n f'/{self.dlfile_directory}/{self.climate}_{self.variable_translate(var).lower()}_{self.mask_str}_dldata_traintest_valid.nc')\n return datas\n if self.unbalanced:\n if not self.validation:\n datas={}\n for var in self.variables:\n datas[var]=xr.open_dataset(\n f'/{self.dlfile_directory}/{self.climate}_{self.variable_translate(var).lower()}_{self.mask_str}_dldata_traintest_unbalanced.nc')\n return datas\n if self.validation:\n datas={}\n for var in self.variables:\n datas[var]=xr.open_dataset(\n f'/{self.dlfile_directory}/{self.climate}_{self.variable_translate(var).lower()}_{self.mask_str}_dldata_traintest_unbalanced_valid.nc')\n return datas", "def test_get_image(self):\n\n spine_data_loader = SpineDataLoader(dirpath_data=self.dirpath,\n batch_size=4)\n\n for idx in range(4):\n image = spine_data_loader.get_image(str(idx))\n assert image.shape == (256, 256, 1)\n assert image.min() == 0.0\n assert image.max() == 1.0\n assert image.dtype == 'float64'", "def test_image():\n def get_images_name(folder):\n \"\"\"Create a generator to list images name at evaluation time\"\"\"\n onlyfiles = [f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f))]\n for f in onlyfiles:\n yield f\n\n def pil_loader(path):\n \"\"\"Load images from /eval/ subfolder, convert to greyscale and resized it as squared\"\"\"\n with open(path, 'rb') as f:\n with Image.open(f) as img:\n sqrWidth = np.ceil(np.sqrt(img.size[0]*img.size[1])).astype(int)\n return img.convert('L').resize((sqrWidth, sqrWidth))\n\n eval_loader = torch.utils.data.DataLoader(ImageFolder(root=args.evalf, transform=transforms.Compose([\n transforms.Resize(28),\n transforms.CenterCrop(28),\n transforms.ToTensor(),\n transforms.Normalize((0.1307,), (0.3081,))\n ]), loader=pil_loader), batch_size=1, **kwargs)\n\n # Name generator\n names = get_images_name(os.path.join(args.evalf, \"images\"))\n model.eval()\n with torch.no_grad():\n for data, target in eval_loader:\n data, target = data.to(device), target.to(device)\n output = model(data)\n label = output.argmax(dim=1, keepdim=True).item()\n print (\"Images: \" + next(names) + \", Classified as: \" + str(label))", "def _open_img(self, img_name):\n try:\n img = Image.open(img_name)\n photo = ImageTk.PhotoImage(img)\n return photo\n except IOError:\n Debug.printi(\"Unable to find image \" + img_name, Debug.Level.ERROR)", "def isopen(self):\n return _image.image_isopen(self)", "def copy_image_files(self) -> None:\n if self.images:\n if self.config.epub_fix_images or self.config.epub_max_image_width:\n if not Image:\n logger.warning(__('Pillow not found - copying image files'))\n super().copy_image_files()\n else:\n 
self.copy_image_files_pil()\n else:\n super().copy_image_files()", "def figure_roi_inspect_all(self):\n for roiNumber in range(len(self.rois)):\n self.figure_roi_inspect(roiNumber,saveAs=\"roi_%02d.png\"%roiNumber)", "def populate_images(self):\n print \"Populating images info...\"\n images = self.get_all_images()\n for i in images:\n\n associated_snapshots = self.get_snapshots_of(i)\n\n self.spreadsheet[i.id] = dict(name=i.name, Name_tag=self.get_name_tag(i), id=i.id,\n KEEP_tag=self.get_keep_tag(i), PROD_tag=self.is_production(i),\n region=i.region.name,\n created=i.creationDate,\n associated_snapshots=associated_snapshots,\n description=i.description)", "def ensure_loaded(self, frames):\n if isinstance(frames, list):\n return [self.ensure_np_array(frame) for frame in frames]\n\n elif isinstance(frames, str):\n return Image.open(frames)\n\n elif isinstance(frames, np.ndarray):\n return Image.fromarray(frames)\n \n return frames", "def check_init_files_and_folders():\n\t#['cascade_wimb_bus_front_100_stages_1000_pos_3000_neg.xml', 'cascade_wimb_bus_front_33_stages_1000_pos_3000_neg_wrong.xml', 'color_detect_2.py', 'dedupe.py', 'detect_image_group_ku.py', 'detect_shape_5.py', 'get_cam_id_2.py', 'get_image_8.py', 'gui_hsv.py', 'knaps.py', 'knapsack_2.py', 'maps.html', 'program_detect_rectangle.zip', 'start_capture.py']\n\tfile_list=[\n\t#'cascade_wimb_bus_front_100_stages_1000_pos_3000_neg.xml', \n\t'models/cascade_wimb_bus_front_33_stages_1000_pos_3000_neg_wrong.xml', \n\t#'color_detect_2.py', \n\t#'dedupe.py', \n\t'detect_bus_haar_group.py', \n\t#'detect_shape_5.py', \n\t'get_cam_detail.py', \n\t'get_image.py', \n\t#'gui_hsv.py', \n\t#'knaps.py', \n\t#'knapsack_2.py', \n\t#'maps.html', \n\t#'program_detect_rectangle.zip', \n\t'start_wimb.py',\n\t'g.php',\n\t]\n\tdirectory_list=[\n\t'images',\n\t'images_bgs',\n\t'images_bgs_mask',\n\t#'images_bgs_result',\n\t'images_color',\n\t'images_haar',\n\t'images_haar_result',\n\t'images_number',\n\t'images_number_result',\n\t'models',\n\t'images_old',\n\t'text_number',\n\t]\n\t\n\tfor file_name in file_list: print 'file '+file_name+' existed: '+str(os.path.isfile(file_name))\n\tfor directory_name in directory_list: \n\t\tprint 'directory '+directory_name+' existed: '+str(os.path.isdir(directory_name))\n\t\tif not os.path.isdir(directory_name): \n\t\t\tos.makedirs(directory_name)\n\t\tif \"images\" in directory_name: shutil.copy(path+'/g.php',path+'/'+directory_name+'/g.php')", "def _iter_images(self):\n for image in self._images:\n yield image", "def test_getImages(self): # GIVEN the group chat has at least one image\n testBot = bot.Bot(os.environ['bot_id'], os.environ['token'], os.environ['group_ID'])\n imageList = testBot.run() #AND THEN post_images calls the private get_images method which returns an array\n self.assertTrue(len(imageList) > 0) #THEN there should be at least one element in the array", "def check_image_size(image_folder_path, height=None, width=None):\n total_img_list = glob.glob(os.path.join(image_folder_path, \"*\"))\n counter = 0\n for image in tqdm(total_img_list, desc=\"Checking in progress\"):\n try:\n img = cv2.imread(image)\n\n # Review Comments:\n #\n # I assume you were trying to initialize width and height\n # if they are not defined by the caller. 
I have rewritten\n # your code to do this successfully - before you were just\n # comparing the height and width of each image with\n # itself.\n if height is None:\n height = img.shape[1]\n\n if width is None:\n width = img.shape[0]\n\n if not (height == img.shape[1] and width == img.shape[0]):\n counter += 1\n # Review Comments: What exception are you trying to catch here?\n # In general, you should not have a bare except block.\n except:\n print(\"this {} is corrupted\".format(image))\n continue\n return counter", "def __init__(self, data_dir, file_prefix, num_images):\n print(file_prefix)\n self.file_prefix = file_prefix\n self.files = [os.path.join(data_dir, '%s%03d.jpg' % (file_prefix, i + 1)) for i in range(num_images)]\n self.files = list(filter(os.path.exists, self.files))\n self.panoramas = None\n self.homographies = None\n print('found %d images' % len(self.files))", "def main():\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('drive', 'v3', http=http)\n\n i = 0\n total = 0\n nextPageToken=None\n while True:\n results = service.files().list(\n pageSize=30,\n fields=\"nextPageToken, files(id, name, mimeType, modifiedTime)\",\n spaces='photos',\n pageToken=nextPageToken\n ).execute()\n\n items = results.get('files', [])\n nextPageToken = results.get(\"nextPageToken\")\n if not items:\n print('No files found.')\n else:\n for item in items:\n if item['mimeType'].split('/')[0] != 'image':\n continue\n if vcoll.findBySrcId(item['id']) is not None:\n continue\n destination = 'image_tags/validation/' + item['name']\n file_content = get_file_stream(service, item['id'])\n if file_content and image_handler.is_valid_image(file_content):\n file_handler.upload_file_stream(destination, file_content)\n vcoll.insertValidationImage(destination, item['id'], item['modifiedTime'])\n total += 1\n print(\"Downloaded {0} photos\".format(total))\n i += 1", "def test_all_merge(self):\n\n test_folder = os.path.join('test_data', 'merging_tests', 'batch_test')\n # test_folder = base_path + '/test_data/merging_tests/batch_test/'\n results_folder = os.path.join(test_folder, 'results')\n # results_folder = test_folder+\"results/\"\n\n if not os.path.isdir(results_folder):\n os.mkdir(results_folder)\n\n # delete all files in output folder\n for the_file in os.listdir(results_folder):\n file_path = os.path.join(results_folder, the_file)\n if os.path.isfile(file_path):\n os.unlink(file_path)\n\n backgrounds_folder = os.path.join(test_folder, 'backgrounds')\n obj_poses_folder = os.path.join(test_folder, 'object_poses')\n\n mi.generate_for_all_objects(obj_poses_folder, backgrounds_folder, results_folder, adjust_brightness = True)\n self.assertEqual(len(os.listdir(obj_poses_folder)), len(os.listdir(results_folder)))\n\n for the_file in os.listdir(results_folder):\n file_path = os.path.join(results_folder, the_file)\n im = Image.open(file_path)\n self.assertEqual((300,300), im.size)\n self.assertEqual('JPEG', im.format)\n self.assertNotEqual('PNG', im.format)", "def display_images(filenames):\n for filename in filenames:\n display(Image(filename))", "def load_images(filename):\n images = _load(filename)\n #_info_image(image, title=os.path.basename(filename))\n return images", "def load_images(files, open_fn=None):\n if open_fn is None:\n import cv2\n open_fn = cv2.imread\n images = list()\n for _file in files:\n images.append(np.asarray(open_fn(_file)))\n return images", "def run_images_analysis(filepath, ID, method):\n for path in filepath:\n try:\n 
Image.open(path)\n except IOError:\n msg = 'Please import images files, or just a single zip archive'\n else:\n filename, extension = get_file_name(path)\n\n # Save raw image to database\n msg = client.upload_file(ID, filename, extension, path)\n\n err, msg = check_msg(msg)\n\n if err is False: # if no error in uploading image\n # Request to process image\n client.process_image(ID, filename, method)\n return msg", "def load_images(subdir):\n with perform(\n name='dbutils load_images',\n before='Loading images to gallery',\n fail='Error occured while loading images to gallery',\n after='Images succesfully loaded'\n ):\n load_dummy_images(subdir)", "def test_save_image(self):\n self.roses.save_image()\n image = Images.objects.all()\n self.assertEqual(len(image), 1)", "def image_present_check(self):\r\n if not self.master.images: # If no images present in the list\r\n messagebox.showerror(\"Error\", 'No image selected') # Throw up the error messagebox\r\n\r\n else:\r\n return True # If there are images present in the list, then return True value\r", "def _load_images(self, resolutions=None):\n images = {}\n\n for block, url in self.image_declarations:\n file_name = normalize_filename(url)\n if file_name not in images:\n img_resolutions = {}\n img = Image.open(file_name)\n img_resolutions[1] = img\n width, height = img.size\n\n if resolutions:\n for resolution in resolutions:\n # Get the correct filename for this resolution\n if resolution != 1:\n root, ext = os.path.splitext(file_name)\n res_file_name = '{root}-{resolution}x{ext}'.format(\n root=root, resolution=resolution, ext=ext)\n\n img = Image.open(res_file_name)\n if img.size[0] / resolution != width:\n raise ValueError('Invalid width for {0}'.format(\n res_file_name))\n if img.size[1] / resolution != height:\n raise ValueError('Invalid height for {0}'.format(\n res_file_name))\n img_resolutions[resolution] = img\n\n images[file_name] = img_resolutions\n\n return images", "def _image_set(gt_txt, images_dir):\n with open(gt_txt) as f:\n filename = f.readline().rstrip()\n total = 1\n while filename:\n log.debug(filename)\n image = Image(os.path.join(images_dir, filename), filename)\n face_num = int(f.readline().rstrip())\n\n if face_num == 0:\n log.warning('No faces for {}. Ignoring next line {}'.format(image.filename, f.readline().rstrip()))\n\n log.debug(face_num)\n for _ in range(face_num):\n anno = f.readline().rstrip().split()\n log.debug(anno)\n face = Face(anno)\n if face.is_valid():\n image.add_face(face)\n else:\n log.debug('Skipping INVALID %s from %s', face, image)\n filename = f.readline().rstrip()\n total += 1\n yield image", "def check_image(self, render_count):\n if render_count != 1:\n return\n\n # If it's the first render, autoscale to make sure it lines up\n # properly. 
See update_image for why this is necessary\n local_plot = self.main_image_dialog.get_plot()\n local_plot.do_autoscale()\n\n # divided by the width of the image 1.0 / 0.4 is a guessed\n # value that seems to provide appropriate balance between\n # startup looks and non-breaking functionality when the\n # image is clicked.\n ratio = 1.0 / 0.4\n local_plot.set_aspect_ratio(ratio, lock=False)\n\n # Change the plot axis to have 0 in the lower left corner\n local_plot.set_axis_limits(0, -85, self.image_height)", "def test_format_files(self):\n shutil.copytree(\"testimages/\", \"testimages_to_format/\")\n os.chdir(\"testimages_to_format\")\n self.vimiv.quit()\n self.init_test([\"arch_001.jpg\"])\n self.vimiv[\"fileextras\"].format_files(\"formatted_\")\n files = [fil for fil in os.listdir() if \"formatted_\" in fil]\n files = sorted(files)\n expected_files = [\"formatted_001.jpg\", \"formatted_002\",\n \"formatted_003.bmp\", \"formatted_004.svg\",\n \"formatted_005.tiff\", \"formatted_006.png\"]\n self.assertEqual(files, expected_files)\n os.chdir(\"..\")\n # Should not work without a path\n self.vimiv.paths = []\n self.vimiv[\"fileextras\"].format_files(\"formatted_\")\n self.check_statusbar(\"INFO: No files in path\")\n # Should not work in library\n self.vimiv[\"library\"].focus(True)\n self.vimiv[\"fileextras\"].format_files(\"formatted_\")\n self.check_statusbar(\"INFO: Format only works on opened image files\")", "def create_image_urls(self):\n self._image_urls = []\n while True:\n image_url = self._create_random_url()\n request = urllib2.Request(image_url)\n opener = urllib2.build_opener(NoRedirection)\n try:\n response = opener.open(request)\n code = response.code\n except urllib2.HTTPError as error:\n code = error.code\n if code == 200:\n print \"Found a successful url!\"\n self._image_urls.append(image_url)\n if len(self._image_urls) > 100:\n break\n print self._image_urls\n image_url_file = open(self._image_urls_file_name, 'w')\n for image_url in self._image_urls:\n image_url_file.write(image_url + '\\n')\n image_url_file.close()", "def run_yolo_indir(images_path):\n for filename in os.listdir(images_path):\n try:\n # print(filename)\n Image.open(os.path.join(images_path, filename))\n test_detector(b'cfg/voc.data', b'cfg/yolo.cfg', b'yolo.weights', os.path.join(\n images_path, filename).encode('utf-8'), parameters.YOLO_THRES, 0.5)\n w, h, o = read_bounding_boxes('bounding_boxes.txt')\n crop_all_bounding_boxes(o, filename, os.path.join, images_path)\n except:\n print('Cannot test image', filename)\n continue", "def check_png_directories(self):\n check_dir_of = Locations.check_dir_of\n check_dir_of(self.HISTO_PNG)\n check_dir_of(self.LABELS_PNG)\n check_dir_of(self.SOURCE_PNG)", "def test_Image():\n assert Image(cur, \"Simple_Linear\").detect_image() == True\n assert Image(cur, \"Logistic_Linear\").detect_image() == False\n assert Image(cur, \"Simple_Linear\").date == \"2021-04-20\"\n assert Image(cur, \"Breslow-Day_Test\").source == \"Course BIOSTAT703 slide\"", "def load_images(self, files, sub_dir):\n\n for f in files:\n self.images.append(Image(f, sub_dir))", "def test_save_image(self):\n\n from m3_save_images.m3_save_images import save_images\n folder_destination_name = \"unittest-sorted-images\"\n path_source = \"../img\"\n image_name = [\"00ff00.png\", \"aqua.png\", \"black.jpg\", \"yellow.png\", \"red2.jpg\", \"green.jpg\"]\n image_color = [\"Lime\", \"Aqua\", \"Black\", \"Yellow\", \"Red\", \"Green\"]\n # new empty folder is needed for testing save_image() function\n if 
os.path.isdir(folder_destination_name):\n shutil.rmtree(folder_destination_name)\n os.mkdir(folder_destination_name)\n # creating folders\n for i in range(0, 4):\n save_images(folder_destination_name, path_source, image_name[i], image_color[i])\n self.assertEqual(''.join(os.listdir(os.path.join(folder_destination_name, image_color[i]))), image_name[i])\n save_images(folder_destination_name, path_source, image_name[i], image_color[5])\n self.assertNotEqual(''.join(os.listdir(os.path.join(folder_destination_name, image_color[i]))), image_name[5])", "def test_true_images(dl1_file):\n from ctapipe.io.tableloader import TableLoader\n\n with TableLoader(\n dl1_file, load_dl1_parameters=False, load_true_images=True\n ) as table_loader:\n table = table_loader.read_telescope_events([\"MST_MST_NectarCam\"])\n assert \"true_image\" in table.colnames", "def load_images(folder_path, num_images):\n imgs = np.zeros(shape=[num_images, 400, 400, 3])\n for i in range(1, num_images + 1):\n image_name = \"satImage_%.3d\" % i\n image_path = folder_path + image_name + \".png\"\n if os.path.isfile(image_path):\n print('Loading ' + image_path)\n img = mpimg.imread(image_path)\n\n #imgs[i - 1] = np.asarray(img).reshape(400, 400, 3)\n imgs[i - 1] = img.reshape(400, 400, 3)\n else:\n print('File ' + image_path + ' does not exist')\n return imgs", "def _build_docker_images(self):\n print(f\"+ building {len(self.neurodocker_specs)} Docker images\")\n self.docker_status = []\n for sha1, neurodocker_dict in self.neurodocker_specs.items():\n try:\n print(\"++ building image: {}\".format(neurodocker_dict))\n cg.docker_main(\n self.working_dir,\n neurodocker_dict,\n sha1,\n build_context=self.build_context,\n )\n self.docker_status.append(\"docker ok\")\n except Exception as e:\n self.docker_status.append(\n \"failed to build image with SHA1 {}: {}\".format(sha1, e)\n )", "def test_create_image_stream_for_all_namespaces(self):\n pass", "def _check_integrity(self):\n root = self.root\n for scene_name in self.scene_list:\n if not(os.path.isdir(os.path.join(root,scene_name)) and \n os.path.isdir(os.path.join(root,scene_name, images_dir)) and\n os.path.isfile(os.path.join(root,scene_name,annotation_filename))):\n return False\n return True", "def _check_integrity(self):\n root = self.root\n for scene_name in self.scene_list:\n if not(os.path.isdir(os.path.join(root,scene_name)) and \n os.path.isdir(os.path.join(root,scene_name, images_dir)) and\n os.path.isfile(os.path.join(root,scene_name,annotation_filename))):\n return False\n return True", "def load_test_images(images):\n loaded = {}\n for description, _ in images.items():\n loaded[description] = load_from_netcdf(description)\n return loaded", "def loadImages(self):\n for map_name, img in self.maps.items():\n if img is None or map_name not in __class__.input_tr:\n continue\n getCyclesImage(img)", "def validate(model: str = \"hog\"):\n for filepath in Path(\"validation\").rglob(\"*\"):\n if filepath.is_file():\n recognize_faces(\n image_location=str(filepath.absolute()), model=model\n )" ]
[ "0.6742113", "0.62553185", "0.6229397", "0.6169011", "0.61681324", "0.6136686", "0.60898274", "0.60487515", "0.5997627", "0.5935533", "0.58759665", "0.58210576", "0.5768145", "0.5766127", "0.5718156", "0.5691568", "0.5679888", "0.56696707", "0.5667723", "0.5602127", "0.5572963", "0.55521595", "0.5550424", "0.55474514", "0.55456865", "0.5536247", "0.55299145", "0.55299145", "0.55299145", "0.552102", "0.5511544", "0.5501667", "0.54940504", "0.54936814", "0.5478643", "0.54582906", "0.5453629", "0.54529196", "0.54468465", "0.54442614", "0.54364854", "0.5431571", "0.5431317", "0.5425116", "0.5413462", "0.5400883", "0.53911185", "0.5390786", "0.53883815", "0.5375269", "0.53710663", "0.5355848", "0.535279", "0.5351557", "0.53474146", "0.5343914", "0.53411406", "0.5321712", "0.529719", "0.52958816", "0.52950454", "0.52936053", "0.52840996", "0.5280306", "0.527063", "0.526756", "0.5267265", "0.5263638", "0.5257966", "0.5247375", "0.5247067", "0.52469003", "0.5246258", "0.5244286", "0.5240992", "0.5240249", "0.5232069", "0.521888", "0.52099115", "0.5198868", "0.5196483", "0.51954997", "0.5194371", "0.51915574", "0.51874703", "0.5186194", "0.51823044", "0.51794827", "0.5178933", "0.5174295", "0.5170094", "0.5169241", "0.51683795", "0.51676947", "0.51672626", "0.51662177", "0.51662177", "0.5157771", "0.51544124", "0.5152705" ]
0.663587
1
For a given template and list of extensions, find every file related to that template which has one of the extensions.
def find_template_companion_files(template: Path, extensions: Iterable[str], recurse_up_to: Path = None) -> Set[Path]: files_to_check = [] # Get a list of all file names to look for in each folder data_file_names = [] basename = template.name.split('.')[0] for i in range(len(template.suffixes)): ext = ''.join(template.suffixes[:i+1]) for data_file_ext in extensions: data_file_names.append(Path(basename + ext).with_suffix(data_file_ext)) # Look for those files in the template's current folder (a.k.a. parent directory) files_to_check.extend([template.parent / file_name for file_name in data_file_names]) if recurse_up_to and recurse_up_to in template.parents: # Look for those files in every parent directory up to `recurse_up_to`, # excluding the template's parent directory which has already been checked relative_path = template.parent.relative_to(recurse_up_to) for folder in relative_path.parents: for file in data_file_names: files_to_check.append(recurse_up_to / folder / file) return set([file for file in files_to_check if file.is_file()])
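A minimal, hypothetical usage sketch of the function above (the directory layout and file names are invented purely for illustration, and the sketch assumes find_template_companion_files and its imports are already in scope):

```python
from pathlib import Path
import tempfile

# Build a throwaway layout in a temp directory so the sketch is runnable.
# All names here (pages/, index.*) are hypothetical.
root = Path(tempfile.mkdtemp())
(root / "pages").mkdir()
(root / "pages" / "index.html.jinja").touch()  # the template itself
(root / "pages" / "index.html.yml").touch()    # data file next to the template
(root / "index.yml").touch()                   # shared data file higher up the tree

template = root / "pages" / "index.html.jinja"
# Extensions must include the leading dot, since the function uses Path.with_suffix().
found = find_template_companion_files(template, [".yml", ".yaml"], recurse_up_to=root)

# Expected result for this layout:
# {root / "pages" / "index.html.yml", root / "index.yml"}
print(sorted(found))
```

Note that the lookup is keyed on the template's basename: at every directory level up to recurse_up_to, only files whose names derive from that basename (here index.yml, index.yaml, index.html.yml, index.html.yaml) are considered, so unrelated data files are never picked up.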
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def filter_files_by_extension(\n files: list ,\n extensions: list\n):\n filtered_files = []\n for file in files:\n file_ext = os.path.splitext(file)[-1].lower()\n file_ext = _remove_dot_from_extension(file_ext)\n for extension in extensions:\n ext = _remove_dot_from_extension(extension).lower()\n # print(\"ext \\n\", ext)\n # print(\"file_ext \\n\", file_ext)\n if file_ext == ext:\n filtered_files.append(file)\n\n return filtered_files\n ...", "def find_template_files(dir_name):\n list_files = []\n for dirName, subdirList, fileList in os.walk(dir_name):\n # Construct file path relative to the dir_name.\n for file_name in fileList:\n fp = os.path.join(dirName, file_name)\n r = re.compile(\".+\\.template$\")\n if r.match(fp): # if the file is a .template...\n # Save the template file for later.\n print_debug(\"Found template file {}\".format(fp))\n list_files.append(fp)\n return list_files", "def find_files(extensions):\n\n return [fname for fname in os.listdir('.') if fname.endswith(extensions)]", "def _get_contents_by_ext(self, collection):\n contents_by_ext = defaultdict(list)\n collection_dir = os.path.join(self.root_dir, collection)\n for name in sorted(os.listdir(collection_dir)):\n path = os.path.join(collection_dir, name)\n if os.path.isfile(path):\n root, ext = os.path.splitext(name)\n contents_by_ext[ext].append(root)\n return contents_by_ext", "def list_templates(\n self,\n extensions: t.Optional[t.Collection[str]] = None,\n filter_func: t.Optional[t.Callable[[str], bool]] = None,\n ) -> t.List[str]:\n assert self.loader is not None, \"No loader configured.\"\n names = self.loader.list_templates()\n\n if extensions is not None:\n if filter_func is not None:\n raise TypeError(\n \"either extensions or filter_func can be passed, but not both\"\n )\n\n def filter_func(x: str) -> bool:\n return \".\" in x and x.rsplit(\".\", 1)[1] in extensions # type: ignore\n\n if filter_func is not None:\n names = [name for name in names if filter_func(name)]\n\n return names", "def select_files_with_ext(file_resources: Dict[str, str], ext: str, contains: Optional[str] = None) -> Dict[str, str]:\n subset_file_resources = {}\n for filename, filepath in file_resources.items():\n if not isinstance(filepath, str): continue\n if filename.endswith(ext) and (contains is None or contains in filename):\n subset_file_resources[filename] = filepath\n\n return subset_file_resources", "def get_templates(template_folder, search_term=''):\n return [template for template in os.listdir(template_folder)\n if search_term in template]", "def test_get_filenames_by_ext():\n tmpdir = os.path.join(tempfile.gettempdir(), \"jade-test-tmp87alkj8ew\")\n os.makedirs(tmpdir, exist_ok=True)\n\n data = {\"A\": 1, \"B\": 2}\n json_file = os.path.join(tmpdir, \"a.json\")\n dump_data(data, json_file)\n\n toml_file = os.path.join(tmpdir, \"b.toml\")\n dump_data(data, toml_file)\n\n filenames = get_filenames_by_ext(tmpdir, \".json\")\n assert \"a.json\" in next(filenames)\n\n filenames = get_filenames_by_ext(tmpdir, \".toml\")\n assert \"b.toml\" in next(filenames)", "def find_files_by_extensions(cls, search_path, allowed_ext):\n file_list = []\n for root, dirnames, filenames in os.walk(search_path):\n for filename in filenames:\n name, extension = os.path.splitext(filename)\n if extension in allowed_ext:\n file_list.append(os.path.join(root, filename))\n\n return file_list", "def _index(search_path, ext=None):\n\n if ext is None:\n ext = \"TCASE\"\n\n tcases = set([])\n for _, _, files in os.walk(search_path):\n for tc_fname in 
files:\n if os.path.splitext(tc_fname)[-1] in EXTS[ext]:\n tcases.add(tc_fname)\n\n return tcases", "def get_templates(templates_path_pattern):\n templates_paths = glob.glob(templates_path_pattern)\n cars = []\n notcars = []\n for template_path in templates_paths:\n if 'non-vehicles' in template_path:\n notcars.append(template_path)\n else:\n cars.append(template_path)\n return cars, notcars", "def _get_files(p, fs, extensions=None):\n p = Path(p)\n res = [\n p / f\n for f in fs\n if not f.startswith(\".\")\n and ((not extensions) or f'.{f.split(\".\")[-1].lower()}' in extensions)\n ]\n return res", "def collect_files_with_extensions(self, extension: str) -> List[str]:\n occurrences = []\n for position in os.listdir(self.directory):\n if os.path.isdir(position):\n for file in os.listdir(position):\n if os.path.isfile(os.path.join(position, file)) and file.endswith(\n extension\n ):\n occurrences.append(os.path.join(self.directory, position, file))\n return occurrences", "def extension_templates(self) -> List[str]:\n default = [self.extension_file(), \"mako\"]\n return self.options.get(\"extensions\").get(\"templates\", default)", "def get_template_files(fs, template_type):\n # no template fitting for null runs\n if fs[\"null_run\"]:\n template_type = None\n\n if \"template_type\" in fs:\n if template_type == fs[\"template_type\"]:\n return\n\n fs[\"template_type\"] = template_type\n\n # find all corresponding foreground templates\n if template_type is None:\n fs[\"template_root\"] = None\n fs[\"template_root2\"] = None\n fs[\"template_files\"] = None\n fs[\"template_files2\"] = None\n fs[\"template_noise_root\"] = None\n fs[\"template_noise_root2\"] = None\n fs[\"template_noise_files\"] = None\n fs[\"template_noise_files2\"] = None\n fs[\"num_template\"] = 0\n fs[\"num_template_noise\"] = 0\n else:\n num_template_noise = None\n for hm in [\"1\", \"2\"]:\n suff = \"\" if hm == \"1\" else \"2\"\n troot = os.path.join(\n fs[\"data_root\"],\n \"templates_{}\".format(template_type),\n \"halfmission-{}\".format(hm),\n )\n ### this block is so sims with template type like\n # 353_100_gauss_003 can use ensemble in 353_100_gauss\n tp = template_type.split(\"_\")\n ttype = template_type\n if tp[-1].isdigit():\n if ttype[-7:] not in [\"353_100\", \"217_100\"]:\n ttype = \"_\".join(tp[:-1])\n\n tnroot = os.path.join(\n fs[\"data_root\"],\n \"templates_noise_{}\".format(ttype),\n \"halfmission-{}\".format(hm),\n )\n\n tfiles = []\n tnfiles = []\n for f in fs[\"map_files\"]:\n nfile = f.replace(fs[\"map_root\"], troot)\n if not os.path.exists(nfile):\n raise OSError(\"Missing hm-{} template for {}\".format(hm, f))\n tfiles.append(nfile)\n nfiles = sorted(\n glob.glob(\n f.replace(fs[\"map_root\"], tnroot).replace(\n \".fits\", \"_*.fits\"\n )\n )\n )\n if not len(nfiles):\n raise OSError(\n \"Missing hm-{} template noise for {}\".format(hm, f)\n )\n tnfiles.append(nfiles)\n if num_template_noise is not None:\n if len(nfiles) != num_template_noise:\n raise OSError(\n \"Wrong number of template noise sims. 
\"\n \"Found {} files, expected {}.\".format(\n len(nfiles), num_template_noise\n )\n )\n\n num_template_noise = len(nfiles)\n\n tfiles = np.asarray(tfiles)\n tnfiles = np.asarray(tnfiles)\n fs[\"template_root{}\".format(suff)] = troot\n fs[\"template_files{}\".format(suff)] = tfiles\n fs[\"template_noise_root{}\".format(suff)] = tnroot\n fs[\"template_noise_files{}\".format(suff)] = tnfiles\n\n fs[\"num_template\"] = len(fs[\"template_files\"])\n fs[\"num_template_noise\"] = num_template_noise\n self.log(\n \"Found {} templates in {}\".format(\n fs[\"num_template\"], fs[\"template_root\"]\n ),\n \"info\",\n )\n self.log(\n \"Found {} template noise files in {}\".format(\n fs[\"num_template_noise\"], fs[\"template_noise_root\"]\n ),\n \"info\",\n )\n self.log(\"Template files: {}\".format(fs[\"template_files\"]), \"debug\")\n\n fields = [\n \"template_type\",\n \"template_root\",\n \"template_root2\",\n \"template_files\",\n \"template_files2\",\n \"template_noise_root\",\n \"template_noise_root2\",\n \"template_noise_files\",\n \"template_noise_files2\",\n \"num_template\",\n \"num_template_noise\",\n ]\n for k in fields:\n setattr(self, k, fs[k])", "def list_templates(extensions: Optional[List[str]] = None) -> List[str]:\n if environment is None or not hasattr(environment, 'loader'):\n return []\n return environment.list_templates(extensions=extensions)", "def get_files_with_extension(self, extension=sys.argv[1]) -> list:\n if extension == \"\":\n raise EnvironmentError(\"No extension provided!\")\n\n result = []\n for idx, file in enumerate(self.file_list):\n if re.search(extension + \"$\", file):\n result.append(file)\n\n if len(result) == 0:\n raise Exception(\"No {} files found.\".format(extension))\n\n return result", "def searchfiles(directory, filenames, ext=None):\n if ext:\n filenames = [f'{file}{ext}' for file in filenames]\n return [\n file for file in Path(directory).glob('*')\n if file.name in filenames\n ]", "def get_matched_extensions(request):\n\n def _match(e):\n return e.obj if e.obj.matches(request) else None\n\n result = EXTENSION_MANAGER.map(_match)\n return filter(bool, result)", "def find_template_filename(self, template_name):\n\n def next_file():\n filename = self.path / template_name\n yield filename\n try:\n exts = self.default_file_extensions\n except AttributeError:\n return\n\n strfilename = str(filename)\n for ext in exts:\n yield Path(strfilename + ext)\n\n for filename in next_file():\n if filename.is_file():\n return filename", "def get_files(path, extension):\n extension = listify(extension)\n return [p for p in path.ls() if p.suffix in extension and \"(\" not in p.stem]", "def get_preferable_files(project, input_template):\n preferable_files = PreferableFile.objects.filter(\n input_template=input_template\n )\n files = []\n if len(preferable_files) > 0:\n for file in project.files:\n if PreferableFile.match_any(file.filename, preferable_files):\n files.append(file)\n if len(files) > 0:\n return files\n return []", "def search_data(templates, pols, matched_pols=False, reverse_nesting=False, flatten=False):\n # type check\n if isinstance(templates, str):\n templates = [templates]\n if isinstance(pols, (str, int)):\n pols = [pols]\n # search for datafiles\n datafiles = []\n datapols = []\n for pol in pols:\n dps = []\n dfs = []\n for template in templates:\n _dfs = glob.glob(template.format(pol=pol))\n if len(_dfs) > 0:\n dfs.extend(_dfs)\n dps.extend([pol for df in _dfs])\n if len(dfs) > 0:\n datafiles.append(sorted(dfs))\n datapols.append(dps)\n # get 
unique files\n allfiles = [item for sublist in datafiles for item in sublist]\n allpols = [item for sublist in datapols for item in sublist]\n unique_files = set()\n for _file in allfiles:\n for pol in pols:\n if f\".{pol}.\" in _file:\n unique_files.update({_file.replace(f\".{pol}.\", \".{pol}.\")})\n break\n unique_files = sorted(unique_files)\n # check for unique files with all pols\n if matched_pols:\n Npols = len(pols)\n _templates = []\n for _file in unique_files:\n goodfile = True\n for pol in pols:\n if _file.format(pol=pol) not in allfiles:\n goodfile = False\n if goodfile:\n _templates.append(_file)\n\n # achieve goal by calling search_data with new _templates that are polarization matched\n datafiles, datapols = search_data(_templates, pols, matched_pols=False, reverse_nesting=False)\n # reverse nesting if desired\n if reverse_nesting:\n datafiles = []\n datapols = []\n for _file in unique_files:\n dfs = []\n dps = []\n for pol in pols:\n df = _file.format(pol=pol)\n if df in allfiles:\n dfs.append(df)\n dps.append(pol)\n datafiles.append(dfs)\n datapols.append(dps)\n # flatten\n if flatten:\n datafiles = [item for sublist in datafiles for item in sublist]\n datapols = [item for sublist in datapols for item in sublist]\n\n return datafiles, datapols", "def find_files(directory, extensions):\n res = set()\n for filename in os.listdir(directory):\n if filename.endswith(extensions):\n res.add(\"{}/{}\".format(directory, filename))\n return list(res)", "def find_files(path='', ext='', level=None, typ=list, dirs=False, files=True, verbosity=0):\n path = expand_path(path)\n gen = generate_files(path, ext=ext, level=level, dirs=dirs, files=files, verbosity=verbosity)\n if isinstance(typ(), Mapping):\n return typ((ff['path'], ff) for ff in gen)\n elif typ is not None:\n return typ(gen)\n else:\n return gen", "def get_files_from_of_type(path: str, ext: str) -> List[str]:\n files = []\n for root, dirnames, filenames in os.walk(path):\n for filename in fnmatch.filter(filenames, \"*.\" + str(ext)):\n files.append(os.path.join(root, filename))\n if not files:\n logging.error(\"No language files found in folder: \" + str(os.sep.join([convert_vars.BASE_PATH, \"source\"])))\n logging.debug(f\" --- found {len(files)} files of type {ext}. 
Showing first few:\\n* \" + str(\"\\n* \".join(files[:3])))\n return files", "def search_extension(path, ext):\n output = []\n for root, dirs, files in os.walk(path, topdown=True):\n for file in files:\n if file.endswith(ext):\n path = os.path.join(root, file)\n output.append(path)\n\n return output", "def list_extensions():\n formats = FileFormat.list_formats()\n return render_template('home.html', formats=formats)", "def _readFiles(self):\n template_files = []\n for file in os.listdir(self.template_folder):\n if file.endswith(\".xml\"):\n template_files.append(file)\n return template_files", "def get_files(template_path, resource_type, skip_customer_resources=False):\n try:\n json_files = []\n for file_path in find_files(template_path, '*.json'):\n folder_list = list(file_path.split(\"/\"))\n if not ('/customer/' in file_path.lower() and skip_customer_resources):\n folder_name = list(folder_list[-1].split('.'))\n file_name = folder_name[-2]\n if resource_type in ['template', 'script', 'policy']:\n if '_ignore' not in file_path and not file_name.startswith('Blueprint_'):\n if folder_list[-2] == file_name:\n json_files.append(file_path)\n else:\n if file_name.startswith('Blueprint_'):\n folder_name = \"Blueprint_%s\" % folder_list[-2]\n if folder_name == file_name:\n json_files.append(file_path)\n else:\n json_file = open(file_path, 'r')\n content = json.dumps(json_file.read()).encode('utf-8')\n json_file.close()\n content = json.loads(content)\n blueprint_details = json.loads(content)\n bp_name = \"Blueprint_%s\" % blueprint_details.get('name')\n if bp_name == file_name:\n json_files.append(file_path)\n return json_files\n except Exception as e:\n sys.stderr.write(e.message)\n exit(1)", "def _glob_files(directories, extensions):\n pwd = Path(__file__).resolve().parent\n open3d_root_dir = pwd.parent\n\n file_paths = []\n for directory in directories:\n directory = open3d_root_dir / directory\n for extension in extensions:\n extension_regex = \"*.\" + extension\n file_paths.extend(directory.rglob(extension_regex))\n file_paths = [str(file_path) for file_path in file_paths]\n file_paths = sorted(list(set(file_paths)))\n return file_paths", "def _search_files(self,\n timestamp,\n custom_templ=None,\n str_param=None,\n custom_datetime_format=None):\n if custom_templ is not None:\n raise NotImplementedError\n else:\n fname_templ = self.fname_templ\n\n if custom_datetime_format is not None:\n dFormat = {self.dtime_placeholder: custom_datetime_format}\n\n else:\n dFormat = {self.dtime_placeholder: self.datetime_format}\n\n sub_path = ''\n if self.subpath_templ is not None:\n for s in self.subpath_templ:\n sub_path = os.path.join(sub_path, timestamp.strftime(s))\n\n fname_templ = fname_templ.format(**dFormat)\n\n if str_param is not None:\n fname_templ = fname_templ.format(**str_param)\n\n search_file = os.path.join(self.path, sub_path,\n timestamp.strftime(fname_templ))\n\n if self.exact_templ:\n raise NotImplementedError\n else:\n filename = glob.glob(search_file)\n if len(filename) > 1:\n for templ in self.fn_templ_priority:\n fname_templ = templ.format(**dFormat)\n if str_param is not None:\n fname_templ = fname_templ.format(**str_param)\n search_file = os.path.join(self.path, sub_path,\n timestamp.strftime(fname_templ))\n filename = glob.glob(search_file)\n if len(filename) == 1:\n break\n\n if not filename:\n filename = []\n\n return filename", "def test_19_file_extensions_within_type(self):\n print (self.test_19_file_extensions_within_type.__doc__)\n\n stats_maker = StatsMakerFiles()\n r = 
stats_maker.view_file_extensions_within_type(file_type=FILE_TYPE_OCTET_STREAM)\n\n num_unique_extensions = r.result_data.get('number_unique_extensions')\n\n # check number of extensions\n #\n self.assertEqual(num_unique_extensions, 67)\n\n # check that list length matches number of extensions\n #\n ext_counts = r.result_data.get('records', [])\n self.assertEqual(len(ext_counts), 67)\n\n print ('ext_counts', ext_counts[4])\n # check 5th listing in extension count list\n #\n listing_5 = OrderedDict([('extension', u'.docx'), ('count', 15), ('total_count', 437), ('percent_string', '3.432%')])\n\n self.assertEqual(listing_5, ext_counts[4])", "def list_type_in_dir(path, extension):\n path, extension = check_args(path, extension)\n files = os.listdir(path)\n file_list = [os.path.join(path, f)\n for f in fnmatch.filter(files, '*' + extension)]\n\n return file_list", "def filter_target_extensions(self, files_dict):\n files_filtered = defaultdict(list)\n supported_formats = self.sox_get_supported_formats()\n logging.info('Filtering audio files ...')\n paths = list(files_dict.keys())\n\n for path in paths:\n if not path.endswith('letmehear'):\n files = sorted(files_dict[path])\n for f in files:\n if os.path.splitext(f)[1].lstrip('.').lower() in supported_formats:\n files_filtered[path].append(f)\n return files_filtered", "def extract_resource_extensions(resources):\r\n\r\n if resources:\r\n for resource in resources:\r\n _, ext = os.path.splitext(resource)\r\n yield ext", "def get_files_patterns(m_type, pattern, wdir, Ldir=False, Linverse=False, Lparents=None):\n Lshow = False\n matched_files=[]\n ### Codes for prefix\n dir_files = os.listdir(wdir)\n i=0\n for fname in dir_files:\n for patt in pattern:\n if m_type == 'p':\n if re.match(patt, fname):\n if Ldir or not os.path.isdir(fname):\n matched_files.append(fname)\n #print (patt, fname)\n ### for suffix\n elif m_type == 's':\n if fname.endswith(patt):\n #if not Linverse:\n if Ldir or not os.path.isdir(fname):\n matched_files.append(fname)\n ### included parents and directories\n if Lparents:\n #relative_files = get_relatives_suff(fname, dir_files)\n fnlist = re.split('\\.', fname)\n if os.path.isdir(fnlist[0]):\n rel_dir = fnlist[0]\n print(f\"{i:02d}: relative files {rel_dir}\")\n matched_files.append(rel_dir)\n i += 1\n ### for search\n elif m_type == 'm':\n if re.search(patt, fname):\n ### if it is dir skip\n if Ldir or not os.path.isdir(fname):\n matched_files.append(fname)\n if Lshow:\n print(f\"detect {fname}\") # in {match} {matches}\")\n \n #elif Linverse:\n # if not os.path.isdir(fname):\n # matched_files.append(fname)\n return matched_files", "def get_doc_files(extensions=MARKDOWN_EXTENSIONS + STATIC_ASSET_EXTENSIONS):\n file_list = []\n # doc files on toplevel\n for ext in extensions:\n file_list += config[\"topdir\"].glob('*' + ext)\n # doc files in include dirs\n for incdir in config['incdirs']:\n for ext in extensions:\n file_list += config[\"topdir\"].joinpath(incdir).rglob('*' + ext)\n return file_list", "def SearchFileType(ext, message0 = \"\", message1 = \"\", message2 = \"\"):\n extList = glob.glob('*'+ext)\n ChooseNumOption(extList, \"file\", ext, message0, message1, message2, True)", "def child_containing_matching_f(root, ext=('opus', 'ogg', 'mp3',\n'flac'),not_ext=('!qB',)):\n # only compressed files\n candidates = []\n for x in ext:\n candidates.extend(c.parent for c in root.rglob('*.' + x)\n if c.parent.name != '+')\n for x in not_ext:\n candidates = [c for c in candidates if not list(c.rglob('*.' 
+ x))]\n candidates = list(set(candidates))\n if candidates:\n return random.choice(candidates)", "def search(self):\n files = os.listdir(self.filePath)\n txt_file = []\n for f in files:\n f_ext = f.split('.')[-1]\n if f_ext == self.flag:\n if self.flag == 'txt':\n txt_file.append(FileTxt(os.sep.join([self.filePath, f])))\n\n if self.flag == 'csv':\n txt_file.append(FileCsv(os.sep.join([self.filePath, f])))\n\n return txt_file", "def find(self, path, all=False):\n matches = []\n if path in self.sources:\n for match in self.loader.get_template_sources(path):\n if not all:\n return match\n matches.append(match)\n return matches", "def list_all_exts(top_path, exts):\n if not top_path.endswith('/'):\n top_path += '/'\n ext_list = []\n for extension in exts:\n if not extension.startswith('.'):\n extension = '.' + extension\n ext_list.append(extension.lower())\n file_list = []\n for dirpath, dirnames, filenames in os.walk(top_path):\n for filename in filenames:\n if os.path.splitext(filename)[1].lower() in ext_list:\n file_list.append(os.path.join(dirpath, filename))\n return file_list", "def find_files(path, extension):\n matches = []\n for root, dirnames, filenames in os.walk(path):\n for filename in fnmatch.filter(filenames, extension):\n matches.append(os.path.join(root, filename))\n return matches", "def list_a_file_type(path, extension):\n path, extension = check_args(path, extension)\n file_list = [os.path.join(dirpath, f)\n for dirpath, dirnames, files in os.walk(path)\n for f in fnmatch.filter(files, '*' + extension)]\n\n return file_list", "def elastixTemplates():\n\t\ttransformations = []\n\t\tfileNames = os.listdir(AppVars.transformationsPath())\n\t\tfor fileName in fileNames:\n\t\t\tfullFileName = os.path.join(AppVars.transformationsPath(), fileName)\n\t\t\ttransformation = ParameterList()\n\t\t\tif transformation.loadFromFile(fullFileName):\n\t\t\t\ttransformations.append(transformation)\n\t\treturn transformations", "def glob_ext_files(dirname, ext=\"fa\") -> list:\n fnames = glob(os.path.join(dirname, f\"*.{ext}*\"))\n return [f for f in fnames if f.endswith((ext, f\"{ext}.gz\"))]", "def list_files(directory, extension):\n file_list = listdir(directory)\n included_list = []\n for f in file_list:\n for ext in extension:\n if f.endswith('.' 
+ ext):\n included_list.append(f)\n break\n return included_list", "def read_template_files(self, template_path):\n templates = dict()\n for file in listdir(template_path):\n template_file = re.search(r\"(.*?).html$\", file)\n if template_file:\n template_name = template_file.groups()[0]\n templates[template_name] = open(os.path.join(template_path, file)).read()\n return templates", "def find_files():\n \n p = re.compile(REGEX_PART_NUMBER)\n job_files = []\n \n for root, dirs, files in os.walk(project_path): # r at start of string need to prevent unicode error\n for filename in files:\n re_part_number = p.match(filename)\n if re_part_number:\n file_ext = filename.split(\".\")[-1].lower() # extract file extension \n file_size = os.path.getsize((os.path.join(root, filename))) # filesize in bytes \n \n part_number = re_part_number.group() # extract part number from regular expression match\n part_code = part_number.split(\"-\")[0]\n \n destinations = [] # destinations is a list in case a filetype is both a source and output filetype\n \n if (file_ext in EXTS_SOURCE_FILES) and flag_find_source_files:\n destinations.append(os.path.join(target_source_path,part_code,part_number)) \n \n if (file_ext in EXTS_OUTPUT_FILES) and flag_find_output_files:\n destinations.append(os.path.join(target_source_path,part_code,part_number)) \n \n if destinations: \n job_files.append(File(filename,root,file_size,destinations,part_number,part_code))\n print(f\"Found: {filename}\")\n \n return job_files", "def find_files(directory, extension, magictext):\n global files_logged\n global found_text\n dir_path = os.path.abspath(directory)\n dir_files = os.listdir(dir_path)\n for file in dir_files:\n if file.endswith(extension) and file not in files_logged:\n logger.info('New file found: {}'.format(file))\n files_logged.append(file)\n if file.endswith(extension):\n file_path = os.path.join(dir_path, file)\n if find_string_in_files(file_path, magictext):\n break\n for file in files_logged:\n if file not in dir_files:\n logger.info('File deleted: {}'.format(file))\n files_logged.remove(file)\n found_text[file] = 0", "def get_makefile_dependencies(self, template: Union[Path, str]) -> List[Path]:\n if isinstance(template, Path):\n template = template.read_text()\n dependencies = self.variable_files + self.yasha_extensions_files\n referenced_template_partials = find_referenced_templates(self.env.parse(template)) # returns a generator\n # convert the generator to a list, filtering out the None values\n referenced_template_partials: List[str] = list(filter(bool, referenced_template_partials))\n\n for relative_path in referenced_template_partials:\n for basepath in self.env.loader.searchpath: # type: ignore\n if not isinstance(basepath, Path): basepath = Path(basepath)\n template_path = basepath / relative_path\n if template_path.is_file:\n # we've found the template partial inside this basepath\n dependencies.append(template_path)\n return dependencies", "def get_gti_extensions_from_pattern(lchdulist, name_pattern=\"GTI\"):\n hdunames = [h.name for h in lchdulist]\n pattern_re = re.compile(\"^\" + name_pattern + \"$\")\n gtiextn = []\n for ix, extname in enumerate(hdunames):\n if pattern_re.match(extname):\n gtiextn.append(ix)\n return gtiextn", "def find_matching_images(template, directory):\n\n files = os.listdir(directory)\n\n # to turn the template to a regular expression want to replace\n # however many #'s with EXACTLY the same number of [0-9] tokens,\n # e.g. 
### -> ([0-9]{3})\n\n # change 30/may/2008 - now escape the template in this search to cope with\n # file templates with special characters in them, such as \"+\" -\n # fix to a problem reported by Joel B.\n\n length = template.count(\"#\")\n regexp_text = re.escape(template).replace(\"\\\\#\" * length, \"([0-9]{%d})\" % length)\n regexp = re.compile(regexp_text)\n\n images = []\n\n for f in files:\n match = regexp.match(f)\n\n if match:\n images.append(int(match.group(1)))\n\n images.sort()\n\n return images", "def filesearch(word=\"\"):\n logger.info('Starting filesearch')\n file = []\n for f in glob.glob(\"*\"):\n if word[0] == \".\":\n if f.endswith(word):\n file.append(f)\n\n elif word in f:\n file.append(f)\n #return file\n logger.debug(file)\n return file", "def search(filename):\n\n template, directory = scan_helper_image_files.image_to_template_directory(\n filename\n )\n\n indices = scan_helper_image_files.template_directory_to_indices(\n template, directory\n )\n\n return [\n scan_helper_image_files.template_directory_index_to_image(\n template, directory, index\n )\n for index in indices\n ]", "def file_list(files_dict: dict):\r\n files_to_transfer = []\r\n for file_base, ext_dict in files_dict.items():\r\n for file_ext in ext_dict.keys():\r\n if ext_dict[file_ext]:\r\n full_filename = file_base + file_ext\r\n files_to_transfer.append(full_filename)\r\n return files_to_transfer", "def filter_ext(exts=[]):\n\n def decorator(function):\n\n def wrapper(*args, **kwargs):\n\n files = function(*args, **kwargs)\n return [file for file in files if file.split('.')[-1] in exts]\n\n return wrapper\n\n return decorator", "def get_file_extensions():\n my_files_ext = []\n for file in os.listdir(os.getcwd()):\n if os.path.isfile(file):\n file_info = os.path.splitext(file)\n file_ext = file_info[1]\n my_files_ext.append(file_ext)\n return [file for file in my_files_ext]", "def getFiles(searchDir = './', extension = 'source'):\n from glob import glob \n\n return glob(searchDir+'/*.'+extension)", "def get_all_files(directory, extension):\n return (f for f in os.listdir(directory) if f.endswith(extension) and os.path.isfile(os.path.join(directory, f)))", "def gen_find(filepat, top):\n for path, dir_list, file_list in os.walk(top):\n for name in fnmatch.filter(file_list, filepat):\n yield os.path.join(path, name)", "def find_file(directory, extensions):\n for filename in os.listdir(directory):\n if filename.endswith(extensions):\n return \"{}/{}\".format(directory, filename)\n return None", "def _generate_src():\n for ext in extensions:\n yield self.src_format[ext](f=\"{}{}\".format(name, ext))", "def dir_search(ext, file_path='./'):\n try:\n return [file_path + \"/\" + file for file in os.listdir(file_path) if file.endswith(ext)]\n except OSError as e:\n logger.error(e)\n return []\n except Exception as e:\n logger.error(e)\n return []", "def filter_files(files, lang=EXTES):\n\n lang_specific = []\n\n for f in files:\n if f.endswith(lang):\n lang_specific.append(f)\n return lang_specific", "def getFiles(directory, showName, extension):\n os.chdir(directory)\n \n list = []\n \n for file in glob.glob(\"*\"+showName+\"*.\"+extension):\n list.append(file)\n \n return list", "def test_extensions(self):\n dummy_folder = TestOspaListDir.get_dummy_folder()\n need_result = []\n for i in range(1, 4):\n need_result.append(os.path.join(dummy_folder, 'memes', 'meme monty python', 'meme{}.jpg'.format(i)))\n need_result.append(os.path.join(dummy_folder, 'memes', 'meme1.jpg'))\n 
need_result.append(os.path.join(dummy_folder, 'memes', 'meme2.png'))\n need_result.append(os.path.join(dummy_folder, 'memes', 'meme4.jpg'))\n need_result.append(os.path.join(dummy_folder, 'memes', 'meme4.png'))\n\n for i in ['antigravity.png',\n 'egg.png',\n 'holy_grenade.png',\n 'spam.jpg',\n ]:\n need_result.append(os.path.join(dummy_folder, i))\n\n result = listdir(dummy_folder, full_path=True, only_files=True, walk=True, extensions=['jpg', 'png'])\n self.assertEqual(sorted(result), sorted(need_result))\n result = listdir(dummy_folder, full_path=True, only_files=True, walk=True, extensions=['.jpg', '.png'])\n self.assertEqual(sorted(result), sorted(need_result))\n result = listdir(dummy_folder, full_path=True, only_files=True, walk=True, extensions=['.JPG', 'png'])\n self.assertEqual(sorted(result), sorted(need_result))\n result = listdir(dummy_folder, full_path=True, only_files=True, walk=True, extensions=('.JPG', 'png'))\n self.assertEqual(sorted(result), sorted(need_result))\n result = listdir(dummy_folder, full_path=True, only_files=True, walk=True, extensions={'.JPG', 'png'})\n self.assertEqual(sorted(result), sorted(need_result))", "def matchExt(category):\n settings = settingsLoader()\n categoryExtention = (settings['categoriesDictSettings']\n [category]\n ['matches']\n ['matchContentExtention'])\n logging.debug(\"SORT: matchExt: %s\" % categoryExtention)\n for EachExtention in categoryExtention:\n logging.debug(\"SORT: matchExt: trying %s\" % EachExtention)\n for EachFile in listOfFiles:\n logging.debug(\"SORT: matchExt: trying %s inside of %s\" % (\n EachExtention, EachFile))\n if fnmatch.fnmatch(EachFile, EachExtention):\n return True\n return False", "def find_custom_template(args):\n for arg in args:\n if os.path.isdir(arg):\n dirlist = os.listdir(arg)\n if \"custom.html\" in dirlist:\n return os.path.join(arg, \"custom.html\")\n elif \"custom.jinja\" in dirlist:\n return os.path.join(arg, \"custom.jinja\")", "def find_with_ext(abs_root: str, compile_root: str, ext: str) -> iter([str]):\n for dirpath, dirnames, filenames in walk(abs_root):\n rpath = relpath(dirpath, start=compile_root)\n dirnames[:] = [x for x in dirnames if x not in {'.git'}]\n for fn in filenames:\n if splitext(fn)[1] == ext:\n yield join(rpath, fn)", "def get_fnames_by_ext(fnames, ext):\n found_fnames = []\n for fname in fnames:\n if ext in fname:\n found_fnames.append(fname)\n return found_fnames", "def _findfile(self,path,label):\n files=[];filenames=os.listdir(path)\n for name in filenames:\n if os.path.splitext(name)[0]==str(label):\n files.append(name)\n return files", "def get_label_files(path, ext=\".txt\"):\n return get_files(path, ext)", "def _collect_files(folders, extention='Default'):\r\n if isinstance(extention, str):\r\n if extention.lower() == 'default':\r\n extention = ['.*']\r\n else:\r\n extention = [extention]\r\n files = []\r\n for f in folders:\r\n for e in extention:\r\n files += glob(os.path.join(f, f'*{e}'))\r\n return files", "def get_templates(instrument=''):\n import os, json\n template_path = os.path.dirname(__file__)\n template_names = [fn\n for fn in os.listdir(template_path)\n if fn.endswith(\".json\") and fn.startswith(instrument)]\n templates = dict([(tn[len(instrument)+1:-5],\n json.loads(open(os.path.join(template_path, tn), 'r').read()))\n for tn in template_names])\n return templates", "def search_ext(self,strz):\n\t\tfor ext in file_type:\t#file_type = list of allow extension words\n\t\t\tif strz.endswith(ext):\n\t\t\t\tself.extension=ext\n\t\t\t\treturn 
strz.replace(ext,\"\")\n\t\treturn strz", "def build_from_c_and_cpp_files(extensions):\n for extension in extensions:\n sources = []\n for sfile in extension.sources:\n path, ext = os.path.splitext(sfile)\n if ext in ('.pyx', '.py'):\n if extension.language == 'c++':\n ext = '.cpp'\n else:\n ext = '.c'\n sfile = path + ext\n sources.append(sfile)\n extension.sources = sources", "def recognize_files(list_of_filenames):\n reg_exp = define_regex()\n pattern = re.compile(reg_exp) \n matched = []\n for filename in list_of_filenames:\n match = pattern.match(filename)\n if match != None:\n matched.append(filename)\n return matched", "def find(cls, paths):\r\n pythons = []\r\n for path in paths:\r\n for fn in cls.expand_path(path):\r\n basefile = os.path.basename(fn)\r\n if any(matcher.match(basefile) is not None for matcher in cls.REGEXEN):\r\n try:\r\n pythons.append(cls.from_binary(fn))\r\n except Exception as e:\r\n TRACER.log('Could not identify %s: %s' % (fn, e))\r\n continue\r\n return pythons", "def test_return_only_filenames_under_ext():\n hardcodedpath = \"/home/dados/VideoAudio/Yt videos/Soc Sams vi/Lang Sams vi/Chinese Sams vi/\" \\\n \"Harbin Mandarin yu/BMC 19v 12' 2018 4h Beginning Mandarin Chinese Lessons yu Harbin ytpl/a\"\n path_in_arg_if_any = None\n for arg in sys.argv:\n if arg.startswith('-ppath='):\n path_in_arg_if_any = arg[len('-ppath='):]\n if path_in_arg_if_any is None:\n path_in_arg_if_any = hardcodedpath\n ppath = path_in_arg_if_any\n filenames = os.listdir(ppath)\n filenames.sort()\n print(filenames)\n retlist = return_only_filenames_under_ext(filenames, ppath)\n print(retlist)", "def getFilesForImportWizard(self, extension):\n storage = FileSystemStorage(join(settings.MEDIA_ROOT, 'models'))\n folderPath = self.client_session.modelInfo.uri[:self.client_session.modelInfo.uri.rfind(\n os.path.sep)+1]\n fullFolderPath = join(storage.base_location, folderPath)\n return self._findFilesEntriesInFolderByExtension(fullFolderPath, f'.{extension}', True, [])", "def match_extensions(filename):\n return any(filename.endswith(e) for e in extensions)", "def load_template_files(self):\n templates = dict()\n template_path = settings.CUSTOM_VERTO_TEMPLATES\n templates.update(self.read_template_files(template_path))\n if hasattr(self, \"extra_converter_templates_directory\"):\n directory = self.extra_converter_templates_directory\n template_path = os.path.join(template_path, directory)\n templates.update(self.read_template_files(template_path))\n return templates", "def _FindTemplateFile(self, topdir):\n if topdir.endswith('..'):\n topdir = '/'.join(topdir.split('/')[:-2])\n fnames = os.listdir(topdir)\n for fname in fnames:\n filename = '%s/%s' % (topdir, fname)\n if filename.endswith('.yaml') and not os.path.isdir(filename) and \\\n os.path.exists(filename):\n f = open(filename, 'r')\n magic_code = f.read(22)\n f.close()\n if '#!fmri_file_template' in magic_code:\n return filename\n return None", "def buildListOfFiles(searchGlob):\n return [fpath for fpath in glob2.iglob(searchGlob) if os.path.isfile(fpath)]", "def files(pathspec):\n\treturn [f for f in glob.glob(pathspec)]", "def include_templates(self):\n if self._include_templates is None:\n result = []\n for inc in self._includes:\n result.append(self.manager.get_template(inc))\n self._include_templates = result\n return self._include_templates", "def findExtInDF(df, ext='abf', labrow=1, refCol=0, outfile=None):\n fils = []\n for i in range(df.shape[0]): # For every row\n for c in df.columns:\n try:\n temp_ = df.iloc[i][c]\n 
except:\n print(i, c)\n try:\n if temp_.split('.')[-1] == ext:\n try: # Could be multiple files\n temp_ = ''.join([u for u in temp_ if u != ' '])\n csplit = temp_.split(',')\n for ent in csplit:\n fils.append([df.iloc[i][df.columns[refCol]], ent, df[c].values[labrow]])\n except:\n fils.append([df.iloc[i][df.columns[refCol]], temp_, df[c].values[labrow]]) # there was only one file\n except:\n pass\n print('Found %i hits' %len(fils))\n \n # This part will save only the filenames in a txt doc\n # which can be passed to getPaths.sh to return the full paths to each file.\n if outfile is not None:\n with open(outfile, 'w') as fOut:\n for f in fils:\n fOut.write(','.join([str(i) for i in f]))\n fOut.write('\\n')\n return fils", "def list_templates(self):\n # Compile regular expression to match all templates with '.tpl'\n # extension, while avoiding hidden files (files preceded with a\n # full-stop, e.g. .MyTemplate.py.tpl.swp)\n regex = re.compile(r'(^[^\\.].*)\\.tpl(\\.\\w+)?$')\n\n # Find templates matching the regex\n template_list = self._template_env.list_templates(\n filter_func=lambda template_name: re.match(regex, template_name))\n\n return template_list", "def find_photos(source_path, common_extensions=('JPG', 'CR2', 'ORF', 'ARW', 'TIFF', 'DNG'), ignore=[]):\n # combinedignored = re.compile('|'.join('(?:{0})'.format(x) for x in ignore))\n # use endswith , ignore must be a tuple then\n # if ignore and dirpath.endswith(ignore):\n # for duplication, at the end cll the same funciton\n\n source_files = list()\n\n for (dirpath, dirnames, filenames) in os.walk(source_path):\n for f in filenames:\n if f.upper().endswith(common_extensions):\n # source_files.append(os.path.join(dirpath, f))\n parent = os.path.basename(os.path.normpath(dirpath))\n source_files.append({'dir':dirpath,\n 'filename':f,\n 'parent_folder':parent})\n\n return source_files", "def get_html_files_for_candidates(f_name):\n candidates = ['Joe Biden','Kamala Harris','Elizabeth Warren','Bernie Sanders']\n candidates_files = ['{}.html'.format(candidate) for candidate in candidates]\n results = []\n files = sorted([os.path.join(f_name, f) for f in os.listdir(f_name) if os.path.isfile(os.path.join(f_name, f))]) #extra check if is file unnecessary\n for file in files:\n for c in candidates_files:\n if c in file:\n results.append(file)\n print(results)\n return results", "def __is_file(extension_p, all_extensions_p):\n return extension_p in all_extensions_p", "def get_filelist(import_path, extension):\n filelist = []\n for root, dirs, files in os.walk(import_path):\n filelist += glob.glob(os.path.join(root, '*.' 
+ extension))\n return filelist", "def getAllFilesWithExtension(directory,extension):\n filesWithExtension = []\n for root, dirs, files in os.walk(directory):\n for file in files:\n if file.endswith(extension):\n filesWithExtension.append(os.path.realpath(os.path.join(root, file)))\n return filesWithExtension", "def find_by_extension(extension):\n for format in FORMATS:\n if extension in format.extensions:\n return format\n\n raise UnknownFormat('No format found with extension \"%s\"' % extension)", "def find_files(self,start_dir=None,pattern=\"*\",file_extention=\"*.fif\",recursive=True,debug=False,abspath=False,\n ignore_case=False):\n pattern = self.update_pattern(pattern,ignore_case=ignore_case)\n \n if not isinstance(file_extention,(list)):\n s = file_extention\n file_extention = list()\n file_extention.append(s)\n \n if debug or self.debug:\n logger.debug(\"start dir : {}\\n\".format(start_dir) +\n \" -> glob pattern : {}\\n\".format(pattern) +\n \" -> file extention : {}\\n\".format(file_extention) +\n \" -> glob recursive : {}\\n\".format(recursive) +\n \" -> adding abs path: {}\\n\".format(abspath)\n )\n files_found = []\n with self.working_directory(start_dir):\n for fext in file_extention: # ToDo fext re /\\.vhdr|vmrk|eeg$/\n for f in glob.iglob(pattern + fext,recursive=recursive):\n #print(f)\n if abspath:\n files_found.append(os.path.abspath(os.path.join(start_dir,f)))\n else:\n files_found.append(f)\n \n files_found.sort()\n return files_found", "def process_all_files():\n src_files = get_doc_files()\n\n for src_pathname in src_files:\n if src_pathname.suffix in MARKDOWN_EXTENSIONS:\n process_file_markdown(src_pathname)\n elif src_pathname.suffix in STATIC_ASSET_EXTENSIONS:\n process_file_copytodest(src_pathname)", "def find_files(substring, path, recursive=False,\n check_ext=None, ignore_invisible=True,\n ignore_substring=None):\n def check_file(f, path):\n if not (ignore_substring and ignore_substring in f):\n if substring in f:\n compl_path = os.path.join(path, f)\n if os.path.isfile(compl_path):\n return compl_path\n return False\n\n results = []\n\n if recursive:\n for par, nxt, fnames in os.walk(path):\n for f in fnames:\n fn = check_file(f, par)\n if fn:\n results.append(fn)\n\n else:\n for f in os.listdir(path):\n if ignore_invisible and f.startswith('.'):\n continue\n fn = check_file(f, path)\n if fn:\n results.append(fn)\n\n if check_ext:\n results = [r for r in results if os.path.splitext(r)[-1] == check_ext]\n\n return results", "def ImportTemplate(template_names):\n for file in template_names:\n try:\n return __import__(file)\n except ImportError:\n pass\n raise ImportError, 'No files matching %s' % template_names" ]
[ "0.6474418", "0.63438606", "0.63118434", "0.6272479", "0.6263339", "0.6136152", "0.6116507", "0.6069046", "0.60550666", "0.6040801", "0.599823", "0.5954619", "0.5945622", "0.59430933", "0.5937887", "0.58905417", "0.58869183", "0.5864783", "0.58605796", "0.58385235", "0.5812919", "0.5753414", "0.5749823", "0.5739253", "0.57257795", "0.5712119", "0.56785357", "0.5668057", "0.56670326", "0.5654497", "0.5634818", "0.5634637", "0.5623426", "0.56233555", "0.5611521", "0.5596653", "0.55907226", "0.55902505", "0.5585948", "0.5576665", "0.5566917", "0.55565155", "0.5556419", "0.5527172", "0.55160266", "0.54981434", "0.54923975", "0.5489226", "0.5478076", "0.54703647", "0.546866", "0.5464813", "0.5464508", "0.54611796", "0.5460293", "0.54557866", "0.5454655", "0.5452189", "0.54491466", "0.544731", "0.544329", "0.54421777", "0.54402673", "0.5439876", "0.5437169", "0.54365975", "0.54297936", "0.5427491", "0.5411574", "0.5408735", "0.5406055", "0.5404166", "0.53977334", "0.5360686", "0.535714", "0.5354462", "0.53499335", "0.53480166", "0.53370726", "0.533283", "0.5328979", "0.5312096", "0.5310987", "0.53094363", "0.52981114", "0.52968436", "0.528992", "0.52855265", "0.52822816", "0.5278115", "0.5267069", "0.5263624", "0.5262578", "0.5260993", "0.5258565", "0.52440697", "0.5242932", "0.52346367", "0.52333695", "0.5231862" ]
0.74726915
0
The core component of this software is the Yasha class. When used as a commandline tool, a new instance will be created with each invocation. When used as a library, multiple different instances can be created with different configurations.
def __init__(self, root_dir: Path = Path('.'), variable_files: List[Union[Path,str]] = list(), inline_variables = dict(), yasha_extensions_files: List[Union[Path,str]] = list(), template_lookup_paths: List[Union[Path,str]] = list(), mode: Union[Literal['pedantic'], Literal['debug'], None] = None, encoding: str = ENCODING, **jinja_configs): self.root = root_dir self.parsers = PARSERS.copy() self.template_lookup_paths = [Path(p) for p in template_lookup_paths] self.yasha_extensions_files = [Path(p) for p in yasha_extensions_files] self.variable_files = [Path(f) for f in variable_files] self.encoding = encoding self.env = Environment() if mode == 'pedantic': self.env.undefined = StrictUndefined if mode == 'debug': self.env.undefined = DebugUndefined self.env.filters.update(FILTERS) self.env.tests.update(TESTS) for jinja_extension in CLASSES: self.env.add_extension(jinja_extension) if jinja_configs: for config, value in jinja_configs.items(): setattr(self.env, config, value) for ext in self.yasha_extensions_files: self._load_extensions_file(ext) self.env.loader = FileSystemLoader(self.template_lookup_paths) self._load_data_files(self.variable_files) # data from the data files becomes the baseline for jinja global vars self.env.globals.update(inline_variables) # data from inline variables / directly-specified global variables overrides data from the data files
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def main(cls):\n raise NotImplementedError", "def __init__(self):\r\n self._config = Config.load()\r\n self._bootstrap_jar_url = self._config.get('ivy', 'bootstrap_jar_url',\r\n default=self._DEFAULT_URL)\r\n self._timeout = Amount(self._config.getint('ivy', 'bootstrap_fetch_timeout_secs', default=1),\r\n Time.SECONDS)\r\n self._version_or_ivyxml = self._config.get('ivy', 'ivy_profile', default=self._DEFAULT_VERSION)\r\n self._classpath = None", "def __init__(self):\n self.libpath = os.sep.join(os.path.abspath(__file__).split(os.sep)[:-1])\n sys.path.append(self.libpath)\n libpath2 = os.sep.join(self.libpath.split(os.sep)[:-1])\n sys.path.append(libpath2)\n # Initialize TCMetaSchema with correct libpath\n TCMetaSchema(self.libpath)\n self.args, self.unknown = IceteaManager._parse_arguments()\n # If called with --clean, clean up logs.\n if self.args.clean:\n _cleanlogs(silent=self.args.silent, log_location=self.args.log)\n\n LogManager.init_base_logging(self.args.log, verbose=self.args.verbose,\n silent=self.args.silent, color=self.args.color,\n no_file=(self.args.list or self.args.listsuites),\n truncate=not self.args.disable_log_truncate)\n\n self.logger = LogManager.get_logger(\"icetea\")\n self.pluginmanager = None\n self.resourceprovider = ResourceProvider(self.args)\n self._init_pluginmanager()\n self.resourceprovider.set_pluginmanager(self.pluginmanager)", "def main(self):\r\n pass", "def main():\n pass", "def __init__(self):\n \n self.label = \"ArcSDM Tools\"\n self.alias = \"ArcSDM\" \n\n # List of tool classes associated with this toolbox\n self.tools = [PartitionNNInputFiles, CombineNNOutputFiles, NeuralNetworkOutputFiles, NeuralNetworkInputFiles, \n CalculateWeightsTool,SiteReductionTool,CategoricalMembershipToool,\n CategoricalAndReclassTool, TOCFuzzificationTool, CalculateResponse, LogisticRegressionTool, Symbolize, \n ROCTool, AgterbergChengCITest, AreaFrequencyTable, GetSDMValues, GrandWofe]", "def main(self) -> None:\n pass", "def __init__(self, **kwargs):\n self.config = kwargs[\"config\"]\n self.cli = client.DefaultClient(app_key=self.config[\"app_key\"], app_secret=self.config[\"app_secret\"])\n self.req = None", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.pythia", "def tool(self, tool_cls, *args, options={}, **kwargs):\n tool_options = self.OPTIONS.copy()\n tool_options.update(options)\n\n tool_instance = tool_cls(\n *args,\n flags=self.FLAGS,\n bin_dir=self.BIN_DIR,\n HHLIB=self.HHLIB,\n options=tool_options,\n **kwargs)\n tool_instance.tool = unittest.mock.MagicMock()\n return tool_instance", "def main(self, **kwargs) -> None:\n ...", "def main():\n\tcli = Cli()\n\tcli.run()", "def __init__(self):\n self.label = \"Create\"\n self.alias = \"\"\n\n # List of tool classes associated with this toolbox\n if core.get_pass():\n self.tools = [Fbound, Roads, Diekdikisi]\n else:\n self.tools = []", "def __init__(self):\n self.timeout = Config.conf['timeout']\n self.ctimeout = Config.conf['ctimeout']\n self.download_timeout = Config.conf['download_timeout']\n self.agent = Config.conf['http_agent']\n self.http_proxy = Config.conf['http_proxy']\n self.cache_support = False\n self.insecure = Config.conf['http_insecure']\n self._curl_exec = Config.conf['use_curl_executable']\n self._select_implementation()", "def __init__(self):\n\n # Primary configuration of the module is via the container environment.\n # We need to recognise that some or all of these may not be defined.\n # All run-time config that's required is given a __CFG prefix to\n # simplify 
checking whether all that's required has been defined.\n #\n # The SQUONK2_SLUG is limited to 10 characters, when combined with\n # \"Fragalysis {SLUG} \", this leaves (80-22) 58 characters for the\n # use with the target-access-string and session project strings\n # to form Squonk2 Unit and Project names.\n self.__CFG_SQUONK2_ASAPI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_ASAPI_URL')\n self.__CFG_SQUONK2_DMAPI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_DMAPI_URL')\n self.__CFG_SQUONK2_UI_URL: Optional[str] =\\\n os.environ.get('SQUONK2_UI_URL')\n self.__CFG_SQUONK2_ORG_UUID: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_UUID')\n self.__CFG_SQUONK2_UNIT_BILLING_DAY: Optional[str] =\\\n os.environ.get('SQUONK2_UNIT_BILLING_DAY')\n self.__CFG_SQUONK2_PRODUCT_FLAVOUR: Optional[str] =\\\n os.environ.get('SQUONK2_PRODUCT_FLAVOUR')\n self.__CFG_SQUONK2_SLUG: Optional[str] =\\\n os.environ.get('SQUONK2_SLUG', '')[:_MAX_SLUG_LENGTH]\n self.__CFG_SQUONK2_ORG_OWNER: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_OWNER')\n self.__CFG_SQUONK2_ORG_OWNER_PASSWORD: Optional[str] =\\\n os.environ.get('SQUONK2_ORG_OWNER_PASSWORD')\n self.__CFG_OIDC_AS_CLIENT_ID: Optional[str] = \\\n os.environ.get('OIDC_AS_CLIENT_ID')\n self.__CFG_OIDC_DM_CLIENT_ID: Optional[str] = \\\n os.environ.get('OIDC_DM_CLIENT_ID')\n self.__CFG_OIDC_KEYCLOAK_REALM: Optional[str] = \\\n os.environ.get('OIDC_KEYCLOAK_REALM')\n\n # Optional config (no '__CFG_' prefix)\n self.__DUMMY_TARGET_TITLE: Optional[str] =\\\n os.environ.get('DUMMY_TARGET_TITLE')\n self.__DUMMY_USER: Optional[str] =\\\n os.environ.get('DUMMY_USER')\n self.__DUMMY_TAS: Optional[str] =\\\n os.environ.get('DUMMY_TAS')\n self.__SQUONK2_VERIFY_CERTIFICATES: Optional[str] = \\\n os.environ.get('SQUONK2_VERIFY_CERTIFICATES')\n\n # The integer billing day, valid if greater than zero\n self.__unit_billing_day: int = 0\n # True if configured...\n self.__configuration_checked: bool = False\n self.__configured: bool = False\n # Ignore cert errors? 
(no)\n self.__verify_certificates: bool = True\n\n # The record ID of the Squonk2Org for this deployment.\n # Set on successful 'pre-flight-check'\n self.__org_record: Optional[Squonk2Org] = None\n\n self.__org_owner_as_token: str = ''\n self.__org_owner_dm_token: str = ''\n self.__keycloak_hostname: str = ''\n self.__keycloak_realm: str = ''\n\n # The Safe QuerySet from the security module.\n # Used when we are given a tas (target access string).\n # It allows us to check that a user is permitted to use the access ID\n # and relies on ISPyB credentials present in the environment.\n self.__ispyb_safe_query_set: ISpyBSafeQuerySet = ISpyBSafeQuerySet()", "def main(self):", "def main(args=None):", "def main(args=None):", "def __init__(self):\r\n self.label = \"ProcessAirQuality\"\r\n self.alias = \"ProcessAirQuality\"\r\n\r\n # List of tool classes associated with this toolbox\r\n self.tools = [AirQuality]", "def __init__(\n self,\n logger: Log,\n console: Console,\n base_path: Path,\n home_path: Path = None,\n ):\n self.logger = logger\n self.input = console\n self.base_path = Path(base_path)\n self.home_path = Path(\n os.path.expanduser(home_path if home_path else Path.home())\n )\n\n self.host_arch = self.platform.machine()\n self.host_os = self.platform.system()\n\n self.app_tools: DefaultDict[AppConfig, ToolCache] = defaultdict(\n lambda: ToolCache(\n logger=self.logger,\n console=self.input,\n base_path=self.base_path,\n home_path=self.home_path,\n )\n )\n\n # Built-in tools without any external dependencies\n Subprocess.verify(tools=self)\n Download.verify(tools=self)", "def __init__(self): \n\t\n\t # get the environment\n\t\tself.env = env()", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def __init__(self, config=None, # configfile is needed to store parameters. None simulates one\n **kwargs):\n self.logger = logging.getLogger(name=__name__)\n #self.license()\n # make or retrieve the config file\n if isinstance(config, MemoryTree):\n self.c = config\n else:\n self.c = MemoryTree(config)\n # get the parameters right (in order of increasing priority):\n # 1. defaults\n # 2. environment variables\n # 3. config file\n # 4. command line arguments\n # 5. (if missing information) request from GUI or command-line\n self.parameters = defaultparameters # BEWARE: By not copying the\n # dictionary, defaultparameters are modified in the session (which\n # can be advantageous for instance with hostname in unit_tests)\n\n # get parameters from os.environment variables\n if not self.parameters['silence_env']:\n for k in self.parameters.keys():\n if \"REDPITAYA_\"+k.upper() in os.environ:\n newvalue = os.environ[\"REDPITAYA_\"+k.upper()]\n oldvalue = self.parameters[k]\n self.parameters[k] = type(oldvalue)(newvalue)\n if k == \"password\": # do not show the password on the screen\n oldvalue = \"********\"\n newvalue = \"********\"\n self.logger.debug(\"Variable %s with value %s overwritten \"\n \"by environment variable REDPITAYA_%s \"\n \"with value %s. 
Use argument \"\n \"'silence_env=True' if this is not \"\n \"desired!\",\n k, oldvalue, k.upper(), newvalue)\n # settings from config file\n try:\n update_with_typeconversion(self.parameters, self.c._get_or_create('redpitaya')._data)\n except BaseException as e:\n self.logger.warning(\"An error occured during the loading of your \"\n \"Red Pitaya settings from the config file: %s\",\n e)\n # settings from class initialisation / command line\n update_with_typeconversion(self.parameters, kwargs)\n # get missing connection settings from gui/command line\n if self.parameters['hostname'] is None or self.parameters['hostname']=='':\n gui = 'gui' not in self.c._keys() or self.c.gui\n if gui:\n self.logger.info(\"Please choose the hostname of \"\n \"your Red Pitaya in the hostname \"\n \"selector window!\")\n startup_widget = HostnameSelectorWidget(config=self.parameters)\n hostname_kwds = startup_widget.get_kwds()\n else:\n hostname = raw_input('Enter hostname [192.168.1.100]: ')\n hostname = '192.168.1.100' if hostname == '' else hostname\n hostname_kwds = dict(hostname=hostname)\n if not \"sshport\" in kwargs:\n sshport = raw_input('Enter sshport [22]: ')\n sshport = 22 if sshport == '' else int(sshport)\n hostname_kwds['sshport'] = sshport\n if not 'user' in kwargs:\n user = raw_input('Enter username [root]: ')\n user = 'root' if user == '' else user\n hostname_kwds['user'] = user\n if not 'password' in kwargs:\n password = raw_input('Enter password [root]: ')\n password = 'root' if password == '' else password\n hostname_kwds['password'] = password\n self.parameters.update(hostname_kwds)\n\n # optional: write configuration back to config file\n self.c[\"redpitaya\"] = self.parameters\n\n # save default port definition for possible automatic port change\n self.parameters['defaultport'] = self.parameters['port']\n # frequency_correction is accessed by child modules\n self.frequency_correction = self.parameters['frequency_correction']\n # memorize whether server is running - nearly obsolete\n self._serverrunning = False\n self.client = None # client class\n self._slaves = [] # slave interfaces to same redpitaya\n self.modules = OrderedDict() # all submodules\n\n # provide option to simulate a RedPitaya\n if self.parameters['hostname'] in ['_FAKE_REDPITAYA_', '_FAKE_']:\n self.startdummyclient()\n self.logger.warning(\"Simulating RedPitaya because (hostname==\"\n +self.parameters[\"hostname\"]+\"). Incomplete \"\n \"functionality possible. \")\n return\n elif self.parameters['hostname'] in ['_NONE_']:\n self.modules = []\n self.logger.warning(\"No RedPitaya created (hostname==\"\n + self.parameters[\"hostname\"] + \").\"\n \" No hardware modules are available. 
\")\n return\n # connect to the redpitaya board\n self.start_ssh()\n # start other stuff\n if self.parameters['reloadfpga']: # flash fpga\n self.update_fpga()\n if self.parameters['reloadserver']: # reinstall server app\n self.installserver()\n if self.parameters['autostart']: # start client\n self.start()\n self.logger.info('Successfully connected to Redpitaya with hostname '\n '%s.'%self.ssh.hostname)\n self.parent = self", "def __init__(self, api_use=False):\n self.api_use = api_use", "def make(self):\n pass", "def __init__(self):\n self._inst = {}", "def setup_class(cls):\n cls.cwd = os.getcwd()\n cls.t = tempfile.mkdtemp()\n dir_path = Path(\"packages\")\n tmp_dir = cls.t / dir_path\n src_dir = cls.cwd / Path(ROOT_DIR, dir_path)\n shutil.copytree(str(src_dir), str(tmp_dir))\n shutil.copytree(Path(CUR_PATH, \"data\", \"dummy_aea\"), Path(cls.t, \"dummy_aea\"))\n os.chdir(Path(cls.t, \"dummy_aea\"))\n cls.runner = CliRunner()", "def setup_class(cls):\n cls.cwd = os.getcwd()\n cls.t = tempfile.mkdtemp()\n dir_path = Path(\"packages\")\n tmp_dir = cls.t / dir_path\n src_dir = cls.cwd / Path(ROOT_DIR, dir_path)\n shutil.copytree(str(src_dir), str(tmp_dir))\n shutil.copytree(Path(CUR_PATH, \"data\", \"dummy_aea\"), Path(cls.t, \"dummy_aea\"))\n os.chdir(Path(cls.t, \"dummy_aea\"))\n cls.runner = CliRunner()", "def setup_class(cls):\n cls.runner = CliRunner()\n cls.agent_name = \"myagent\"\n cls.cwd = os.getcwd()\n cls.t = tempfile.mkdtemp()\n # copy the 'packages' directory in the parent of the agent folder.\n shutil.copytree(Path(CUR_PATH, \"..\", \"packages\"), Path(cls.t, \"packages\"))\n cls.connection_id = str(HTTP_CLIENT_PUBLIC_ID)\n cls.connection_name = \"http_client\"\n\n os.chdir(cls.t)\n result = cls.runner.invoke(cli, [*CLI_LOG_OPTION, \"init\", \"--author\", AUTHOR])\n assert result.exit_code == 0\n result = cls.runner.invoke(\n cli,\n [*CLI_LOG_OPTION, \"create\", \"--local\", cls.agent_name],\n standalone_mode=False,\n )\n assert result.exit_code == 0\n os.chdir(cls.agent_name)\n result = cls.runner.invoke(\n cli,\n [*CLI_LOG_OPTION, \"add\", \"--local\", \"connection\", cls.connection_id],\n standalone_mode=False,\n )\n assert result.exit_code == 0", "def __init__(self):\n self._opts = {} # dict of dicts of (opt:, override:, default:)\n self._groups = {}\n self._deprecated_opts = {}\n\n self._args = None\n\n self._oparser = None\n self._namespace = None\n self._mutable_ns = None\n self._mutate_hooks = set([])\n self.__cache = {}\n self.__drivers_cache = {}\n self._config_opts = []\n self._cli_opts = collections.deque()\n self._validate_default_values = False\n self._sources = []\n self._ext_mgr = None\n # Though the env_driver is a Source, we load it by default.\n self._use_env = True\n self._env_driver = _environment.EnvironmentConfigurationSource()\n\n self.register_opt(self._config_source_opt)", "def main():\n indicator = AyatanaIndicator()\n indicator.run()", "def setup_class(cls):\n initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST)", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.phe", "def main(self):\n pass", "def __init__(self):\n '''Lets find ot the system we run on'''\n self.syst = platform.system()\n '''And where we are'''\n self.module_abs_path = os.path.abspath(os.path.dirname(__file__))\n if self.syst == 'Windows':\n self.sonata_suite_config_json = self.vm_logsrv_cnf_location = os.path.join(self.module_abs_path,\n \"..\\\\configs_sonata\\sonata_conf.json\")\n elif self.syst == 'Linux':\n self.sonata_suite_config_json = 
self.vm_logsrv_cnf_location = os.path.join(self.module_abs_path,\n \"../configs_sonata/sonata_conf.json\")\n '''get some tools ready'''\n self.__utils__=var_utils.Varutils()\n '''MAP OF CONFIG PARAMS FROM JSON'''\n self.sonata_suite_config = self.__utils__.read_json_to_map(data_location=self.sonata_suite_config_json)", "def __init__(self, config, health_string, tasks_obj):\n self.session = cloudpassage.HaloSession(config.halo_api_key,\n config.halo_api_secret_key,\n api_host=config.halo_api_host,\n api_port=config.halo_api_port,\n integration_string=config.ua)\n self.product_version = config.product_version\n self.monitor_events = config.monitor_events\n self.slack_channel = config.slack_channel\n self.health_string = health_string\n self.tasks = tasks_obj\n self.flower_host = config.flower_host\n self.config = config\n return", "def use(self):\n pass", "def __init__(self, mauka_config: config.MaukaConfig):\n self.config: config.MaukaConfig = mauka_config\n \"\"\"Configuration dictionary\"\"\"\n\n self.name_to_plugin_class = {}\n \"\"\"Name of plugin to its corresponding Python class\"\"\"\n\n self.name_to_process = {}\n \"\"\"Name of plugin to its corresponding process (if it has one)\"\"\"\n\n self.name_to_exit_event = {}\n \"\"\"Name of plugin to its corresponding exit event object (if it has one)\"\"\"\n\n self.name_to_enabled = {}\n \"\"\"Name of plugin to whether or not the plugin is enabled\"\"\"\n\n self.zmq_context = zmq.Context()\n \"\"\"ZeroMQ context\"\"\"\n\n # noinspection PyUnresolvedReferences\n # pylint: disable=E1101\n self.zmq_pub_socket = self.zmq_context.socket(zmq.PUB)\n \"\"\"ZeroMQ publishing socket (allows publishing messages to plugins)\"\"\"\n\n self.cli_parser = MaukaCli()\n\n self.tcp_server_exit_event = multiprocessing.Event()\n\n self.zmq_pub_socket.connect(self.config.get(\"zmq.mauka.plugin.pub.interface\"))\n self.init_cli()", "def __init__(self):\n self.label = \"Data Assistant\"\n self.alias = \"dla\"\n\n # List of tool classes associated with this toolbox\n self.tools = [Append, Stage, NewFile, Preview, Replace]", "def __init__(self):\n super(StarCCM_wrapper, self).__init__()\n self.command = ['qsub', 'runStarCCM.pbs']\n self.starccmLic = os.environ['STAR_POWER_ON_DEMAND_LIC']\n self.CDLMD_LicFile = os.environ['LM_LICENSE_FILE']", "def main(args):", "def main(args):", "def use(self):", "def main():\n\tpass", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main() -> None:", "def main():\n\n # pylint: disable=import-outside-toplevel\n\n import sys\n cmd = ToyMaker()\n sys.exit(cmd.main())", "def GetInstance():\n pass", "def setup_class(cls):\n cls.runner = CliRunner()\n cls.agent_name = \"agent_1\"\n cls.cwd = os.getcwd()\n cls.t = tempfile.mkdtemp()\n os.chdir(cls.t)", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.ratchet", "def __init__(self):\n self._ll = LowLevelLibs()\n self._lib = self._ll.ratchet", "def configure(self):", "def configure(self):", "def configure(self):", "def configure(self):", "def cli():\n pass", "def main():\n return", "def __init__(self, *args, **kwargs):\n super(PythonTaskWrapper, self).__init__(*args, **kwargs)\n\n self.setOption(\n 'executableName',\n self.__pythonExecutable\n )", "def _setup(self):", "def _setup(self):", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():", "def cli():" ]
[ "0.6384319", "0.5974639", "0.59633154", "0.59085184", "0.58305883", "0.58266973", "0.5822428", "0.58058023", "0.5801817", "0.5789056", "0.57864296", "0.57859546", "0.5772526", "0.5756182", "0.5749882", "0.5728382", "0.57279474", "0.57279474", "0.57224405", "0.57149786", "0.56971276", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5687171", "0.5686563", "0.56785166", "0.5672052", "0.5661772", "0.56573224", "0.56573224", "0.56340134", "0.5631375", "0.56277347", "0.5614668", "0.56117684", "0.5583546", "0.55825645", "0.55788124", "0.5575398", "0.5566257", "0.5565972", "0.5543631", "0.5542776", "0.5542776", "0.5535548", "0.5529453", "0.5524578", "0.5524578", "0.5524578", "0.5524578", "0.55218816", "0.55205345", "0.5514956", "0.550971", "0.550971", "0.5507943", "0.5507943", "0.5507943", "0.5507943", "0.5504071", "0.5498716", "0.5496252", "0.548503", "0.548503", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727", "0.5484727" ]
0.0
-1
Render a single template
def render_template(self, template: Union[Path, str], find_data_files = True, find_extension_files = True, jinja_env_overrides = dict(), output: BinaryIO = None) -> Union[str, BinaryIO]: if isinstance(template, Path): # Automatic file lookup only works if template is a file. # If template is a str (like, for example, something piped in to Yasha's STDIN), then don't bother trying to find related files if find_extension_files: # load extension files related to this template, updating the local env and the local parsers dict extension_files = find_template_companion_files(template, EXTENSION_FILE_FORMATS, self.root) for ext in extension_files: self._load_extensions_file(ext) if find_data_files: # load variable files related to this template, merging their variables into the local env's globals object data_files = find_template_companion_files(template, self.parsers.keys(), self.root) self._load_data_files(data_files) # Add the template's directory to the template loader's search path self.env.loader.searchpath.append(template.parent) # type: ignore # Read the template string from the template path template_text = template.read_text() else: template_text = template for k, v in jinja_env_overrides: setattr(self.env, k, v) if output: # Don't return the rendered template, stream it to a file compiled_template: TemplateStream = self.env.from_string(template_text).stream() compiled_template.enable_buffering(5) compiled_template.dump(output, encoding=self.encoding) return output else: return self.env.from_string(template_text).render()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def render(request, template):\r\n return render_to_response('static_templates/' + template, {})", "def render(self, template: str, **vars) -> str:", "def render(self, _template, **context):\n context['_request'] = self.request\n self.response.write(self.jinja2.render_template(_template, **context))", "def get(self, request, *args, **kwargs):\n return render(request, self.template_name)", "def get(self, request, *args, **kwargs):\n return render(request, self.template_name)", "def get(self, request, *args, **kwargs):\n return render(request, self.template_name)", "def get(self, request, *args, **kwargs):\n return render(request, self.template_name)", "def get(self, request, *args, **kwargs):\n return render(request, self.template_name)", "def render(self, _template, context=None):\n variables = {}\n if context:\n variables.update(context)\n rv = self.jinja2.render_template(_template, **variables)\n self.response.write(rv)", "def render_only(self, template, *args, **kwargs):\n names = self._custom_template_names(template)\n names.append(template)\n tmpl = self._env.select_template(names)\n return tmpl.render(*args, **kwargs)", "def render_template(self, context=None):\n if context is None:\n context = self.get_template_context()\n return self.get_template_object().render(context)", "def render(self, template, **options):\n template_path = os.path.join(self.template_path, self.app.config['THEME'], template)\n return render_template(template_path, **options)", "def generic(request):\n\n # Load context\n json_path = os.path.join(settings.STATIC_ROOT,'json/context.json')\n context = simplejson.loads(''.join(open(json_path).readlines()))\n\n # Determine template name\n template_path = request.path[1:]\n if template_path == '':\n template_path = 'index.html'\n if template_path.endswith('/'):\n template_path += 'index.html'\n elif not template_path.endswith('.html'):\n template_path += '.html'\n\n # Check if template exists \n template_found = False\n for template_dir in settings.TEMPLATE_DIRS:\n full_template_path = os.path.join(template_dir, template_path)\n if os.path.isfile(full_template_path):\n template_found = True\n break\n\n if not template_found:\n raise Http404\n\n return direct_to_template(request, template_path, context)", "def render_template(self, template_name, **kwargs):\n template = django_template_loader.get_template(template_name)\n return template.render(DjangoContext(kwargs))", "def index(self, **kw):\n\n template = self.context\n request = self.request\n\n request.response.setHeader('content-type',\n template.content_type)\n\n return template.render(request, **kw)", "def render_template(self, template_name, **kwargs):\n template = self.env.get_template(template_name)\n return template.render(kwargs)", "def template(template_name, **props):\n return render_template(template_name, **template_context(**props))", "def render(self, template_name, **kwargs):\n raise NotImplementedError()", "def render_template(self, _template, _template_values={}):\n\t\ttemplate = JINJA_ENVIRONMENT.get_template(_template)\n\t\trv = template.render(_template_values)\n\t\tself.response.write(rv)", "def render(self, template, **kwargs):\n\t\tkwargs.update({\n\t\t\t\t\t\t\"login_url\": url_for(\"user.index\", action=\"login\"),\n\t\t\t\t\t\t\"logout_url\": url_for(\"user.index\", action=\"logout\", next=url_for(\"pages.index\", name=\"home\")),\n\t\t\t\t\t\t\"current_year\": time.strftime(\"%Y\"),\n\t\t\t\t\t\t\"plugin_urls\": 
self.get_plugin_urls(location=current_app.config[\"STATIC_PATH\"]+\"/plugins\", base_url=url_for(\"static\", filename=\"\"))\n\t\t\t\t\t})\n\t\t# data common to all modules\n\t\tkwargs.update(self.common)\n\t\t# ==\n\n\t\tif not self.check_role():\n\t\t\tabort(403)\n\n\t\tkwargs.update(page_name=template[template.rfind(\"/\")+1:template.rfind(\".html\")])\n\t\tself.load_jinja_fns()\n\t\treturn render_template(template, **kwargs)", "def render_response(self, *args, **kwargs):\n if self.template_name is not None:\n template = get_template(loader, self.template_name)\n self.response.write(template.render(**self.get_context(*args, **kwargs)))\n else:\n raise ValueError('No template provided.')", "def renderPage():\n return render_template(\"index.html\")", "def render(self, template: str, **vars) -> str:\n vars.setdefault('ctx', self._ctx)\n return self._renderer.render(template, **vars)", "def render(*args, **kwargs):\n if args:\n assert len(args) == 1, \\\n 'Expected exactly one argument, but got %r' % (args,)\n template = loader.load(args[0])\n else:\n template = cherrypy.thread_data.template\n ctxt = Context(url=cherrypy.url)\n ctxt.push(kwargs)\n return template.generate(ctxt)", "def render_reponse(self, template, **context):\n content = self.jinja2.render_template(template, **context)\n self.response.write(content)", "def render_template(template_path_from_root, **template_argv):\n _rendered = template.render(template_path_from_root, template_argv)\n return _rendered", "def render_template(template_name, **context):\n ctx = stack.top\n return _render(_lookup(ctx.app).get_template(template_name),\n context, ctx.app)", "def get(self, request):\n return render(request, self.template, self.context)", "def render_template(template: str, context: dict) -> str:\n if template is None:\n return \"\"\n return Template(template).render(Context(context))", "def render_template(\n template_name: str = \"index.html\", context: t.Dict[str, str] = {}\n):\n html_str: str\n with open(template_name, \"r\") as f:\n html_str = f.read()\n html_str = html_str.format(**context)\n return html_str\n # return f\"<h1>Hello {path=}</h1>\\n{template_name=}\"", "def render_template(template, **template_variables):\n return render_to_response(template, template_variables)", "def _render(self) -> str:\n html = self._template.render(self._transient_context)\n self._transient_context = None\n return html", "def _render_template(self, tplfile, env):\n with open(tplfile) as fp:\n tpl = Template(fp.read())\n return tpl.render(Context(env))", "def render_template(self, template_path, context={}):\n template_str = self.resource_string(template_path)\n return Template(template_str).render(Context(context))", "def render_template(self, template_path, context={}):\n template_str = self.resource_string(template_path)\n return Template(template_str).render(Context(context))", "def render_template(self, template_path, context={}):\n template_str = self.load_resource(template_path)\n return Template(template_str).render(Context(context))", "def render_template(self, *args, **kargs):\n try:\n return super(self.__class__,self).render_template(*args, **kargs)\n except jinja2.exceptions.TemplateNotFound:\n from flask.templating import render_template_string\n err = \"template@\\\"{T}\\\" not found, using literal\"\n err = err.format(T=self.template)\n report(err)\n assert self._template is not None, \"No login template on filesystem or in class!\"\n return render_template_string(self._template, **kargs)", "def render_template(self, 
template_path, context = {}):\n template_str = self.load_resource(template_path)\n return Template(template_str).render(Context(context))", "def render(self, template, **kw):\n self.write(self.render_string(template, **kw))", "def render(\n path_or_template: str,\n **kwargs,\n) -> str:\n if isinstance(path_or_template, Template):\n template = path_or_template\n elif path_or_template.startswith(\"<\"):\n template = Template(path_or_template)\n else:\n with open(path_or_template, \"r\") as filp:\n contents = filp.read()\n template = Template(contents)\n return template.safe_substitute(**kwargs)", "def render_template(template_path, context=None): # pragma: NO COVER\n if context is None:\n context = {}\n\n template_str = load_resource(template_path)\n template = Template(template_str)\n return template.render(Context(context))", "def render_template(template_path, context=None): # pragma: NO COVER\n if context is None:\n context = {}\n\n template_str = load_resource(template_path)\n template = Template(template_str)\n return template.render(Context(context))", "def get(self, request, *args, **kwargs):\n context = self.get_context_data(request)\n return render(\n request,\n self.template_name,\n context\n )", "def render(self, template, **kw):\n self.write(self.render_str(template, **kw))", "def render(self, template, **kw):\n self.write(self.render_str(template, **kw))", "def render(self, template, **kw):\n self.write(self.render_str(template, **kw))", "def dev_show_template(request, template):\r\n try:\r\n return render_to_response(template, request.GET.dict())\r\n except TopLevelLookupException:\r\n return HttpResponseNotFound(\"Couldn't find template {tpl}\".format(tpl=template))", "def render(request, template, data=None, mimetype=None, status=200):\n t = get_template(template)\n c = RequestContext(request, data)\n return HttpResponse(t.render(c), mimetype=mimetype, status=status)", "def render(self):\n return render_to_string(\n self.template_name, self.get_context_data(), request=self.request\n )", "def RenderResponse(self, template, **context):\n jinja2_renderer = jinja2.get_jinja2(app=self.app)\n rendered_value = jinja2_renderer.render_template(template, **context)\n self.response.write(rendered_value)", "def render(self, tmpl_name, context_env):\n return self.tmpl._render(tmpl_name, context_env)", "def render(self, style = None):\r\n from pylons import c\r\n style = style or c.render_style or 'html'\r\n template = self.template(style)\r\n if template:\r\n res = template.render(thing = self)\r\n return res if (style and style.startswith('api')) else unsafe(res)\r\n else:\r\n raise NoTemplateFound, repr(self)", "def main_function(template_file):\n\n content = load(template_file)\n assert content, \"Couldn't load template\"\n\n template = Template(content)\n\n return template.render(context(content))", "def render_to_response(template, context, request, *args, **kwargs):\n from django.shortcuts import render_to_response as rtr\n from django.template import RequestContext\n return rtr(template, context, context_instance=RequestContext(request), *args, **kwargs)", "def render_response(template, *args, **kwargs):\n\treturn render_template(template, *args, user=current_user(), **kwargs)", "def render_template(*args, **kwargs):\r\n params = {'cache_buster': cache_buster, 'user': {}, 'user_json': {}, 'PROD': PRODUCTION,\r\n 'static_route': 'http://cdn1.pythonhackers.com'}\r\n params.update(**kwargs)\r\n\r\n return template_render(*args, **params)", "def get(self):\n return render_template 
('nome do html')", "def render(self):\n context = {'groups': self._groups}\n\n return loader.render_to_string(self._template_path, dictionary=context)", "def render(self, template, context):\n try:\n template = self.environment.from_string(template)\n except TemplateSyntaxError as e:\n raise TemplateError(e)\n try:\n return template.render(**context)\n except (UndefinedError, TypeError) as e:\n raise TemplateError(e)", "def render_template(self, filename, **kwargs):\n\n kwargs.update({\n 'user': users.get_current_user(),\n 'login_url': users.create_login_url('/register'),\n 'logout_url': users.create_logout_url('/')\n })\n\n template = self.jinja_environment.get_template(filename)\n self.response.out.write(template.render(**kwargs))", "def render(self, template_name, **kwargs):\n template = self._jinja_env.get_template(template_name)\n try:\n html = self._render(template, **kwargs)\n except TemplateSyntaxError as e:\n self.error_page('Template syntax error at {}:{}:<br> {}'.format(e.name, e.lineno, e.message), 500)\n return\n except Exception as e:\n log.exception('Jinja2 exception while rendering the template {}'.format(template_name))\n self.error_page('Jinja2 template exception: {}'.format(e), 500)\n return\n self.finish(html)", "def render(self):\n\n template_string = parseValues(self.data)[1]\n context = self.context\n\n # Run the prebuild plugins, we can't use the standard method here because\n # plugins can chain-modify the context and data.\n for plugin in self.site._plugins:\n if hasattr(plugin, 'preBuildPage'):\n context, data = plugin.preBuildPage(self.site, self, context, data)\n\n if self.site.two_phase:\n initial_string = Template(template_string).render(context)\n return Template(initial_string).render(context)\n\n\n return Template(template_string).render(context)", "def __call__(self, template, obj=None):\n for engine in self.engines:\n filename = engine.find_template_filename(template)\n if filename:\n if obj:\n self.res.locals.update(obj)\n html = engine.render_source(filename, self.res.locals)\n self.res.send_html(html)\n break\n else:\n raise ValueError(\"Could not find a template with name '%s'\" % template)", "def get(self):\n self.render('view.html')", "def _render(self, request, template=None, status=200, context={}, headers={}, prefix_template_path=True):\n\n format = self._get_format(request)\n\n # Render 406 Not Acceptable if the requested format isn't supported.\n if not format:\n return HttpResponse(status=406)\n\n if template:\n\n if prefix_template_path:\n template_path = '%s.%s' % (self.template_path + template, format.extension)\n else:\n template_path = '%s.%s' % (template, format.extension)\n\n try:\n response = render(\n request = request,\n template_name = template_path,\n dictionary = context,\n status = status,\n content_type = '%s; charset=%s' % (format.content_type, settings.DEFAULT_CHARSET)\n )\n except TemplateDoesNotExist:\n try:\n response = HttpResponse(\n content = serializers.find(format)(context).serialize(request),\n content_type = '%s; charset=%s' % (format.content_type, settings.DEFAULT_CHARSET),\n status = status\n )\n except serializers.UnknownSerializer:\n raise self.Error(\n 'No template exists at %(template_path)s, and no serializer found for %(format)s' % {\n 'template_path': template_path,\n 'format': format\n }\n )\n else:\n response = HttpResponse(\n content = serializers.find(format)(context).serialize(request),\n content_type = '%s; charset=%s' % (format.content_type, settings.DEFAULT_CHARSET),\n status = status\n )\n\n for 
header, value in headers.items():\n response[header] = value\n\n return response", "def render(self, template, *args, **kwargs):\n self._render(template, sys.stdout, *args, **kwargs)", "def render(template, context):\n if not template:\n return None\n\n text = \"\"\n filename = \"templates/\" + template\n with open(filename) as f:\n text = f.read()\n # First compile template into extended base template.\n is_child = re.search(extend_search, text.splitlines()[0])\n if is_child:\n base_filename = \"templates/\" + is_child.group(2)\n with open(base_filename) as base:\n text = extend_template(base.read(), text)\n # Run conditional checks\n has_conditions = re.search(if_search, text)\n if has_conditions:\n text = render_conditionals(text, context)\n # Replace any variables passed to the render function.\n for replace in context.replaces.keys():\n arg_search = re.compile(\"{{ \" + replace + \" }}\")\n text = re.sub(arg_search, context.replaces[replace], text)\n return text", "def render(self, template, **kw):\n self.write(self._render_str(template, **kw))", "def render_template(self, variables=None):\n if self._template is None:\n return None\n if variables is None:\n variables = self._variables\n if variables is None:\n return None\n rendered = self._template.render(variables)\n lines = rendered.splitlines()\n for line in lines:\n matches = re.search(r\"\\{\\{[^\\{\\}]*\\}\\}\", line)\n if matches is not None:\n return None\n return rendered", "def get(self, *args, **kwargs):\n self.render(\n os.path.join(self.application.template_home, \"nyi.html\"),\n **self.get_template_args()\n )", "def render_main_template(model, request, contenttile='content'):\n ActionContext(model, request, contenttile)\n return render_template_to_response(\n cone.app.cfg.main_template,\n request=request,\n model=model\n )", "def index():\n return render_template('home.jinja2')", "def render_string(self, template: str, **vars) -> str:", "def render_template(self, string, context=None):\n context = context or {}\n context = Context(context)\n return Template(string).render(context)", "def render_template(self, tmpl_name, **kwargs):\n tmpl = self.tplenv.get_template(tmpl_name)\n return tmpl.render(plugin_shortname=self.plugin.get_shortname(), plugin_version=self.plugin.get_version(),\n plugin_info=self.plugin.get_info(), p=self.plugin,\n **kwargs)", "def render(filename, context):\n\ttemplate = parser.Template(open(TEMPLATES_DIR + '/' + filename).read())\n\treturn template.eval(context)", "def render(self, activity, context, typename=None):\n if not isinstance(context, dict):\n raise ContextTypeException('context must be dict. 
it should not Context or RequestContext')\n template_names = self.get_template_names(activity, typename)\n template = select_template(template_names)\n context = self.prepare_context(activity, context,\n typename=typename)\n return template.render(context)", "def get_template(self):\n template_string = self.remgr.render_template(self)\n return self.provider.format_template(template_string)", "def content():\n try:\n url = request.args.get('url')\n if not url:\n raise Exception('Expected url parameter')\n return render(cached_content(url=url), template='content.jinja2')\n except Exception, e:\n traceback.print_exc()\n return render({'url': request.url, 'error': str(e)},\n template='error.jinja2')", "def render_home():\r\n\treturn render_template(\"index.html\")", "def render_template(template, **kwargs):\n\n template_loader = jinja2.FileSystemLoader(searchpath=\"templates/\")\n template_env = jinja2.Environment(loader=template_loader)\n template = template_env.get_template(template)\n return template.render(**kwargs)", "def post(self, request, *args, **kwargs):\n return render(request, self.template_name, self.get_context_data(**kwargs))", "def main_page():\n return render_template(\"main_page.html\")", "def render(request, *args, **kw):", "def main_page():\n return render_template(\"index.html\")", "def render_template_def(template_name, def_name, **context):\n ctx = stack.top\n template = _lookup(ctx.app).get_template(template_name)\n return _render(template.get_def(def_name), context, ctx.app)", "def templated(template=None):\n def decorator(f):\n @wraps(f)\n def decorated_function(*args, **kwargs):\n # Run the view\n ctx = f(*args, **kwargs)\n # Create a context if needed\n if ctx is None:\n ctx = {}\n # Or return exotic value. A redirect for example\n elif not isinstance(ctx, dict):\n return ctx\n # Compute the template name if needed\n template_name = template\n if template_name is None:\n template_name = request.endpoint.replace('.', '/') + '.html'\n # Render\n return render_template(template_name, **ctx)\n return decorated_function\n return decorator", "def render(self, tmpl_file, context):\n template = Template(tmpl_file.read_text(), keep_trailing_newline=True)\n return template.render(context)", "def get(self):\n path = os.path.join(os.path.dirname(__file__), '../pages/upload_information.html')\n self.response.out.write(template.render(path, {}))", "def render_template(template_path, **kwargs):\n template = JinjaEnv._get().get_template(template_path)\n return template.render(**kwargs)", "def main():\n return render_template(\"main.html\")", "def main():\n return render_template(\"main.html\")", "def render_to_response(template_name, dictionary=None, context_instance=None, namespace='main', **kwargs):\r\n\r\n # see if there is an override template defined in the microsite\r\n template_name = microsite.get_template_path(template_name)\r\n\r\n dictionary = dictionary or {}\r\n return HttpResponse(render_to_string(template_name, dictionary, context_instance, namespace), **kwargs)", "def render(self, tmpl_name, request_env):\n return super().render(tmpl_name, request_env)", "def index():\n return render_template(\"main.html\")", "def get(self):\n return render_template(\"index.html\")", "def index():\n return render_template('main.html')", "def index():\n return render_template('main.html')", "def index_page():\n\n return render_template(\"index.html\")", "def index_page():\n\n return render_template(\"index.html\")", "def index_page():\n\n return render_template(\"index.html\")" ]
[ "0.73298955", "0.72438174", "0.71931773", "0.7177177", "0.7177177", "0.7177177", "0.7177177", "0.7177177", "0.7039566", "0.70391536", "0.6924555", "0.69022024", "0.68838", "0.68724924", "0.6871491", "0.6865003", "0.6862738", "0.68552256", "0.6819078", "0.67653996", "0.6763914", "0.673967", "0.67374283", "0.6730136", "0.6694194", "0.6693522", "0.6681765", "0.66746867", "0.6672671", "0.66670316", "0.6665735", "0.6661113", "0.66335094", "0.6631682", "0.6631682", "0.6607002", "0.6565104", "0.65577096", "0.6555554", "0.6554506", "0.6546492", "0.6546492", "0.65421903", "0.6531114", "0.6531114", "0.6531114", "0.65296316", "0.65287733", "0.65210927", "0.65084267", "0.64662117", "0.6455585", "0.643284", "0.63963026", "0.638537", "0.6379421", "0.637844", "0.63661724", "0.6356295", "0.6354818", "0.6348146", "0.63433254", "0.6341684", "0.6337262", "0.633633", "0.6332319", "0.6329912", "0.63236076", "0.63043267", "0.6298505", "0.62981987", "0.6294329", "0.6283713", "0.62479186", "0.6246716", "0.62436396", "0.62430894", "0.62428886", "0.62160045", "0.62109995", "0.6204249", "0.6196464", "0.6185687", "0.6183971", "0.6183233", "0.6179472", "0.6160727", "0.6157787", "0.6154836", "0.6150205", "0.614835", "0.614835", "0.61433125", "0.61400086", "0.613694", "0.6135676", "0.61319196", "0.61319196", "0.61258334", "0.61258334", "0.61258334" ]
0.0
-1
When rendering or working with multiple template files, we load extension files related to those templates, which alters the environment, and we add each template's parent directory to the template loader search path, which also alters the environment. That means processing one template with a Yasha instance alters the behaviour of the Yasha instance for all future templates processed. To avoid this, we create an isolated Jinja environment for each template from the Yasha instance's base environment.
def _make_isolated_env_for_template(self, template: Union[Path, str]) -> Environment:\n    if isinstance(template, str):\n        # string templates have no associated files, and therefore don't alter the environment. They can use the base environment directly\n        return self.env\n    # Duplicate the base env, but replace references to dictionaries in the base env with copies of those dictionaries\n    env: Environment = self.env.overlay()\n    # globals can be a nested data structure, so it must be deep copied\n    env.globals = deepcopy(env.globals)\n    # filters and tests can be shallow-copied\n    env.filters = env.filters.copy()\n    env.tests = env.tests.copy()\n    # create a new filesystem loader\n    searchpath = env.loader.searchpath.copy()  # type: ignore\n    env.loader = FileSystemLoader(searchpath=searchpath)\n    return env
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def init_templates( path=\"boilerplate\" ):\n global template_env\n template_loader = jinja2.FileSystemLoader(searchpath=\"boilerplate\" )\n template_env = jinja2.Environment(\n loader=template_loader,\n lstrip_blocks=True\n )", "def _buildjinja2_templates(self):\n templates = self.embryo.templates\n\n # if templates is a module extract its public string attributes\n # into the templates dict expected below.\n if isinstance(templates, ModuleType):\n tmp_templates = {}\n for k in dir(templates):\n v = getattr(templates, k)\n if (not k.startswith('_')) and isinstance(v, (str, Template)):\n tmp_templates[k] = v\n templates = tmp_templates\n\n # load the jinja2 templates contained in the module, either in the form\n # of Template objects or strings.\n loaded_templates = {}\n jinja_env = build_env()\n\n if templates:\n for k, v in templates.items():\n say('loading template: {}'.format(k))\n if isinstance(v, Template):\n loaded_templates[k] = v\n elif isinstance(v, str):\n try:\n loaded_templates[k] = jinja_env.from_string(v)\n except Exception as exc:\n source = exc.source.split('\\n')[exc.lineno - 1]\n shout(f'error \"{exc.message}\", line {exc.lineno} {source}')\n\n self.jinja2_templates = loaded_templates", "def __init__(self, \n root_dir: Path = Path('.'),\n variable_files: List[Union[Path,str]] = list(), \n inline_variables = dict(),\n yasha_extensions_files: List[Union[Path,str]] = list(), \n template_lookup_paths: List[Union[Path,str]] = list(), \n mode: Union[Literal['pedantic'], Literal['debug'], None] = None,\n encoding: str = ENCODING, \n **jinja_configs):\n self.root = root_dir\n self.parsers = PARSERS.copy()\n self.template_lookup_paths = [Path(p) for p in template_lookup_paths]\n self.yasha_extensions_files = [Path(p) for p in yasha_extensions_files]\n self.variable_files = [Path(f) for f in variable_files]\n self.encoding = encoding\n self.env = Environment()\n if mode == 'pedantic': self.env.undefined = StrictUndefined\n if mode == 'debug': self.env.undefined = DebugUndefined\n self.env.filters.update(FILTERS)\n self.env.tests.update(TESTS)\n for jinja_extension in CLASSES:\n self.env.add_extension(jinja_extension)\n if jinja_configs:\n for config, value in jinja_configs.items():\n setattr(self.env, config, value)\n for ext in self.yasha_extensions_files:\n self._load_extensions_file(ext)\n self.env.loader = FileSystemLoader(self.template_lookup_paths)\n self._load_data_files(self.variable_files) # data from the data files becomes the baseline for jinja global vars\n self.env.globals.update(inline_variables) # data from inline variables / directly-specified global variables overrides data from the data files", "def jinja_environment(self):\n return jinja2.jinja2.Environment(\n loader=jinja2.jinja2.FileSystemLoader(os.path.dirname(__file__) + '/../templates'),\n extensions=['jinja2.ext.autoescape'])", "def render_templates(self):\n\n # dockerfile\n try:\n t = self.templates.get_template(\n 'docker/dockerfiles/{}.dockerfile.template'.format(self.repo)\n )\n except TemplateNotFound:\n t = self.templates.get_template(\n 'docker/dockerfiles/default.dockerfile.template'\n )\n\n self.files.append({\n 'name': 'Dockerfile',\n 'content': t.render(commit=self.commit),\n })\n\n # gunicorn\n t = self.templates.get_template(\n 'docker/gunicorn/gunicorn.conf.py'\n )\n self.files.append({\n 'name': 'gunicorn.conf.py',\n 'content': t.render(),\n })\n\n t = self.templates.get_template(\n 'docker/gunicorn/gunicorn.sh'\n )\n self.files.append({\n 'name': 'gunicorn.sh',\n 'content': t.render(),\n 
'mode': 0555,\n })\n\n # nginx\n t = self.templates.get_template(\n 'docker/nginx/app.nginx.conf'\n )\n self.files.append({\n 'name': 'app.nginx.conf',\n 'content': t.render(),\n })\n\n t = self.templates.get_template(\n 'docker/nginx/nginx.sh'\n )\n self.files.append({\n 'name': 'nginx.sh',\n 'content': t.render(),\n 'mode': 0555,\n })\n\n # cron/, etc/ iif there exists a `self.repo` directory\n def _filter(p):\n return (\"cron/\" in p or \"etc/\" in p) and (self.repo in p) and \\\n (not os.path.basename(p).startswith('.'))\n\n for t in self.templates.list_templates(\n filter_func=_filter):\n\n self.files.append({\n 'name': os.path.basename(t),\n 'content': self.templates.get_template(t).render(),\n })", "def _setup_jinja(manager: ConfigServiceManager, app: web.Application):\n\n tpl_path = manager.env.get('TEMPLATE_PATH')\n if not tpl_path:\n tpl_path = os.path.join(manager.config_root, 'templates')\n # load default templates provided by package\n loader = PackageLoader('vonx', 'templates')\n if tpl_path:\n # load custom templates if present\n # may want to use a resource loader if tpl_path looks like a package name (has a colon)\n loader = ChoiceLoader([\n loader,\n FileSystemLoader(tpl_path)\n ])\n filters = {\"jsonify\": json.dumps}\n aiohttp_jinja2.setup(app, loader=loader, filters=filters)", "def test_filesystem_loader(self):\n\n self.assertEqual(\n list(\n template_finder.templates_for_engine({\n 'BACKEND': 'django.templates.backends.django.Djangotemplate.',\n 'APP_DIRS': False,\n 'DIRS': ['/tmp/project/templates/', '/tmp/project/other_templates/']\n })\n ),\n [\n ('base.html', '/tmp/project/templates/base.html'),\n ('foo/bar.html', '/tmp/project/templates/foo/bar.html'),\n ('baz.html', '/tmp/project/other_templates/baz.html'),\n ]\n )", "def get_jinja_filename_environment(templates) -> jinja2.Environment:\n loader = jinja2.DictLoader(\n {template.name: template.name for template in templates}\n )\n return jinja2.Environment(\n loader=loader, trim_blocks=True, lstrip_blocks=True\n )", "def set_jinja_before_request():\n resource_provider.set_jinja_globals()", "def __init__(self, template_dirs=[], include_comments=False,\n include_introspection_server=False):\n # Flag to enable rendering of header and footer comments in templates\n self._include_comments = include_comments\n\n # Flag to enable inclusion of introspection server (for use with\n # smach_viewer)\n self._include_introspection_server = include_introspection_server\n\n # Create list of any custom user-defined template dirs + default\n # template dir\n self._template_dirs = (\n template_dirs +\n [os.path.join(os.path.dirname(os.path.realpath(__file__)),\n 'templates')])\n\n # Create template loader for the template directories\n template_loaders = [\n jinja2.FileSystemLoader(template_dir)\n for template_dir in self._template_dirs]\n self._template_loader = jinja2.ChoiceLoader(template_loaders)\n\n # Create an environment for reading and parsing templates, including\n # the SkipBlockExtension class to allow for skipping certain blocks.\n self._template_env = (\n jinja2.Environment(loader=self._template_loader,\n extensions=[jinja2.ext.do,\n SkipBlockExtension],\n trim_blocks=False,\n lstrip_blocks=True))\n\n # Skip comment blocks as required\n if not self._include_comments:\n self._template_env.skip_blocks.append('upper_comments')\n self._template_env.skip_blocks.append('lower_comments')\n\n # Skip introspection server blocks as required\n if not self._include_introspection_server:\n 
self._template_env.skip_blocks.append('introspection_server')\n self._template_env.skip_blocks.append('spin')\n\n # Register custom tests with the environment\n self._template_env.tests['expression'] = expression\n self._template_env.tests['not_string'] = not_string\n\n # Register custom filters with the environment\n self._template_env.filters['exptostr'] = exptostr\n\n # Create a template references cache dictionary\n # to be indexed by template names.\n self._template_ref_names_cache = {}\n\n # Create a template block names cache dictionary\n # to be indexed by template names.\n self._template_block_names_cache = {}\n\n # Create a template block cache dictionary\n # to be indexed by tuples of the form (template_name, block_name)\n self._template_block_cache = {}\n\n pass", "def __fill_all_templates__(self,configs):\n template_dir = configs['system'].get('Common_directories','template')\n sample_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','sample'))\n system_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','system'))\n qsub_template = os.path.join(template_dir,configs['pipeline'].get('Template_files','bcbio'))\n self.__fill_template__(sample_template,self.sample_file)\n self.__fill_template__(system_template,self.systems_file)\n self.__fill_template__(qsub_template,self.qsub_file)", "def get_jinja_environment(\n templates: List[pathlib.Path],\n trim_blocks: bool = True,\n lstrip_blocks: bool = True,\n **env_kwargs,\n) -> jinja2.Environment:\n template_dirs = set(str(item.parent) for item in templates)\n loader = jinja2.ChoiceLoader(\n [jinja2.FileSystemLoader(str(path)) for path in template_dirs]\n )\n\n env = jinja2.Environment(\n loader=loader,\n trim_blocks=trim_blocks,\n lstrip_blocks=lstrip_blocks,\n **env_kwargs,\n )\n env.filters.update(get_jinja_filters())\n return env", "def load_templates(self):\n TemplateHandler.templates = []\n for template in os.listdir(TemplateHandler.templates_path):\n template_config = self.load_template_conf(template)\n if template_config is None:\n continue\n TemplateHandler.templates.append(template_config)", "def create_jinja_environment(template_path: str) -> Environment:\n\n environment = Environment(\n loader=FileSystemLoader(template_path), autoescape=select_autoescape()\n )\n environment.globals[\"env\"] = env\n\n return environment", "def __get_env_for_template_libraries__(*libraries):\n child_loaders = [\n jinja2.PackageLoader('raco.language', l) for l in libraries]\n loaders = child_loaders + \\\n [jinja2.PackageLoader('raco.language', 'cbase_templates')]\n\n # StrictUndefined makes uses of the result of render() fail when\n # a template variable is undefined, which is most useful for debugging\n return jinja2.Environment(\n undefined=jinja2.StrictUndefined,\n loader=jinja2.ChoiceLoader(loaders))", "def template_loader(self):\n return None", "def setup_templates(self):\n self.libs[\"template\"] = (\"#libs/templates/include\", None, \"\")\n self[\"CPPPATH\"].append(\"#libs/templates/include\")", "def test_nothing_to_do(self):\n self.assertEqual(\n list(\n template_finder.templates_for_engine({\n 'BACKEND': 'django.templates.backends.jinja2.Jinja2',\n 'APP_DIRS': False,\n 'DIRS': []\n })\n ),\n []\n )", "def create_base_templates(outdir, templateEnv):\n for file in ME_TEMPLATES:\n filename = os.path.join(outdir, ME_FILENAME.format(file))\n template = templateEnv.get_template(file + '.go.jinja')\n\n with open(filename, 'w') as f:\n output = template.render(copyright=COPYRIGHT,\n 
generator_warning=GENERATOR_WARNING,\n package_name=PACKAGE_NAME)\n f.write(output)\n pass", "def create_environment(base):\n # Build a template loader based on SEARCH_PATHS\n resolver = AssetResolver()\n searchpath = [resolver.resolve(path).abspath() for path in SEARCH_PATHS]\n loader = pyramid_jinja2.SmartAssetSpecLoader(searchpath)\n\n # Make an overlay environment from the main Jinja2 environment. See:\n #\n # http://jinja.pocoo.org/docs/dev/api/#jinja2.Environment.overlay\n return base.overlay(autoescape=True, loader=loader)", "def angular_template_context(name):\n\n jinja_env_ext = Environment(loader=PackageLoader(__package__, 'templates'))\n jinja_env_hypothesis = h.client.jinja_env\n\n # first look if there is a local copy in annotran that we should use\n angular_template_path = 'client/{}.html'.format(name)\n base_directory = os.path.dirname(os.path.realpath(__file__))\n\n if os.path.isfile('{0}/templates/{1}'.format(base_directory, angular_template_path)):\n content, _, _ = jinja_env_ext.loader.get_source(jinja_env_ext, angular_template_path)\n else:\n content, _, _ = jinja_env_hypothesis.loader.get_source(jinja_env_hypothesis, angular_template_path)\n\n return {'name': '{}.html'.format(name), 'content': content}", "def render_template(\n template_name: str,\n context: dict[str, Any],\n extension: str,\n autoescape: bool = True,\n keep_trailing_newline: bool = False,\n) -> str:\n import jinja2\n\n template_loader = jinja2.FileSystemLoader(searchpath=MY_DIR_PATH)\n template_env = jinja2.Environment(\n loader=template_loader,\n undefined=jinja2.StrictUndefined,\n autoescape=autoescape,\n keep_trailing_newline=keep_trailing_newline,\n )\n template = template_env.get_template(f\"{template_name}_TEMPLATE{extension}.jinja2\")\n content: str = template.render(context)\n return content", "def run():\r\n template_locations = settings.MAKO_TEMPLATES\r\n for namespace, directories in template_locations.items():\r\n clear_lookups(namespace)\r\n for directory in directories:\r\n add_lookup(namespace, directory)", "def load_jinja_template(template_name: str) -> Template:\n loader = PackageLoader(\"elyra\", \"templates/components\")\n template_env = Environment(loader=loader)\n template_env.policies[\"json.dumps_kwargs\"] = {\"sort_keys\": False} # prevent automatic key sort on 'tojson'\n\n return template_env.get_template(template_name)", "def jinja():\n template_path = '/tmp/pycheat-jinja-template.html'\n output_path = '/tmp/pycheat-jinja-output.html'\n\n # create the testing template\n with open(template_path, 'w') as f:\n f.write(\"\"\"Testing template with {{athlet_type}}:\n{% for a in athlets %}\n{{a.name}} is from {{a['country']}}\n{% endfor %}\"\"\")\n\n # testing dict with variables\n context = {\n 'athlet_type': 'tennis players',\n 'athlets': [\n {'name': 'Roger Federer', 'country': 'SUI'},\n {'name': 'Rafael Nadal', 'country': 'ESP'},\n {'name': 'Novak Djokovic', 'country': 'SRB'}\n ]\n }\n\n import jinja2\n import os\n # render the template\n template_dir, template_filename = os.path.split(template_path)\n loader = jinja2.FileSystemLoader(template_dir)\n\n # whitespace control:\n # http://jinja.pocoo.org/docs/2.9/templates/#whitespace-control\n jinja_env = jinja2.Environment(loader=loader, trim_blocks=True,\n lstrip_blocks=True)\n template = jinja_env.get_template(template_filename)\n rendered_output = template.render(context)\n # print and write the result to the file\n print rendered_output\n with open(output_path, 'w') as f:\n f.write(rendered_output.encode('utf-8'))", "def 
_bootstrap():\r\n import os\r\n import sys\r\n \r\n pwd = os.path.dirname(__file__)\r\n \r\n (parent_directory, project_name) = os.path.split(pwd)\r\n \r\n # protect template itself from being bootstrapped\r\n if project_name == 'django_project_template':\r\n abort('bootstrap should not be run on project template!')\r\n\r\n env.project_name = project_name\r\n env.project_domain = env.project_name.split('.')[0].replace('_','-')\r\n \r\n def replace_in_files(path, find, replace):\r\n \r\n import fileinput\r\n \r\n if os.path.isfile(path):\r\n for line in fileinput.input(path, inplace=1):\r\n if find in line:\r\n line = line.replace(find, replace)\r\n sys.stdout.write(line)\r\n \r\n if os.path.isdir(path):\r\n # do not replace in virtual env\r\n if os.path.split(path)[1] == env.virtualenv_dir:\r\n return\r\n for f in os.listdir(path):\r\n replace_in_files(os.path.join(path, f), find, replace)\r\n\r\n # 'escape' placeholders here to protect them from being replaced\r\n replace_in_files(pwd, '@PROJECT_NAME' + '@', env.project_name)\r\n replace_in_files(pwd, '@PROJECT_DOMAIN' + '@', env.project_domain)", "def _prepare_assets(self, page_instructions, assets=None):\n assert type(assets) == tuple or type(assets) == list\n\n for yaml in page_instructions.yaml:\n # yaml = app/page/page.yaml\n template, origin = loader.find_template(yaml)\n filepath = template.origin.name\n\n # /Users/me/Development/app/templates/app/page/page.yaml\n yaml_basedir = os.path.dirname(yaml)\n # app/page\n template_basedir = filepath[:filepath.find(yaml)]\n # /Users/me/Development/app/templates\n\n for asset in assets:\n # directory = /media/js/templates\n if not yaml_basedir in asset:\n # The user might be specifying the directory relative to\n # the yaml file itself, so we'll add it for them if they\n # gave us something like 'media/js/templates'\n directory = os.path.join(yaml_basedir, asset)\n else:\n directory = asset\n\n sourcedirectory = os.path.join(template_basedir, directory)\n\n if not os.path.isdir(sourcedirectory):\n # We're going to try and find it somewhere else, it may not\n # be relative to the YAML file\n #\n # This is quite possible if the yaml file is processing a\n # \"chirp:\" attribute.\n try:\n sourcedirectory = find_directory_from_loader(\n page_instructions, asset)\n # We need to reset this, it has the yaml_basedir on it\n # at this point\n directory = asset\n except TemplateDoesNotExist:\n continue\n\n if not os.path.isdir(sourcedirectory):\n continue\n\n cachedirectory = os.path.join(self.cache_root, directory)\n\n if os.path.isdir(cachedirectory):\n if self._assets_are_stale(sourcedirectory, cachedirectory):\n shutil.rmtree(cachedirectory)\n else:\n continue\n\n shutil.copytree(sourcedirectory, cachedirectory)\n\n if settings.FILE_UPLOAD_PERMISSIONS is not None:\n os.chmod(cachedirectory, 02750)\n\n for root, dirs, files in os.walk(cachedirectory):\n for momo in files:\n os.chmod(os.path.join(root, momo),\n settings.FILE_UPLOAD_PERMISSIONS)\n for momo in dirs:\n os.chmod(os.path.join(root, momo), 02750)", "def load_template_files(self):\n templates = dict()\n template_path = settings.CUSTOM_VERTO_TEMPLATES\n templates.update(self.read_template_files(template_path))\n if hasattr(self, \"extra_converter_templates_directory\"):\n directory = self.extra_converter_templates_directory\n template_path = os.path.join(template_path, directory)\n templates.update(self.read_template_files(template_path))\n return templates", "def disable_templates():\n with patch(loader, template_source_loaders=None):\n 
with patch(settings, TEMPLATE_LOADERS=[]):\n yield", "def configure_jinja(theme, src):\n\n # Do not enable autoescape since we actually *do not* want it. Otherwise, we\n # wouldn't be able to integrate html content in the templates properly.\n\n theme_path = os.path.join(src, paths.THEMES_PATH, theme)\n logging.debug(\"Setting up environment at \" + theme_path)\n\n return Environment(loader=FileSystemLoader(theme_path), autoescape=False)", "def _set_templates(spm_dir=SPM_DIR):\n global EPI_TEMPLATE, T1_TEMPLATE, GM_TEMPLATE, WM_TEMPLATE, CSF_TEMPLATE\n\n spm_version = _get_version_spm(SPM_DIR)\n\n # Set the tpm and template paths according to SPM version\n if spm_version == 'spm12':\n template_path = 'toolbox/OldNorm'\n tpm_path = 'toolbox/OldSeg'\n else:\n template_path = 'templates'\n tpm_path = 'tpm'\n\n # configure template images\n EPI_TEMPLATE = os.path.join(SPM_DIR, template_path, 'EPI.nii')\n SPM_T1_TEMPLATE = os.path.join(SPM_DIR, template_path, 'T1.nii')\n T1_TEMPLATE = \"/usr/share/data/fsl-mni152-templates/avg152T1.nii\"\n if not os.path.isfile(T1_TEMPLATE):\n T1_TEMPLATE += '.gz'\n if not os.path.exists(T1_TEMPLATE):\n T1_TEMPLATE = SPM_T1_TEMPLATE\n GM_TEMPLATE = os.path.join(SPM_DIR, tpm_path, 'grey.nii')\n WM_TEMPLATE = os.path.join(SPM_DIR, tpm_path, 'white.nii')\n CSF_TEMPLATE = os.path.join(SPM_DIR, tpm_path, 'csf.nii')", "def set_jinja_globals():\n\n # The resource keys will not be present.\n ufo.app.logger.info('Start setting resources into jinja globals.\\n'\n 'Current jinja globals: %s' %\n ufo.app.jinja_env.globals.keys())\n\n ufo.app.jinja_env.globals['resources'] = json.dumps(_get_resources())\n\n # The resource keys should be present if set.\n ufo.app.logger.info('Finished setting resources into jinja globals.\\n'\n 'Current jinja globals: %s' %\n ufo.app.jinja_env.globals.keys())", "def generate_loader_vanilla():\n return template_loader_vanilla", "def test_app_loader(self):\n\n with mock.patch('template_tree.template_finder.apps', new=self.mock_apps):\n self.assertEqual(\n list(template_finder.templates_for_engine(self.engine_config)),\n [\n ('abc.html', '/tmp/project/project/templates/abc.html'),\n ('my_app/def.html', '/tmp/project/my_app/templates/my_app/def.html'),\n ('your_app/def.html', '/tmp/project/your_app/templates/your_app/def.html'),\n ]\n )", "def load_jinja_fns(self):\n\n\t\t# Function for jinja, to remove duplicates from flashed messages\n\t\tdef remove_duplicates(msgs):\n\t\t\tuniq_msgs = []\n\t\t\tfor msg in msgs:\n\t\t\t\tif msg not in uniq_msgs:\n\t\t\t\t\tuniq_msgs.append(msg)\n\n\t\t\treturn uniq_msgs\n\n\t\tcurrent_app.jinja_env.globals.update(remove_duplicates=remove_duplicates)", "def __init__(self, template_path, jinja_args=None):\n\n # Short description for the template class.\n self.desc = 'Generic'\n\n self.template_filename = os.path.basename(template_path)\n\n m = re.match(r'([^.]*).*\\.([^.]*)$', self.template_filename)\n if not m:\n raise Exception(\n 'Error matching template filename: %s' % self.template_filename)\n\n self.schema_object_type = m.group(1).lower()\n self.extension = m.group(2)\n\n default_jinja_args = {\n 'loader': jinja2.FileSystemLoader(os.path.dirname(template_path)),\n 'undefined': jinja2.StrictUndefined,\n 'trim_blocks': True,\n 'lstrip_blocks': True,\n 'extensions': ['jinja2.ext.do', 'jinja2.ext.loopcontrols'],\n 'line_statement_prefix': '%%',\n 'line_comment_prefix': '##'\n }\n\n if jinja_args:\n default_jinja_args.update(jinja_args)\n\n self.jinja_env = jinja2.Environment(**default_jinja_args)\n\n 
self.jinja_env.globals.update({\n 'template_filename': self.template_filename,\n 'error': error,\n 'full_id': full_id,\n 'quote': json.dumps,\n 'is_object': is_object,\n 'is_field': is_field,\n 'is_map': is_map,\n 'has_visibility': has_visibility,\n 'hasattr': hasattr,\n 'json_schema': json_schema,\n 'data_as_json': data_as_json,\n 'camelize': inflection.camelize,\n 'dasherize': inflection.dasherize,\n 'humanize': inflection.humanize,\n 'underscore': inflection.underscore,\n 'regex_replace': regex_replace,\n 'print': print,\n 'map_key': map_key,\n 'map_value': map_value,\n 'get_enum_dependencies': get_enum_dependencies,\n 'get_struct_dependencies': get_struct_dependencies,\n 'get_dependencies': get_dependencies,\n 'get_direct_dependencies': get_direct_dependencies,\n 'get_nested_enums': get_nested_enums,\n 'get_nested_structs': get_nested_structs,\n 'get_all_files': get_all_files,\n 'get_all_structs': get_all_structs,\n 'get_all_enums': get_all_enums,\n 'get_all_typespaces': get_all_typespaces,\n 'get_all_traits': get_all_traits,\n 'get_all_commands': get_all_commands,\n 'get_all_command_responses': get_all_command_responses,\n 'get_all_events': get_all_events,\n 'get_all_interfaces': get_all_interfaces,\n 'get_all_resources': get_all_resources,\n 'type_url_prefix': TYPE_URL_PREFIX,\n 'get_object_type': get_object_type,\n 'get_object_type_url': get_object_type_url,\n 'get_idl_type': idl_type,\n })\n\n self.jinja_env.tests.update({\n 'array': is_array,\n 'command': is_command,\n 'command_response': is_command_response,\n 'common': is_common,\n 'duration': is_duration,\n 'event': is_event,\n 'field': is_field,\n 'nullable': is_nullable,\n 'map': is_map,\n 'object': is_object,\n 'standard': is_standard,\n 'protobuf': is_protobuf,\n 'wdl': is_wdl,\n 'resource_id': is_resource_id,\n 'resource_name': is_resource_name,\n 'timestamp': is_timestamp,\n 'writable': is_writable,\n 'false': lambda x: not x,\n 'struct': is_struct,\n 'oneof': is_oneof,\n 'enum': is_enum,\n 'trait': is_trait,\n 'typespace': is_typespace,\n 'vendor': is_vendor,\n })\n\n self.jinja_env.filters.update({\n 'all': all,\n 'any': any,\n 'camelize': inflection.camelize,\n 'chain': itertools.chain,\n 'dasherize': inflection.dasherize,\n 'humanize': inflection.humanize,\n 'max': max,\n 'min': min,\n 'underscore': inflection.underscore,\n 'unique': unique,\n })", "def __init__(self, config):\n\n with open(config) as file:\n self.config = yaml.load(file, Loader=yaml.FullLoader)\n\n self.contents = []\n\n self.templateLoader = jinja2.FileSystemLoader(searchpath=\"./\")\n self.templateEnv = jinja2.Environment(loader=self.templateLoader)", "def _load_templates(cls):\n if cls._raw_templates is None:\n cls._raw_templates = fetch_rrlyrae_templates()", "def __init__(self, template_env):\r\n self._template_env = template_env\r\n self._pythons = []", "def main(temp_dir, extensions, template):\n env = load_env(template_dir=temp_dir)\n if not template:\n # Get all the templates and return a dict with enumerated \n # templates names\n ext = extensions if extensions else []\n template_dict = get_templates(env, extensions=ext)\n # Echo the content of the template directory by enumerating \n # the templates and a simple list join\n temp_list = list()\n for x in template_dict.items():\n num = str(x[0])\n # Remove whitespace, underscores and capitalize words\n temp_name = x[1].strip().replace(\"_\", \" \").title()\n temp_string = \"{}. 
{}\".format(num, temp_name)\n temp_list.append(temp_string)\n click.echo(\"\\n\".join(temp_list))\n # Prompt the user to give the number of the template\n temp_num = click.prompt(\n \"Choose a templeta by entering the number of the template.\",\n type=int\n )\n # Get the template from the template dictionary\n template = template_dict.get(temp_num)\n # Get the variables\n temp_vars = get_vars(template, env)\n # Crate a dict with variables and let the user input the variables\n vars_to_render = dict()\n for var in temp_vars:\n user_var = click.prompt(\"{}?\".format(var.capitalize()))\n vars_to_render[var] = user_var\n # Get the template\n temp = env.get_template(template)\n # Render the template\n click.echo(temp.render(vars_to_render))", "def test_theme_template_loading_by_prefix():\n app = create_ctfd()\n with app.test_request_context():\n tpl1 = render_template_string(\"{% extends 'core/page.html' %}\", content=\"test\")\n tpl2 = render_template(\"page.html\", content=\"test\")\n assert tpl1 == tpl2", "def render_env(self):\n return {\n jinja2.Template(k).render({self.name: self}):\n jinja2.Template(v).render({self.name: self})\n for k, v in self.env.items()\n } if self.env else self.env", "def index(path):\n return render_template(\"main.jinja2.html\")", "def return_template_output(base_dir,filename,data_dict):\n templateLoader = jinja2.FileSystemLoader( searchpath=base_dir)\n templateEnv = jinja2.Environment( loader=templateLoader )\n template = templateEnv.get_template(filename)\n output = template.render(data_dict)\n return output", "def render_template(self, \n template: Union[Path, str], \n find_data_files = True, \n find_extension_files = True, \n jinja_env_overrides = dict(), \n output: BinaryIO = None) -> Union[str, BinaryIO]:\n\n if isinstance(template, Path):\n # Automatic file lookup only works if template is a file. 
\n # If template is a str (like, for example, something piped in to Yasha's STDIN), then don't bother trying to find related files\n\n if find_extension_files:\n # load extension files related to this template, updating the local env and the local parsers dict\n extension_files = find_template_companion_files(template, EXTENSION_FILE_FORMATS, self.root)\n for ext in extension_files:\n self._load_extensions_file(ext)\n\n if find_data_files:\n # load variable files related to this template, merging their variables into the local env's globals object\n data_files = find_template_companion_files(template, self.parsers.keys(), self.root)\n self._load_data_files(data_files)\n \n # Add the template's directory to the template loader's search path\n self.env.loader.searchpath.append(template.parent) # type: ignore\n # Read the template string from the template path\n template_text = template.read_text()\n else:\n template_text = template\n \n for k, v in jinja_env_overrides:\n setattr(self.env, k, v)\n \n if output:\n # Don't return the rendered template, stream it to a file\n compiled_template: TemplateStream = self.env.from_string(template_text).stream()\n compiled_template.enable_buffering(5)\n compiled_template.dump(output, encoding=self.encoding)\n return output\n else:\n return self.env.from_string(template_text).render()", "def __render_templates(files_to_render, dest_location, jinja_env):\n errors = []\n\n from jinja2.exceptions import TemplateNotFound\n\n for template_file in files_to_render:\n filename = os.path.abspath(os.path.join(dest_location, template_file))\n\n print(\"Pillar template_file: {} --> {}\".format(template_file, filename))\n\n if not os.path.isdir(os.path.dirname(filename)):\n os.makedirs(os.path.dirname(filename))\n\n try:\n print(\"Attempting to load template_file: {}\".format(template_file))\n template_rendered = jinja_env.get_template(template_file).render(env=env)\n print(green(\"Pillar template_file rendered: {} --> {}\".format(template_file, filename)))\n\n # Only write the template file if we can actually render it\n with open(os.path.join(dest_location, template_file), 'w') as f:\n f.write(template_rendered)\n\n except TemplateNotFound:\n errors.append(template_file)\n print(red(\"Pillar template_file not found: {} --> {}\".format(template_file, filename)))\n\n if not len(errors):\n print(green(\"Pillar was successfully rendered in: {}\".format(dest_location)))\n else:\n print(red(\"Pillar could not compile the following templates:\"))\n for error in errors:\n print(red(\" - {}\").format(error))\n\n return len(errors) == 0", "def use_templates(self, templates):\n self.htmls = templates", "def _load_settings_to_jinja_env(self) :\n\t\t# Load filters if exists\n\t\tif hasattr(self.settings, 'FILTERS') :\n\t\t\tfor name, cls in utils.load_module(self.settings.FILTERS).__dict__.items() :\n\t\t\t\tself.jinja_env.filters[name] = cls\n\n\n\t\t# Load globals if exists\n\t\tif hasattr(self.settings, 'GLOBALS') :\n\t\t\tfor name, cls in utils.load_module(self.settings.GLOBALS).__dict__.items() :\n\t\t\t\tself.jinja_env.globals[name] = cls", "def load_render(views):\n render = render_jinja(\n views, encoding='utf-8',\n extensions=['jinja2.ext.do', AssetsExtension])\n render._lookup.assets_environment = env\n render._lookup.globals.update(dict(DEV=config.DEV,\n VERSION=get_version()))\n def inner():\n web.ctx.render = render;\n return inner", "def jinja2(self):\n return jinja2.get_jinja2(app=self.app)", "def test_themes_run_in_sandbox():\n app = create_ctfd()\n with 
app.app_context():\n try:\n app.jinja_env.from_string(\n \"{{ ().__class__.__bases__[0].__subclasses__()[40]('./test_utils.py').read() }}\"\n ).render()\n except SecurityError:\n pass\n except Exception as e:\n raise e\n destroy_ctfd(app)", "def new(root: str = \".\", name: str = \"piccolo_project\"):\n tree = os.walk(TEMPLATE_DIR)\n\n router = get_routing_framework()\n\n template_context = {\n \"router\": router,\n \"router_dependencies\": ROUTER_DEPENDENCIES.get(router) or [router],\n \"server\": get_server(),\n \"project_identifier\": name.replace(\" \", \"_\").lower(),\n }\n\n for directory in tree:\n dir_path, sub_dir_names, file_names = directory # type: ignore\n\n output_dir_path = os.path.join(root, dir_path.split(TEMPLATE_DIR)[-1])\n\n if not os.path.exists(output_dir_path):\n folder_name = output_dir_path.split(\"/\")[-1]\n if folder_name.startswith((\"_\", \".\")):\n continue\n os.mkdir(dir_path)\n\n for sub_dir_name in sub_dir_names:\n if sub_dir_name.startswith(\"_\"):\n continue\n\n sub_dir_path = os.path.join(output_dir_path, sub_dir_name)\n if not os.path.exists(sub_dir_path):\n os.mkdir(sub_dir_path)\n\n for file_name in file_names:\n if file_name.startswith(\"_\") and file_name != \"__init__.py.jinja\":\n continue\n\n extension = file_name.rsplit(\".\")[0]\n if extension in (\"pyc\",):\n continue\n\n if file_name.endswith(\".jinja\"):\n output_file_name = file_name.replace(\".jinja\", \"\")\n template = Environment(\n loader=FileSystemLoader(searchpath=dir_path)\n ).get_template(file_name)\n\n output_contents = template.render(**template_context)\n\n if output_file_name.endswith(\".py\"):\n try:\n output_contents = black.format_str(\n output_contents,\n mode=black.FileMode(line_length=80),\n )\n except Exception as exception:\n print(f\"Problem processing {output_file_name}\")\n raise exception from exception\n\n with open(\n os.path.join(output_dir_path, output_file_name), \"w\"\n ) as f:\n f.write(output_contents)\n else:\n if file_name.endswith(\".jinja_raw\"):\n output_file_name = file_name.replace(\n \".jinja_raw\", \".jinja\"\n )\n else:\n output_file_name = file_name\n\n shutil.copy(\n os.path.join(dir_path, file_name),\n os.path.join(output_dir_path, output_file_name),\n )\n\n print(\n \"Run `pip install -r requirements.txt` and `python main.py` to get \"\n \"started.\"\n )", "def update():\n if Project.use_templates:\n defaults = _project_defaults()\n\n template = Template()\n\n for template_dir in [os.path.abspath(os.path.join(herringlib, 'herringlib', 'templates'))\n for herringlib in HerringFile.herringlib_paths]:\n\n info(\"template directory: %s\" % template_dir)\n # noinspection PyArgumentEqualDefault\n template.generate(template_dir, defaults, overwrite=False)", "def get_templates_dirs(self): \n from pkg_resources import resource_filename\n return [ resource_filename(__name__, 'templates') ]\n # return []", "def load_templates(self):\n\n self.templates = []\n\n if os.path.exists(\"question_templates.txt\"):\n for line in open(\"question_templates.txt\", \"r\"):\n self.templates.append(line.replace(\"\\n\", \"\"))", "def __init__(self, template_name):\n # self.env = Environment(loader=PackageLoader(\n # package, path))\n # self.template = self.env.get_template(template_name)\n with open(template_name, 'r', encoding='UTF-8') as f:\n self.template = Template(f.read())", "def _init_test_project_dir(self, project_dir):\n templates = glob.glob(f'{project_dir}/*.yml.template')\n for template_path in templates:\n # Replace env vars in template\n with 
open(template_path, 'r', encoding='utf-8') as f_template:\n yaml = f_template.read()\n\n # Detect if every env var configured for the template\n template = os.path.basename(template_path)\n yaml_path = template_path.replace('.template', '')\n env_connectors = self._find_env_conn_by_template_name(template)\n is_configured = self._is_env_connector_configured(env_connectors)\n\n # \"Render\" the template and save to file if env vars configured\n if is_configured:\n template_vars = set(re.findall(r'\\$\\{(.+?)\\}', yaml))\n for var in template_vars:\n yaml = yaml.replace(\n f'${{{var}}}', self._all_env_vars_to_dict().get(var)\n )\n\n # Write the template replaced YAML file\n with open(yaml_path, 'w+', encoding='utf-8') as f_render:\n f_render.write(yaml)\n\n # Delete if exists but not configured\n else:\n try:\n os.remove(yaml_path)\n except OSError:\n pass", "def get_template_directories() -> list[Path]:\n template_directories = []\n for engine in engines.all():\n for template_loader in engine.engine.template_loaders: # type: ignore\n if is_compatible_template_loader(template_loader):\n for template_directory in template_loader.get_dirs():\n if isinstance(template_directory, str):\n template_directory = Path(template_directory)\n template_directories.append(template_directory)\n return template_directories", "def create_template_loader(self, template_path):\n raise NotImplementedError()", "def __init__(self, template_dir=None, package_name=None):\r\n self._template_dir = template_dir\r\n self._package_name = package_name\r\n self._pystache_renderer = pystache.Renderer(search_dirs=template_dir)", "def __call__(self, template_name, **kwargs):\n if not template_name.endswith('.jinja2'):\n template_name += '.jinja2'\n\n template = self._env.get_template(template_name)\n context = self._system.copy()\n context.update(kwargs)\n\n return jinja2.Markup(template.render(context))", "def _get_jinja2_env(self):\n env_config = {'undefined': StrictUndefined,\n 'trim_blocks': True,\n 'autoescape': True,\n 'lstrip_blocks': True}\n\n jinja2_env = Environment(**env_config)\n jinja2_env.loader = FileSystemLoader(TEMPLATE_ROOT)\n jinja2_env.filters['escape_attr'] = jinja2_attr_value_escape_filter\n jinja2_env.filters['escape_text'] = jinja2_text_value_escape_filter\n return jinja2_env", "def __init__(self, static_dir=None, template_dir=None):\n self.handlers_map = {}\n\n if static_dir:\n self.static_dir = static_dir\n else:\n self.static_dir = \"/static/\"\n\n if template_dir:\n self.template_dir = os.path.dirname(os.path.abspath(__file__)) + template_dir\n else:\n self.template_dir = os.path.dirname(os.path.abspath(__file__)) + '/templates'\n\n self.jinja_env = Environment(loader=FileSystemLoader(self.template_dir))", "def __init__(self, language, app_type):\n self._language = language\n self._app_type = app_type\n self.jinja_env = Environment(\n loader=FileSystemLoader(path.join(path.abspath(path.dirname(__file__)), 'templates')),\n autoescape=select_autoescape(['jinja'])\n )", "def get_django_template_dirs():\n template_dirs = []\n if 'django.template.loaders.filesystem.load_template_source' in\\\n settings.TEMPLATE_LOADERS or\\\n 'django.template.loaders.filesystem.Loader' in\\\n settings.TEMPLATE_LOADERS:\n template_dirs.extend(settings.TEMPLATE_DIRS)\n if 'django.template.loaders.app_directories.load_template_source' in\\\n settings.TEMPLATE_LOADERS or\\\n 'django.template.loaders.app_directories.Loader' in\\\n settings.TEMPLATE_LOADERS:\n from django.template.loaders.app_directories import 
app_template_dirs\n template_dirs.extend(app_template_dirs)\n return template_dirs", "def get_templates(self):\n\n data = self.request_from_server('templates')\n self.templates = data", "def render(self, template_name, **kwargs):\n currentUser = self.current_user\n from_workspace_str = self.get_argument(\"from_workspace\", default=\"0\", strip=False)\n from_workspace = from_workspace_str == \"1\"\n html = self.render_string(template_name, currentUser=currentUser, from_workspace = from_workspace, **kwargs)\n if from_workspace :\n scriptName = self.__class__.__name__\n\n if scriptName.endswith('Handler') :\n scriptName = scriptName[:-7] \n\n path = self.static_url('scripts/' + scriptName + '.js')\n\n js = '<script src=\"' + escape.xhtml_escape(path) + '\" type=\"text/javascript\"></script>'\n html = html + utf8(js)\n self.finish(html)\n return\n\n # Insert the additional JS and CSS added by the modules on the page\n js_embed = []\n js_files = []\n css_embed = []\n css_files = []\n html_heads = []\n html_bodies = []\n for module in getattr(self, \"_active_modules\", {}).values():\n embed_part = module.embedded_javascript()\n if embed_part:\n js_embed.append(utf8(embed_part))\n file_part = module.javascript_files()\n if file_part:\n if isinstance(file_part, (unicode_type, bytes_type)):\n js_files.append(file_part)\n else:\n js_files.extend(file_part)\n embed_part = module.embedded_css()\n if embed_part:\n css_embed.append(utf8(embed_part))\n file_part = module.css_files()\n if file_part:\n if isinstance(file_part, (unicode_type, bytes_type)):\n css_files.append(file_part)\n else:\n css_files.extend(file_part)\n head_part = module.html_head()\n if head_part:\n html_heads.append(utf8(head_part))\n body_part = module.html_body()\n if body_part:\n html_bodies.append(utf8(body_part))\n\n def is_absolute(path):\n return any(path.startswith(x) for x in [\"/\", \"http:\", \"https:\"])\n if js_files:\n # Maintain order of JavaScript files given by modules\n paths = []\n unique_paths = set()\n for path in js_files:\n if not is_absolute(path):\n path = self.static_url(path)\n if path not in unique_paths:\n paths.append(path)\n unique_paths.add(path)\n js = ''.join('<script src=\"' + escape.xhtml_escape(p) +\n '\" type=\"text/javascript\"></script>'\n for p in paths)\n sloc = html.rindex(b'</body>')\n html = html[:sloc] + utf8(js) + b'\\n' + html[sloc:]\n if js_embed:\n js = b'<script type=\"text/javascript\">\\n//<![CDATA[\\n' + \\\n b'\\n'.join(js_embed) + b'\\n//]]>\\n</script>'\n sloc = html.rindex(b'</body>')\n html = html[:sloc] + js + b'\\n' + html[sloc:]\n if css_files:\n paths = []\n unique_paths = set()\n for path in css_files:\n if not is_absolute(path):\n path = self.static_url(path)\n if path not in unique_paths:\n paths.append(path)\n unique_paths.add(path)\n css = ''.join('<link href=\"' + escape.xhtml_escape(p) + '\" '\n 'type=\"text/css\" rel=\"stylesheet\"/>'\n for p in paths)\n hloc = html.index(b'</head>')\n html = html[:hloc] + utf8(css) + b'\\n' + html[hloc:]\n if css_embed:\n css = b'<style type=\"text/css\">\\n' + b'\\n'.join(css_embed) + \\\n b'\\n</style>'\n hloc = html.index(b'</head>')\n html = html[:hloc] + css + b'\\n' + html[hloc:]\n if html_heads:\n hloc = html.index(b'</head>')\n html = html[:hloc] + b''.join(html_heads) + b'\\n' + html[hloc:]\n if html_bodies:\n hloc = html.index(b'</body>')\n html = html[:hloc] + b''.join(html_bodies) + b'\\n' + html[hloc:]\n self.finish(html)", "def build_extra_templates(extra_templates, config, site_navigation=None):\n\n 
log.debug(\"Building extra_templates pages\")\n\n for extra_template in extra_templates:\n\n input_path = os.path.join(config['docs_dir'], extra_template)\n\n with io.open(input_path, 'r', encoding='utf-8') as template_file:\n template = jinja2.Template(template_file.read())\n\n # Run `pre_template` plugin events.\n template = config['plugins'].run_event(\n 'pre_template', template, template_name=extra_template, config=config\n )\n\n context = get_context(site_navigation, config)\n\n # Run `template_context` plugin events.\n context = config['plugins'].run_event(\n 'template_context', context, template_name=extra_template, config=config\n )\n\n output_content = template.render(context)\n\n # Run `post_template` plugin events.\n output_content = config['plugins'].run_event(\n 'post_template', output_content, template_name=extra_template, config=config\n )\n\n if output_content.strip():\n output_path = os.path.join(config['site_dir'], extra_template)\n utils.write_file(output_content.encode('utf-8'), output_path)\n else:\n log.info(\"Template skipped: '{}'. Generated empty output.\".format(extra_template))", "def render(name, conf):\n\n template_filename = '/etc/pgpool-II/templates/{}'.format(name)\n output_filename = '/etc/pgpool-II/{}'.format(name)\n\n env = jinja2.Environment(loader=jinja2.FileSystemLoader(os.path.dirname(template_filename)),\n extensions=['jinja2.ext.with_'])\n\n try:\n template = env.get_template(os.path.basename(template_filename))\n except exceptions.TemplateNotFound as e:\n logger.info('Error reading template file {}!'.format(template_filename))\n\n logger.debug('Conf list passed to Jinja template file {}.'.format(conf))\n with open(output_filename, \"w\") as f:\n f.write(template.render(conf))\n\n # set permission of rendered files to postgres:postgres\n os.chown(output_filename, UID, GID)", "def copy_templates(root_directory, dist_directory, sdk_directory,\n cpus, families, boards):\n\n def _process(when, contexts):\n for context in contexts:\n for template in configuration.TEMPLATES:\n if template[\"when\"] == when:\n context.update({\n \"root\": root_directory,\n \"sdk\": sdk_directory,\n \"dist\": dist_directory\n })\n\n source = templates.from_string(template[\"source\"], context)\n target = templates.from_string(template[\"target\"], context)\n target = os.path.join(dist_directory, target)\n\n # Perform the action.\n sys.stdout.write(\"Processing '%s'\\n\" % source)\n\n if template[\"type\"] == \"file\":\n templates.from_file(source, target, context)\n elif template[\"type\"] == \"glob\":\n for source_file in glob.glob(source):\n if os.path.isfile(source_file):\n target_file = os.path.join(\n target, os.path.basename(source_file))\n\n templates.from_file(\n source_file, target_file, context)\n else:\n raise Exception(\"Not supported\")\n\n _process(\"per_family\", families)\n _process(\"per_cpu\", cpus)\n _process(\"per_board\", boards)\n _process(\"per_once\", [{\n \"families\": [family[\"family\"] for family in families],\n \"cpus\": [cpu[\"cpu\"] for cpu in cpus],\n \"boards\": [board[\"board\"] for board in boards]\n }])", "def _GetTemplate(self):\n# First read default template.\n tmplt = self._LoadTemplate(c.preproc_template_default)\n tmplt['proc'] = self.topdir\n self.template_type = 'default'\n\n self.templates = []\n if self.template_file is not None:\n tmplt.update(self._LoadTemplate(self.template_file))\n self.template_type = 'command-line'\n self.templates.append(os.path.abspath(self.template_file))\n found_template = True\n else:\n# Find a study 
specific template file.\n study_template_file = self._FindTemplateFile('%s/..' % self.topdir)\n if study_template_file is not None:\n# Merge study template into default, study template has precedence.\n if self.verbose:\n print \"Using study template at \" + study_template_file\n tmplt.update(self._LoadTemplate(study_template_file))\n self.template_type = 'study-specific'\n self.templates.append(os.path.abspath(study_template_file))\n found_template = True\n else:\n found_template = False\n# Now look for a subject-specific template file.\n subject_template_file = self._FindTemplateFile('%s' % self.topdir)\n if subject_template_file is not None:\n# Merge subject template, subject template has precedence.\n if self.verbose:\n print \"Using subject-specific template at %s\" % \\\n subject_template_file\n tmplt.update(self._LoadTemplate(subject_template_file))\n self.template_type = 'study-specific'\n self.templates.append(os.path.abspath(subject_template_file))\n found_template = True\n\n if not found_template:\n raise RuntimeError('Could not find template file.')\n\n if tmplt.get('subject','same') == 'same':\n# Default subdirectory is same as data directory.\n tmplt['subject'] = self.topdir.split('/')[-1]\n else:\n if not isinstance(tmplt['subject'],str):\n errstr = 'preprocess: Invalid subject number. Be sure to ' + \\\n 'enclose the subject number item with double quotes.'\n raise RuntimeError(errstr)\n\n# Keys that apply to all EPIs.\n self.fsl_flip = tmplt.get('fsl_flip', False)\n if self.fsl_flip:\n self.flip_opts = '-LT'\n else:\n self.flip_opts = ''\n\n# Replace strings with python types.\n for key in tmplt.keys():\n if tmplt[key] == 'None':\n tmplt[key] = None\n elif key == 'True':\n tmplt[key] = True\n elif key == 'False':\n tmplt[key] = False\n return tmplt", "def get_default_template(env):\n return env.from_string(\n \"\"\"\\\n{% if record.standard_information and record.filename_information %}\n0|{{ prefix }}{{ record.path }}|{{ record.inode }}|0|{{ record.standard_information.owner_id }}|0|{{ record.size }}|{{ record.standard_information.accessed|unixtimestampformat }}|{{ record.standard_information.modified|unixtimestampformat }}|{{ record.standard_information.changed|unixtimestampformat }}|{{ record.standard_information.created|unixtimestampformat }}\n{% endif %}\n{% if record.standard_information and record.filename_information %}\n0|{{ prefix }}{{ record.path }} (filename)|{{ record.inode }}|0|{{ record.standard_information.owner_id }}|0|{{ record.size }}|{{ record.filename_information.accessed|unixtimestampformat }}|{{ record.filename_information.modified|unixtimestampformat }}|{{ record.filename_information.changed|unixtimestampformat }}|{{ record.filename_information.created|unixtimestampformat }}\n{% endif %}\n{% for e in record.indx_entries %}\n0|{{ prefix }}{{ record.path }}\\\\{{ e.name }} (INDX)|{{ e.inode }}|0|0|0|{{ e.logical_size }}|{{ e.accessed|unixtimestampformat }}|{{ e.modified|unixtimestampformat }}|{{ e.changed|unixtimestampformat }}|{{ e.created|unixtimestampformat }}\n{% endfor %}\n{% for e in record.slack_indx_entries %}\n0|{{ prefix }}{{ record.path }}\\\\{{ e.name }} (slack-INDX)|{{ e.inode }}|0|0|0|{{ e.logical_size }}|{{ e.accessed|unixtimestampformat }}|{{ e.modified|unixtimestampformat }}|{{ e.changed|unixtimestampformat }}|{{ e.created|unixtimestampformat }}\n{% endfor %}\n\"\"\"\n )", "def test_nested_template_source_generation(self):\n sources = [source for source in self.loader.get_template_sources('component.child.html')]\n 
self.assertEqual(len(sources), 2)\n self.assertEqual(sources[0], 'MOCK_BASE_DIR/component/child/child.html')\n self.assertEqual(sources[1], 'MOCK_BASE_DIR_2/component/child/child.html')\n\n sources = [source for source in self.loader.get_template_sources('deeply.nested.component.and.child.html')]\n self.assertEqual(len(sources), 2)\n self.assertEqual(sources[0], 'MOCK_BASE_DIR/deeply/nested/component/and/child/child.html')\n self.assertEqual(sources[1], 'MOCK_BASE_DIR_2/deeply/nested/component/and/child/child.html')\n\n sources = [source for source in self.loader.get_template_sources('component.child/another.html')]\n self.assertEqual(len(sources), 2)\n self.assertEqual(sources[0], 'MOCK_BASE_DIR/component/child/another.html')\n self.assertEqual(sources[1], 'MOCK_BASE_DIR_2/component/child/another.html')", "def test_non_nested_template_source_generation(self):\n sources = [source for source in self.loader.get_template_sources('component.html')]\n\n self.assertEqual(len(sources), 2)\n self.assertEqual(sources[0], 'MOCK_BASE_DIR/component/component.html')\n self.assertEqual(sources[1], 'MOCK_BASE_DIR_2/component/component.html')", "def uses_template(template):\n def wrapper(func):\n @functools.wraps(func)\n def wrapped(*args, **kwargs):\n template_path = template\n ctx = func(*args, **kwargs)\n if type(ctx) is dict:\n try:\n return render_template(template_path,\n inators=ctx['inators'])\n except KeyError:\n try:\n return render_template(template_path,\n inator=ctx['inator'])\n except KeyError:\n return render_template(template_path, inators=ctx)\n else:\n return ctx\n return wrapped\n return wrapper", "def readTemplates():\n\n # Compile HTML templates.\n templates = {}\n for tt in [ 'image', 'dirindex', 'allindex', 'trackindex', 'sortindex' ]:\n fn = 'template-%s' % tt + opts.htmlext\n ttext = readTemplate(fn)\n templates[ tt ] = compileTemplate(ttext, fn)\n\n fn = 'template-css.css'\n ttext = readTemplate(fn)\n templates[ 'css' ] = compileTemplate(ttext, fn)\n\n # Compile user-specified rc file.\n rcsfx = 'rc'\n templates[ rcsfx ] = []\n if opts.rc:\n try:\n tfile = open(opts.rc, \"r\")\n orc = tfile.read()\n tfile.close()\n except IOError, e:\n print >> sys.stderr, \"Error: can't open user rc file:\", opts.rc\n sys.exit(1)\n\n o = compileCode('', orc, opts.rc)\n templates[ rcsfx ] += [ o ]\n\n # Compile user-specified code.\n if opts.rccode:\n o = compileCode('', opts.rccode, \"rccode option\")\n templates[ rcsfx ] += [ o ]\n\n # Compile global rc file without HTML tags, just python code.\n code = readTemplate('template-%s' % rcsfx + '.py')\n o = compileCode('', code, tt)\n templates[ rcsfx ] += [ o ]\n\n return templates", "def create_auto_init_templates(user_home_directory):\n jig_user_directory = join(user_home_directory, JIG_DIR_NAME)\n jig_git_user_directory = join(jig_user_directory, 'git')\n\n try:\n map(makedirs, [jig_user_directory, jig_git_user_directory])\n except OSError as ose:\n if ose.errno == 13:\n # Permission denied\n raise JigUserDirectoryError(\n 'Cannot create {0} Jig user directory'.format(\n jig_user_directory\n )\n )\n if ose.errno != 17:\n # Some other kind of OSError\n raise JigUserDirectoryError(unicode(ose))\n\n # Copy the shared Git templates directory to .jig/git/templates\n git_templates_directory = _git_templates()\n\n if not git_templates_directory:\n raise GitTemplatesMissing()\n\n home_templates_directory = join(jig_git_user_directory, 'templates')\n\n if isdir(home_templates_directory):\n raise GitHomeTemplatesExists(home_templates_directory)\n\n 
copytree(git_templates_directory, home_templates_directory)\n\n pc_filename = realpath(\n join(home_templates_directory, 'hooks', 'pre-commit')\n )\n\n script_kwargs = {'python_executable': sys.executable}\n\n _create_pre_commit(\n pc_filename, AUTO_JIG_INIT_SCRIPT, script_kwargs\n )\n\n return home_templates_directory", "def get_hierarchy_loader(directories):\n template_loaders = OrderedDict()\n for app_name, template_dir in directories:\n # Pull FileSystemLoader from cache if it already exists for this directory,\n # or instanciate it if not\n if template_dir not in file_system_loaders:\n loader = FileSystemLoader(template_dir)\n file_system_loaders[template_dir] = loader\n else:\n loader = file_system_loaders[template_dir]\n template_loaders[app_name] = loader\n return HierarchyLoader(template_loaders)", "def load_dev_templates(settings, project_name):\n #Load json file\n base_path = settings['path.templates']\n template_path = os.path.join(base_path,\n \"{0}.json\".format(project_name)).replace(\"\\\\\", \"/\")\n file = open(template_path).read()\n template = json.loads(file)\n\n return template", "def create_template(api_url, project_id, username, token, update_flag,\n validation_messages, json_files, content_files, scope, csv_flag, input_list):\n try:\n # template loader log folder exists check\n log_path = '/opt/core/cache/tmp/templateloader_logs/'\n if not os.path.exists(log_path):\n os.makedirs(log_path)\n timestamp = datetime.datetime.fromtimestamp(\n time.time()).strftime('%Y%m%d%H%M%S')\n log_filename = 'templateloader_' + timestamp\n my_file = open(log_path + log_filename, \"a\")\n\n # Print and write the log messages\n for message in validation_messages:\n my_file.write(\"%s\\n\" % message)\n\n success_templates = 0\n\n for metadata, content in zip(json_files, content_files):\n # Metadata Read\n json_file = open(metadata, 'r')\n file_name = list(metadata.split(\"/\"))\n file_name = file_name[-1]\n req_body = json.dumps(json_file.read()).encode('utf-8')\n req_body = json.loads(req_body)\n json_file.close()\n\n req_body = json.loads(req_body)\n\n if csv_flag:\n if input_list and req_body.get(\"name\") not in input_list:\n continue\n # Content Read\n if os.path.isfile(content):\n content_datafile = open(content, 'r')\n content_value = json.dumps(content_datafile.read()).encode('utf-8')\n content_value = json.loads(content_value)\n content_datafile.close()\n req_body[\"content_files\"] = dict(content=dict(content=content_value, name=content.split('/')[-1]))\n else:\n req_body[\"content_files\"] = get_content_files(content)\n # Checks for files\n files_directory = os.path.abspath(\n os.path.join(content, os.pardir)) + \"/files\"\n if os.path.exists(files_directory):\n dependencies = list()\n for script_file_path in find_files(files_directory, '*'):\n script_file_name = os.path.basename(script_file_path)\n script_file_obj = open(script_file_path, 'r')\n script_file_value = script_file_obj.read()\n script_file_obj.close()\n dependencies.append({\"content\": script_file_value, \"name\": script_file_name})\n req_body[\"content_files\"][\"files\"] = dependencies\n\n dependencies_directory = os.path.abspath(os.path.join(content, 'modules'))\n if os.path.exists(dependencies_directory):\n dependencies = list()\n for elem in os.listdir(dependencies_directory):\n module_path = os.path.join(dependencies_directory, elem)\n if not os.path.isdir(module_path):\n continue\n dependencies.append({\"type\": \"module\", \"name\": elem,\n \"content_files\": get_content_files(module_path)})\n if 
dependencies:\n req_body['dependencies'] = dependencies\n if scope != 'default':\n req_body['scope'] = scope\n\n req_body = json.dumps(req_body).encode('utf-8')\n\n url = \"%s%s/%s\" % (api_url, project_id, 'templates')\n http_client = httplib2.Http()\n headers = {\"X-Auth-User\": username, \"X-Auth-Token\": token}\n\n # call the Create Template API\n resp, content = http_client.request(\n url, method=\"POST\", body=req_body, headers=headers)\n content = json.loads(content)\n\n if resp[\"status\"] == \"200\":\n success_templates += 1\n log_msg = \"%s%s%s - %s\" % (file_name[:-5], \" ==> status:\",\n content[\"status\"],\n content[\"message\"])\n sys.stdout.write(\"%s\\n\" % log_msg)\n elif resp[\"status\"] == \"400\" and update_flag:\n template_id = None\n url = \"%s%s/%s\" % (api_url, project_id, 'templates')\n list_resp, list_content = http_client.request(\n url, method=\"GET\", headers=headers)\n list_content = json.loads(list_content)\n if list_resp[\"status\"] == \"200\":\n template_list = list_content['data']['templates']\n for template in template_list:\n if template['name'] == json.loads(req_body)['name']:\n # call the Update Template API\n template_id = template[\"id\"]\n url = \"%s%s/%s/%s\" % (api_url, project_id,\n 'templates', template_id)\n update_resp, update_content = \\\n http_client.request(url, method=\"PUT\",\n body=req_body,\n headers=headers)\n update_content = json.loads(update_content)\n log_msg = \"%s%s%s - %s\" % (\n file_name[:-5], \" ==> status:\",\n update_content[\"status\"],\n update_content[\"message\"])\n sys.stdout.write(\"%s\\n\" % log_msg)\n if update_resp[\"status\"] == \"200\":\n success_templates += 1\n break\n if not template_id:\n temp_url = \"%s%s/%s?is_temp=true\" % (api_url, project_id, 'templates')\n list_temp_resp, list_temp_content = http_client.request(\n temp_url, method=\"GET\", headers=headers)\n list_temp_content = json.loads(list_temp_content)\n if list_temp_resp[\"status\"] == \"200\":\n temp_template_list = list_temp_content['data']['templates']\n for template in temp_template_list:\n if template['name'] == json.loads(req_body)['name']:\n # call the Update Template API\n template_id = template[\"id\"]\n url = \"%s%s/%s/%s\" % (api_url, project_id,\n 'templates', template_id)\n update_resp, update_content = \\\n http_client.request(url, method=\"PUT\",\n body=req_body,\n headers=headers)\n update_content = json.loads(update_content)\n log_msg = \"%s%s%s - %s\" % (\n file_name[:-5], \" ==> status:\",\n update_content[\"status\"],\n update_content[\"message\"])\n sys.stdout.write(\"%s\\n\" % log_msg)\n if update_resp[\"status\"] == \"200\":\n success_templates += 1\n break\n if not template_id:\n log_msg = \"%s%s%s - %s\" % (file_name[:-5], \" ==> status:\",\n content[\"status\"],\n content[\"message\"])\n sys.stderr.write(\"%s\\n\" % log_msg)\n my_file.write(\"%s\\n\" % log_msg)\n else:\n log_msg = \"%s%s%s - %s\" % (file_name[:-5], \" ==> status:\",\n content[\"status\"],\n content[\"message\"])\n sys.stderr.write(\"%s\\n\" % log_msg)\n my_file.write(\"%s\\n\" % log_msg)\n if not csv_flag:\n total_templates = len(json_files)\n failed_templates = total_templates - success_templates\n else:\n total_templates = len(input_list)\n failed_templates = total_templates - success_templates\n sys.stdout.write('Total templates: ' + str(total_templates) + \"\\n\")\n sys.stdout.write(\"Success Templates: \" + str(success_templates) + \"\\n\")\n sys.stderr.write(\"Failed Templates: \" + str(failed_templates) + \"\\n\")\n\n my_file.write('Total 
templates: ' + str(total_templates) + \"\\n\")\n my_file.write(\"Failed Templates: \" + str(failed_templates) + \"\\n\")\n my_file.close()\n\n except Exception as e:\n sys.stdout.write(e.message)\n exit(1)", "def read_in_templates(path, email_object=None):\n import os\n templates = {}\n\n for fle in os.listdir(path):\n with open(os.path.join(path, fle)) as _f:\n raw = \"\\n\".join(_f.readlines())\n templates[fle] = raw\n\n if email_object:\n email_object.use_templates(templates)\n else:\n return templates", "def read_template_files(self, template_path):\n templates = dict()\n for file in listdir(template_path):\n template_file = re.search(r\"(.*?).html$\", file)\n if template_file:\n template_name = template_file.groups()[0]\n templates[template_name] = open(os.path.join(template_path, file)).read()\n return templates", "def _load_template(self):\n filename = os.path.join(get_conf('DEFAULT_TEMPLATE_PATH'), self._template, '__init__.ini')\n cf = ApplicationConf.get_instance()\n with comp_open(filename, mode='r') as fp:\n content = fp.read()\n content = content.format(**cf)\n conf = CompConfigParser(allow_no_value=True)\n conf.read_string(content, '__init__.ini')\n ini = {'dirs': [], 'files': [], 'binaries': []}\n if conf.has_section('dirs'):\n for key in conf.options('dirs'):\n ini['dirs'].append(key)\n if conf.has_section('files'):\n for key in conf.options('files'):\n ini['files'].append(self.__remap(key))\n if conf.has_section('binaries'):\n for key in conf.options('binaries'):\n ini['binaries'].append(self.__remap(key))\n if isinstance(self._ini, dict):\n self._ini.update(ini)\n else:\n self._ini = ini", "def load_template_if_needed(self):\n\n class GeneratorProxy(object):\n \"\"\"\n An interface to templates and plugins for\n providing restricted access to the methods.\n \"\"\"\n\n def __init__(self, preprocessor=None, postprocessor=None,\n context_for_path=None):\n self.preprocessor = preprocessor\n self.postprocessor = postprocessor\n self.context_for_path = context_for_path\n\n if not self.template:\n logger.info(\"Generating site at [%s]\" % self.site.sitepath)\n self.template = Template.find_template(self.site)\n logger.debug(\"Using [%s] as the template\",\n self.template.__class__.__name__)\n\n logger.info(\"Configuring the template environment\")\n preprocessor = self.events.begin_text_resource\n postprocessor = self.events.text_resource_complete\n proxy = GeneratorProxy(context_for_path=self.context_for_path,\n preprocessor=preprocessor,\n postprocessor=postprocessor)\n self.template.configure(self.site,\n engine=proxy)\n self.events.template_loaded(self.template)", "def get_templates_dirs(self):\n\t\tfrom pkg_resources import resource_filename\n\t\treturn [resource_filename(__name__, 'templates')]", "def _render_template(self,**kwargs):\n\t\tkwargs[\"module_name\"] = self._modulename\n\t\tfolder = path.dirname( stack()[1][1] )\n\t\tenv = Environment(loader=FileSystemLoader(folder))\n\t\ttemplate = env.get_template(\"jitced_template.c\")\n\t\twith open(self.sourcefile, \"w\") as codefile:\n\t\t\tcodefile.write(template.render(kwargs))", "def render_template(path, content, template_vars):\n _, filename = os.path.split(path)\n if filename in DO_NOT_TEMPLATE_FILES or not filename.endswith('.jinja2'):\n return content\n tm = Template(content)\n return tm.render(**template_vars)", "def global_jinja_env():\n return _ENV", "def render_template():\n template_engine = engines['django']\n def func(template_string):\n load_tags_string = '{% load wagtailextensions_tags %}'\n return 
template_engine.from_string(load_tags_string + template_string).render()\n return func", "def render(template_name, context, template_dir=DEFAULT_TEMPLATES_DIR,\n jinja_env_args=None):\n env_kwargs = jinja_env_args or {}\n templates = jinja2.Environment(\n loader=jinja2.FileSystemLoader(template_dir), **env_kwargs)\n template = templates.get_template(template_name)\n return template.render(context)", "def replace_template_files(root_directory, variables=None, template_files=None, subdirs=None):\n variables = variables or {\n 'branch' : retrieve_current_branch(repository_directory=root_directory, fix_environment=True),\n }\n \n templates = template_files or [\"requirements.txt\", \"setup.py\", \"pavement.py\"]\n \n for template in templates:\n fp = os.path.join(root_directory, template)\n _replace_template(fp, variables)\n \n if subdirs is None:\n subdirs = ['debian']\n\n if subdirs:\n for subdir in subdirs:\n dp = os.path.join(*list(chain([root_directory], subdir.split('/'))))\n if os.path.exists(dp):\n for file in os.listdir(dp):\n fp = os.path.join(root_directory, subdir, file)\n if os.path.isfile(fp):\n _replace_template(fp, variables)", "def jinja2(self):\n return jinja2.get_jinja2(app=self.app)", "def jinja2(self):\n return jinja2.get_jinja2(app=self.app)", "def __call__(self, template, obj=None):\n for engine in self.engines:\n filename = engine.find_template_filename(template)\n if filename:\n if obj:\n self.res.locals.update(obj)\n html = engine.render_source(filename, self.res.locals)\n self.res.send_html(html)\n break\n else:\n raise ValueError(\"Could not find a template with name '%s'\" % template)", "def build(self) -> None:\n def do_process(fname) -> bool:\n for sfx in skip_suffixes:\n if fname.endswith(sfx):\n return False\n return True\n\n for dirpath, _, fnames in os.walk(self.template_dir):\n for fname in fnames:\n if do_process(fname):\n self.process(dirpath, fname)", "def load_all_templates(dataset, template_dir: str) -> Dict[str, NexusTemplate]:\n template_set = {\n template_name\n for template_name in os.listdir(template_dir)\n if not template_name.endswith(\".json\")\n }\n template_set.add(\"linear\")\n\n template_ord = []\n for template_name in TEMPLATE_PREFERRED_ORDER:\n try:\n template_set.remove(template_name)\n except KeyError:\n pass\n else:\n template_ord.append(template_name)\n template_ord.extend(sorted(template_set))\n\n return {\n template_name: load_template(dataset, template_dir, template_name)\n for template_name in template_ord\n }", "def get_templates_dirs(self):\n return [resource_filename(__name__, 'templates')]", "def get_templates_dirs(self):\n return [resource_filename(__name__, 'templates')]", "def template_path(self):\n return super().template_path + [\n os.path.join(\n sys.prefix, \"share\", \"jupyter\", \"nbconvert\", \"templates\", \"flex\"\n )\n ]", "def configure_environment(config): # pragma: no cover\n base = config.get_jinja2_environment()\n config.registry[ENVIRONMENT_KEY] = create_environment(base)" ]
[ "0.71863306", "0.68339425", "0.68019456", "0.64534956", "0.6438727", "0.64271533", "0.63226676", "0.62789506", "0.61880404", "0.61768335", "0.6093075", "0.6058683", "0.60580313", "0.60333896", "0.59724724", "0.59577113", "0.5878471", "0.5849247", "0.5840072", "0.5812187", "0.57804793", "0.57687664", "0.57674855", "0.573975", "0.5695509", "0.5687672", "0.5680515", "0.567818", "0.56724036", "0.5656425", "0.56512064", "0.5643645", "0.5640745", "0.56284887", "0.5624324", "0.5622187", "0.56181455", "0.56156856", "0.5594183", "0.559398", "0.5586657", "0.5584085", "0.5575336", "0.5563394", "0.55630386", "0.5562465", "0.5560251", "0.5526144", "0.55198", "0.55177677", "0.551377", "0.5498932", "0.5463547", "0.54613143", "0.54587305", "0.5455952", "0.54476374", "0.54312044", "0.54229695", "0.53968644", "0.53949726", "0.5347462", "0.53401726", "0.53348464", "0.5334061", "0.5327214", "0.53191453", "0.5313788", "0.5299663", "0.52940434", "0.5277322", "0.5274865", "0.52742493", "0.5272199", "0.52580094", "0.5247437", "0.5240466", "0.522913", "0.52146626", "0.52036107", "0.519858", "0.5196855", "0.5196106", "0.51949185", "0.5194107", "0.519316", "0.5192372", "0.5184932", "0.51791334", "0.5158864", "0.5152634", "0.5138766", "0.5138766", "0.51383644", "0.5125913", "0.5125883", "0.512526", "0.512526", "0.5123928", "0.5104392" ]
0.5354332
61
Produces a list of all files that the rendering of this template depends on, including files referenced within {% include %}, {% import %}, and {% extends %} blocks within the template
def get_makefile_dependencies(self, template: Union[Path, str]) -> List[Path]:
    if isinstance(template, Path):
        template = template.read_text()
    dependencies = self.variable_files + self.yasha_extensions_files
    referenced_template_partials = find_referenced_templates(self.env.parse(template))  # returns a generator
    # convert the generator to a list, filtering out the None values
    referenced_template_partials: List[str] = list(filter(bool, referenced_template_partials))
    for relative_path in referenced_template_partials:
        for basepath in self.env.loader.searchpath:  # type: ignore
            if not isinstance(basepath, Path):
                basepath = Path(basepath)
            template_path = basepath / relative_path
            if template_path.is_file:
                # we've found the template partial inside this basepath
                dependencies.append(template_path)
    return dependencies
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_imports() -> str:\n extension = \"\"\n for js_ in JS_FILES.values():\n extension += f'<script src=\"{js_}\"></script>'\n for css in CSS_FILES.values():\n extension += f'<link rel=\"stylesheet\" href=\"{css}\" is=\"custom-style\">'\n\n return extension", "def list_files(self):\n re_css = re.compile(r'\\.css$')\n re_js = re.compile(r'\\.js$')\n re_adminlte2 = re.compile(r'adminlte2')\n file_list = []\n print \"static path is %s\" % self.static_path\n for dirpath, _, files in os.walk(self.static_path):\n if not re_adminlte2.search(dirpath):\n for name in files:\n if re_css.search(name) or re_js.search(name):\n file_list.append(os.path.join(dirpath, name))\n return file_list", "def included_files(self) -> Iterable[str]:\n return self._incl_files", "def get_included_files(self):\n return self._includedfiles", "def get_dependencies_content():\n import trustedanalytics\n dependencies = []\n for filename in trustedanalytics.udf_dependencies:\n name, content = _get_file_content_as_str(filename)\n dependencies.append({'file_name': name, 'file_content': content})\n return dependencies", "def include_templates(self):\n if self._include_templates is None:\n result = []\n for inc in self._includes:\n result.append(self.manager.get_template(inc))\n self._include_templates = result\n return self._include_templates", "def get_source_files(self):\n files = []\n for pkg in self._po_packages():\n files.append(pkg['template'])\n files.extend((item['po_file'] for item in\n self._po_package_contents(pkg)))\n return files", "def getAllImportFiles():\n\tdef get_path(base):\n\t\tb, t = os.path.split(base)\n\t\tif __name__ == t:\n\t\t\treturn [\"animation_nodes\"]\n\t\telse:\n\t\t\treturn get_path(b) + [t]\n\n\tfor root, dirs, files in os.walk(currentPath):\n\t\tpath = \".\".join(get_path(root))\n\t\tfor f in filter(lambda f:f.endswith(\".py\"), files):\n\t\t\tname = f[:-3]\n\t\t\tif not name == \"__init__\":\n\t\t\t\tyield path + \".\" + name", "def get_messages_from_include_files(app_name=None):\n\tfrom frappe.utils.jinja_globals import bundled_asset\n\n\tmessages = []\n\tapp_include_js = frappe.get_hooks(\"app_include_js\", app_name=app_name) or []\n\tweb_include_js = frappe.get_hooks(\"web_include_js\", app_name=app_name) or []\n\tinclude_js = app_include_js + web_include_js\n\n\tfor js_path in include_js:\n\t\tfile_path = bundled_asset(js_path)\n\t\trelative_path = os.path.join(frappe.local.sites_path, file_path.lstrip(\"/\"))\n\t\tmessages_from_file = get_messages_from_file(relative_path)\n\t\tmessages.extend(messages_from_file)\n\n\treturn messages", "def _get_compile_cache_dep_files():\n if entry_script_path is None:\n logger.warning(\"Can not get the entry script file path.\")\n return []\n compile_cache_dep_files = []\n logger.debug(f\"entry script file path: {entry_script_path}\")\n compile_cache_dep_files.append(entry_script_path)\n __get_compile_cache_dep_files(entry_script_path, compile_cache_dep_files, None)\n return compile_cache_dep_files", "def getExternalFiles(self):\n return []", "def _get_include_files(self):\n for dirpath, dirnames, filenames in os.walk(self.IncludesDirectory):\n for f in filenames:\n rel_name = path.join(dirpath, f)\n if f.endswith('.pyx'):\n yield (rel_name, 'PyRex')\n elif f.endswith('.h'):\n yield (rel_name, 'Header')\n else:\n pass", "def _readFiles(self):\n template_files = []\n for file in os.listdir(self.template_folder):\n if file.endswith(\".xml\"):\n template_files.append(file)\n return template_files", "def _get_files(self):\n # pylint: 
disable=unused-variable\n for dirpath, __, filenames in os.walk(self.start_location):\n for file_ in filenames:\n if file_.endswith('.py'):\n yield \"{0}{1}\".format(dirpath, file_)", "def determine_dependencies(self):\n raise NotImplemented(\"Need to make a template formatter to find this in the options\")", "def jinja_files(self) -> Pattern:\n return self._parse_pattern(self.get(\"jinja_files\", \"*.htm?|*.css\"))", "def find_reference_files():\n for root, _, files in os.walk(\"./tests/references/\"):\n for basename in fnmatch.filter(files, \"*.tex\"):\n yield os.path.join(root, basename)", "def freeze_includes() -> List[str]:\n import _pytest\n\n result = list(_iter_all_modules(_pytest))\n return result", "def include_dirs(self):", "def get_extra_assets(self):\n asset_list = []\n if self.extra_assets is None:\n return []\n return [ self.complete_static_filename(asset) \\\n for asset in self.extra_assets ]", "def get_included_files(space):\n files = space.ec.interpreter.included_files\n arr_list = []\n for f in files:\n arr_list.append(space.newstr(f))\n return space.new_array_from_list(arr_list)", "def component_dependencies_tag():\n\n rendered_dependencies = []\n for component in get_components_from_registry(registry):\n rendered_dependencies.append(component.render_dependencies())\n\n return mark_safe(\"\\n\".join(rendered_dependencies))", "def jinja_files(self) -> Pattern:\n return self._parse_pattern(self.get(\"jinja_files\", None))", "def get_messages_from_include_files(app_name=None):\n\tmessages = []\n\tfor file in (frappe.get_hooks(\"app_include_js\", app_name=app_name) or []) + (frappe.get_hooks(\"web_include_js\", app_name=app_name) or []):\n\t\tmessages.extend(get_messages_from_file(os.path.join(frappe.local.sites_path, file)))\n\n\tfor app in ([app_name] if app_name else frappe.get_installed_apps()):\n\t\tif os.path.isfile(frappe.get_app_path(app, \"public/build.json\")):\n\t\t\twith open(frappe.get_app_path(app, \"public/build.json\"), 'r') as f:\n\n\t\t\t\tfor f in json.loads(f.read()):\n\t\t\t\t\tif not f.startswith(\"concat:\"):\n\t\t\t\t\t\tmessages.extend(get_messages_from_file(os.path.join(frappe.local.sites_path, \"assets/\" + f)))\n\n\treturn messages", "def include_file(ctx, name):\n env = ctx.environment\n return jinja2.Markup(env.loader.get_source(env, name)[0])", "def get_required_template_names() -> list[str]:\n return [\n \"blog_list_of_posts.html\",\n \"post.html\",\n \"post_body.html\",\n \"episode.html\",\n ]", "def get_available_templates_list():\n page = import_page.ImportPage()\n page.open()\n return page.open_download_template_modal().available_templates_list", "def elastixTemplates():\n\t\ttransformations = []\n\t\tfileNames = os.listdir(AppVars.transformationsPath())\n\t\tfor fileName in fileNames:\n\t\t\tfullFileName = os.path.join(AppVars.transformationsPath(), fileName)\n\t\t\ttransformation = ParameterList()\n\t\t\tif transformation.loadFromFile(fullFileName):\n\t\t\t\ttransformations.append(transformation)\n\t\treturn transformations", "def get_imports(self) -> str:\n\n imports = [\n \"_ = require('lodash');\",\n \"stream = require('stream');\",\n f\"const hookRegister = require('./{settings.ARTILLERY_HOOK_FILE}').hookRegister;\",\n f\"hook = require('./{settings.ARTILLERY_LIB_FOLDER}/hooks').hook;\",\n f\"utils = require('./{settings.ARTILLERY_LIB_FOLDER}/providers');\",\n f\"settings = require('./{settings.ARTILLERY_LIB_FOLDER}/settings');\",\n f\"StatsCollector = require('./{settings.ARTILLERY_LIB_FOLDER}/statsCollector').StatsCollector;\",\n 
f\"profiles = require('./{settings.ARTILLERY_LIB_FOLDER}/profiles').profiles;\",\n f\"influx = require('./{settings.ARTILLERY_LIB_FOLDER}/influx').client;\",\n ]\n return \"\\n\".join(imports)", "def files(self) -> Generator[Path, None, None]:\n return Path(self.package).resolve(strict=True).glob(self.glob)", "def get_doc_files(extensions=MARKDOWN_EXTENSIONS + STATIC_ASSET_EXTENSIONS):\n file_list = []\n # doc files on toplevel\n for ext in extensions:\n file_list += config[\"topdir\"].glob('*' + ext)\n # doc files in include dirs\n for incdir in config['incdirs']:\n for ext in extensions:\n file_list += config[\"topdir\"].joinpath(incdir).rglob('*' + ext)\n return file_list", "def _get_base_files(self):\n setup_file = path.join(self.PyCogentDirectory, 'setup.py')\n #reqs_file = path.join(self.PyCogentDirectory, 'cogent-requirements.txt')\n #return [(setup_file, 'Python'), (reqs_file, 'Properties')]\n return [(setup_file, 'Python')]", "def get_templates_dirs(self): \n from pkg_resources import resource_filename\n return [ resource_filename(__name__, 'templates') ]\n # return []", "def find_files(self):\n # yield blueprint paths first\n if getattr(self, 'blueprint_name', None):\n for path in walk_directory(os.path.join(self.path, self.blueprint_name), ignore=self.project.EXCLUDES):\n yield 'preview', {'path': path}\n\n # then yield project paths\n for path in walk_directory(self.path, ignore=self.project.EXCLUDES):\n yield 'preview', {'path': path}", "def get_files(self, include=[], exclude=[]):\r\n for (basepath, dpaths, fpaths) in os.walk(self.path, topdown=True):\r\n for subpath in dpaths + fpaths:\r\n path = os.path.join(self.chroot_path(basepath), subpath)\r\n if filter_path(path, include, exclude):\r\n yield path", "def GetIncludedFilesForHeaderString():\n\n # Don't really need to automate this as it'll be the same for all of them. 
\n include_files_string = (IncludeString(\"\\\"ChasteSerialization.hpp\\\"\") + \n IncludeString(\"<boost/serialization/base_object.hpp>\") + \n IncludeString(\"<boost/serialization/shared_ptr.hpp>\") + \n \"\\n\" + \n IncludeString(\"<cmath>\") + \n IncludeString(\"<iostream>\") + \n IncludeString(\"\\\"AbstractOdeSystem.hpp\\\"\\n\") )\n\n return include_files_string", "def get_all_files(self):\n dp = FileSystemDataProvider.FileSystemDataProvider(self.folder)\n filenames = dp.getFileNames()\n htmlOut = \"available files:\"+\", \".join(filenames)\n return htmlOut", "def component_js_dependencies_tag():\n\n rendered_dependencies = []\n for component in get_components_from_registry(registry):\n rendered_dependencies.append(component.render_js_dependencies())\n\n return mark_safe(\"\\n\".join(rendered_dependencies))", "def files(self):\r\n files = []\r\n for path in self.paths:\r\n if os.path.isdir(path):\r\n files.extend(glob.glob(os.path.join(path, f'*{self.ext}')))\r\n else:\r\n files.extend(glob.glob(path))\r\n return list(set(self.get_pattern(fname) for fname in files))", "def test_includes(self):\n collection = lookup.TemplateLookup()\n\n collection.put_string(\n \"base\",\n \"\"\"\n <%def name=\"a()\">base_a</%def>\n This is the base.\n ${next.body()}\n End base.\n\"\"\",\n )\n\n collection.put_string(\n \"index\",\n \"\"\"\n <%inherit file=\"base\"/>\n this is index.\n a is: ${self.a()}\n <%include file=\"secondary\"/>\n\"\"\",\n )\n\n collection.put_string(\n \"secondary\",\n \"\"\"\n <%inherit file=\"base\"/>\n this is secondary.\n a is: ${self.a()}\n\"\"\",\n )\n\n assert result_lines(collection.get_template(\"index\").render()) == [\n \"This is the base.\",\n \"this is index.\",\n \"a is: base_a\",\n \"This is the base.\",\n \"this is secondary.\",\n \"a is: base_a\",\n \"End base.\",\n \"End base.\",\n ]", "def getRenderDependencies(*args, **kwargs)->AnyStr:\n pass", "def profiler_includes():\n return gae_mini_profiler.templatetags.profiler_includes()", "def getContentFiles():\n contentFiles = []\n for contentDir, subDirs, filenames in os.walk(sourceDir, followlinks=True):\n if shouldIgnore(contentDir):\n subDirs[:] = []\n continue\n for filename in filenames:\n if not shouldIgnore(filename):\n cf = ContentFile(os.path.join(contentDir, filename))\n log(`cf.path`)\n contentFiles.append(cf)\n return contentFiles", "def get_xmodule_urls():\r\n if settings.DEBUG:\r\n paths = [path.replace(\".coffee\", \".js\") for path in\r\n settings.PIPELINE_JS['module-js']['source_filenames']]\r\n else:\r\n paths = [settings.PIPELINE_JS['module-js']['output_filename']]\r\n return [staticfiles_storage.url(path) for path in paths]", "def template_list(self):\n return self.ezx.get_template_list()", "def _get_doc_files(self):\n return [(path.join(self.DocDirectory, 'conf.py'), 'Python')]", "def IncludePaths(self):\n return self._g_incpaths", "def get_includes(self):\r\n def visitor(fobj, lptr, depth, includes):\r\n if depth > 0:\r\n loc = lptr.contents\r\n includes.append(FileInclusion(loc.file, File(fobj), loc, depth))\r\n\r\n # Automatically adapt CIndex/ctype pointers to python objects\r\n includes = []\r\n conf.lib.clang_getInclusions(self,\r\n callbacks['translation_unit_includes'](visitor), includes)\r\n\r\n return iter(includes)", "def includes(self) -> Set:\n if self._includes is None:\n manifest = self._get_manifest()\n self._includes = manifest[\"files\"][\"includes\"]\n\n return self._includes", "def _get_extension_imports() -> str:\n scss_imports = \"\"\n\n for ext in (simple_bulma_path / 
\"extensions\").iterdir():\n\n if is_enabled(ext):\n for src in get_sass_files(ext):\n scss_imports += f\"@import '{src.as_posix()}';\\n\"\n\n return scss_imports", "def collect_files(self):\n self.files = []\n for bundle in self.bundles:\n bundle.init_build(self, self.builder)\n bundle_files = bundle.prepare()\n self.files.extend(bundle_files)\n return self", "def render_file(file, files, file_provider, error_handler):\n ret = []\n lines = file_provider(file['template'])\n try:\n lines = replace_template_tags(lines, file_provider)\n except NoSuchFileError as e: # included file not found\n error_handler('''error: referenced template %s in %s not found''' % (e, file['template']))\n \n try:\n lines = replace_text_tags(lines, file, file_provider, error_handler)\n except NoSuchFileError as e: # included file not found\n error_handler('''error: referenced content file %s in /%s not found''' % (e, file['path']))\n raise\n\n for line in lines:\n m = re.search(r'{\\@' + name_re + '}', line)\n if m:\n span = m.span()\n try:\n url = [f['path'] for f in files if 'name' in f and f['name'] == m.group('name')][0]\n line = line[:span[0]] + url + line[span[1]:]\n except IndexError:\n error_handler('''warning: referenced URL %s in /%s not found''' % (m.group('name'), file['path']))\n ret.append(line)\n return ret", "def relative_to_buildroot(self):\n return [os.path.join(self.rel_path, source) for source in self.source_paths]", "def genlist(self):\n out = []\n def responder():\n \"\"\"empty responder object used to find the template name\"\"\"\n pass\n responder.view = static_view_finder\n for path, route in self.routes:\n if route['generate']:\n mako_template = route['function'](responder)+'.mako'\n filename = relpath(self.mylookup.get_template(mako_template).filename)\n out.append((path, filename))\n return out", "def template_extra_functions(self):\n\t\treturn []", "def find_extra_include(file_name):\r\n extra_includes = []\r\n with open(file_name) as f:\r\n for m in re.finditer(regex.extra_include, f.read()):\r\n extra_includes.append(m.groups(1))\r\n return extra_includes", "def listFiles(self):\n pass", "def sources(self):\n res = set()\n for elem in chain(settings.PIPELINE_CSS.values(), settings.PIPELINE_JS.values()):\n # TODO: add support for glob\n res.update(elem.get('source_filenames', []))\n return tuple(res)", "def files(self):\n files = [self.submission]\n if self.kind == 'script':\n files.append(self.exec_script)\n if self.kind == 'function':\n files.append(self.function)\n return files", "def gather_required_files(filename):\n # open the file, while ignoring encoding errors (usually comments)\n encoding = open_guess_encoding(filename)\n with open(filename, encoding=encoding, errors='surrogateescape') as fp:\n config = MugenParser()\n config.read_string(fp.read())\n\n # go through each section and store any options that look like filenames\n required = set()\n for section in config.sections():\n section = config[section]\n options = set(find_asset(normpath(v)) for k, v in section.items()\n if filename_regex.match(v))\n required.update(options)\n\n # check other def files, then search them and add the results\n root = dirname(filename)\n for child_file in required.copy():\n name, ext = os.path.splitext(child_file)\n if ext.lower() == '.def':\n path = join(root, child_file)\n required.update(gather_required_files(path))\n\n # TODO: this is not implemented\n # mugen does checking against many paths, so we need\n # to emulate that the if we want to check for missing files\n # finally, go through 
the potential files and verify they exist\n # for child_file in required.copy():\n # path = join(root, child_file)\n # if not os.path.exists(path):\n # required.remove(child_file)\n\n return required", "def _get_dependencies(requirements_file: Path) -> List[str]:\n lines = requirements_file.read_text().strip().split('\\n')\n return [line for line in lines if not line.startswith('#')]", "def render_asset(self, name):\n result = \"\"\n if self.has_asset(name):\n asset = self.get_asset(name)\n if asset.files:\n for f in asset.files:\n result += f.render_include() + \"\\r\\n\"\n return result", "def expand_files(self, recursive=True, include_buildfile=True):\r\n\r\n files = []\r\n\r\n def _expand(target):\r\n files.extend([os.path.abspath(os.path.join(target.target_base, s))\r\n for s in (target.sources or [])])\r\n if include_buildfile:\r\n files.append(target.address.buildfile.full_path)\r\n if recursive:\r\n for dep in target.dependencies:\r\n if isinstance(dep, TargetWithSources):\r\n _expand(dep)\r\n elif hasattr(dep, 'address'):\r\n # Don't know what it is, but we'll include the BUILD file to be paranoid\r\n files.append(dep.address.buildfile.full_path)\r\n\r\n _expand(self)\r\n return files", "def filepaths(self):\n pass", "def get_all_js_files(self, root):\n res = []\n\n for fname in os.listdir(root):\n mo = re.match(r'(\\w+)\\.js$', fname)\n if mo:\n res.append({\n 'name': mo.group(1),\n 'src': file_contents(os.path.join(root, mo.group()))\n })\n\n return res", "def coffeescript_files():\r\n dirs = \" \".join(THEME_COFFEE_PATHS + [Env.REPO_ROOT / coffee_dir for coffee_dir in COFFEE_DIRS])\r\n return cmd('find', dirs, '-type f', '-name \\\"*.coffee\\\"')", "def component_css_dependencies_tag():\n\n rendered_dependencies = []\n for component in get_components_from_registry(registry):\n rendered_dependencies.append(component.render_css_dependencies())\n\n return mark_safe(\"\\n\".join(rendered_dependencies))", "def collect_files(files: types.FilesCollection) -> List[str]:\n paths = [conf.proj_path(p) for p in files.paths]\n\n if context.RunContext().get('verbose', 0) >= 3:\n log.info(\"<35>Files:\")\n log.info(\"only_staged: <33>{}\".format(files.only_staged))\n log.info(\"untracked: <33>{}\".format(files.untracked))\n log.info(\"whitelist: <33>\\n{}\".format('\\n'.join(files.whitelist())))\n log.info(\"blacklist: <33>\\n{}\".format('\\n'.join(files.blacklist())))\n\n if files.only_staged and files.include and not files.whitelist():\n # include will be empty if none of the staged files match include\n # and thus the final fs walk will pick up everything. We want\n # to preserve the include patterns defined in `pelconf.yaml`\n # so nothing is picked if none of the staged files match.\n return []\n\n return list(itertools.chain.from_iterable(\n filtered_walk(path, files.whitelist(), files.blacklist())\n for path in paths\n ))", "def get_filelist(import_path, extension):\n filelist = []\n for root, dirs, files in os.walk(import_path):\n filelist += glob.glob(os.path.join(root, '*.' 
+ extension))\n return filelist", "def list_extensions():\n formats = FileFormat.list_formats()\n return render_template('home.html', formats=formats)", "def list_load_entries(filepath):\n try:\n rewriter = rewriter_factory(filepath)\n dynamic_deps = [dep[6:] for dep in rewriter.dependencies if dep.startswith('@rpath')]\n return {'rpaths': rewriter.rpaths, 'libraries': dynamic_deps}\n except MachoError:\n return {'rpaths': [], 'libraries': []}", "def get_library_content(self):\n from glob import glob\n try:\n os.path.isdir(self.source)\n lst = glob(self.source + '/*')\n except TypeError:\n lst = self.source\n dircheck = True\n while dircheck is True:\n dircheck = False\n newlst = []\n for entry in lst:\n if os.path.isdir(entry):\n newlst.extend(glob(entry + '/*'))\n dircheck = True\n else:\n newlst.append(entry)\n lst = newlst\n return lst", "def dependencies(self) -> List[Bundle]:\n return []", "def required_files(self, args):\n args_set = set(args)\n edge_list = self.__transform_pre(self.__include_deps_supply.get_file_include_deps())\n targets = chain((target for (source, target) in edge_list if source in args_set), args_set)\n return self.__transform_post(targets)", "def process_all_files():\n src_files = get_doc_files()\n\n for src_pathname in src_files:\n if src_pathname.suffix in MARKDOWN_EXTENSIONS:\n process_file_markdown(src_pathname)\n elif src_pathname.suffix in STATIC_ASSET_EXTENSIONS:\n process_file_copytodest(src_pathname)", "def get_data_files():\n return [\n ('share/jupyter/nbextensions/{}'.format(PY_PACKAGE), TARGETS),\n ('share/jupyter/lab/extensions', [\n os.path.relpath(f, '.') for f in glob.glob(TAR_PATH)\n ])\n ]", "def get_preferable_files(project, input_template):\n preferable_files = PreferableFile.objects.filter(\n input_template=input_template\n )\n files = []\n if len(preferable_files) > 0:\n for file in project.files:\n if PreferableFile.match_any(file.filename, preferable_files):\n files.append(file)\n if len(files) > 0:\n return files\n return []", "def load_template_files(self):\n templates = dict()\n template_path = settings.CUSTOM_VERTO_TEMPLATES\n templates.update(self.read_template_files(template_path))\n if hasattr(self, \"extra_converter_templates_directory\"):\n directory = self.extra_converter_templates_directory\n template_path = os.path.join(template_path, directory)\n templates.update(self.read_template_files(template_path))\n return templates", "def files(self):\n if os.path.exists(self.xml_file):\n self.xml_link='<li>XML <a href=\"../../%(xml_file)s?download=true\">%(xml_file)s</a></li>'.format(self.xml_file)\n else:\n self.xml_link=''\n # <a href=\"../../%(xml)s?download=true\">%(xml)s</a></li>\n #self.xml = glob.glob('*.xml')[0]", "def includes(self) -> UriDict[List[FileUri]]:\n return self._includes", "def get_required_extensions(self):\n return []", "def getFileReferences():\n refNodes = pm.ls(rf=True)\n fileRefs = [r.referenceFile() for r in refNodes]\n return fileRefs", "def resources(self) -> HTMLBody:\n\t\treturn render_template(\"resources.jinja2\")", "def inject_files():\n for filename, arcname in INJECT_FILES.items():\n filename = os.path.join('bee2', 'inject', filename)\n if os.path.exists(filename):\n yield filename, arcname\n\n # Additionally add files set in the config.\n for prop in CONF.find_children('InjectFiles'):\n filename = os.path.join('bee2', 'inject', prop.real_name)\n if os.path.exists(filename):\n yield filename, prop.value", "def __get_compile_cache_dep_files(file_path, compile_cache_dep_files, pkg):\n with 
open(file_path) as fh:\n root = ast.parse(fh.read(), file_path)\n for node in ast.iter_child_nodes(root):\n module_name = \"\"\n if isinstance(node, ast.ImportFrom):\n if node.module is not None:\n module_name = node.module\n if node.level == 1:\n module_name = \".\" + module_name\n elif not isinstance(node, ast.Import):\n continue\n # Do not care the files in mindspore package\n if module_name.startswith(\"mindspore\"):\n continue\n\n for n in node.names:\n if n.name.startswith(\"mindspore\"):\n continue\n if module_name == \"\":\n whole_module = n.name\n else:\n whole_module = module_name\n if n.name is not None:\n whole_module += \".\" + n.name\n try:\n module_spec = importlib.util.find_spec(whole_module, pkg)\n except (ModuleNotFoundError, ValueError):\n whole_module = whole_module[0:whole_module.rfind('.')]\n module_spec = importlib.util.find_spec(whole_module, pkg)\n if module_spec is None:\n continue\n module = importlib.util.module_from_spec(module_spec)\n if hasattr(module, '__file__'):\n dep_file_path = module.__file__\n else:\n continue\n # Exclude the installed modules.\n if not _in_sys_path(dep_file_path) and dep_file_path not in compile_cache_dep_files:\n logger.debug(f\"dependent file path: {dep_file_path}\")\n compile_cache_dep_files.append(dep_file_path)\n __get_compile_cache_dep_files(dep_file_path, compile_cache_dep_files, module.__package__)", "def locations(self):\n return [part.file for part in self.iterParts() if part]", "def locations(self):\n return [part.file for part in self.iterParts() if part]", "def locations(self):\n return [part.file for part in self.iterParts() if part]", "def files(self):\n self._printer('\\tFiles Walk')\n for directory in self.directory:\n for path in os.listdir(directory):\n full_path = os.path.join(directory, path)\n if os.path.isfile(full_path):\n if not path.startswith('.'):\n self.filepaths.append(full_path)\n return self._get_filepaths()", "def render_templates(self):\n\n # dockerfile\n try:\n t = self.templates.get_template(\n 'docker/dockerfiles/{}.dockerfile.template'.format(self.repo)\n )\n except TemplateNotFound:\n t = self.templates.get_template(\n 'docker/dockerfiles/default.dockerfile.template'\n )\n\n self.files.append({\n 'name': 'Dockerfile',\n 'content': t.render(commit=self.commit),\n })\n\n # gunicorn\n t = self.templates.get_template(\n 'docker/gunicorn/gunicorn.conf.py'\n )\n self.files.append({\n 'name': 'gunicorn.conf.py',\n 'content': t.render(),\n })\n\n t = self.templates.get_template(\n 'docker/gunicorn/gunicorn.sh'\n )\n self.files.append({\n 'name': 'gunicorn.sh',\n 'content': t.render(),\n 'mode': 0555,\n })\n\n # nginx\n t = self.templates.get_template(\n 'docker/nginx/app.nginx.conf'\n )\n self.files.append({\n 'name': 'app.nginx.conf',\n 'content': t.render(),\n })\n\n t = self.templates.get_template(\n 'docker/nginx/nginx.sh'\n )\n self.files.append({\n 'name': 'nginx.sh',\n 'content': t.render(),\n 'mode': 0555,\n })\n\n # cron/, etc/ iif there exists a `self.repo` directory\n def _filter(p):\n return (\"cron/\" in p or \"etc/\" in p) and (self.repo in p) and \\\n (not os.path.basename(p).startswith('.'))\n\n for t in self.templates.list_templates(\n filter_func=_filter):\n\n self.files.append({\n 'name': os.path.basename(t),\n 'content': self.templates.get_template(t).render(),\n })", "def list_filenames(self):\n l = []\n for path, dirs, files in os.walk(self.archive_path):\n for file in files:\n l.append(os.path.relpath(os.path.join(path,file),self.archive_path))\n l.sort()\n return l", "def 
_get_implicit_script_imports(self) -> list:\r\n implicit_paths: list = []\r\n\r\n for object_name, script_path in self.psc_paths.items():\r\n script_folder_path = os.path.dirname(script_path)\r\n\r\n for import_path in self.import_paths:\r\n # TODO: figure out how to handle imports on different drives\r\n try:\r\n relpath = os.path.relpath(script_folder_path, import_path)\r\n except ValueError as e:\r\n PapyrusProject.log.warning(f'{e} (path: \"{script_folder_path}\", start: \"{import_path}\")')\r\n continue\r\n\r\n test_path = os.path.normpath(os.path.join(import_path, relpath))\r\n if os.path.isdir(test_path) and test_path not in self.import_paths:\r\n implicit_paths.append(test_path)\r\n\r\n return PathHelper.uniqify(implicit_paths)", "def include_external_file(ctx, name):\n with open(os.path.abspath(name), \"r\") as f:\n content = f.read()\n return jinja2.Markup(content)", "def make_files(self):\n return []", "def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compiler) -> T.List[str]:\n\n incre = re.compile(FORTRAN_INCLUDE_PAT, re.IGNORECASE)\n usere = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)\n submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)\n\n mod_files = []\n src = Path(src)\n with src.open(encoding='ascii', errors='ignore') as f:\n for line in f:\n # included files\n incmatch = incre.match(line)\n if incmatch is not None:\n incfile = src.parent / incmatch.group(1)\n # NOTE: src.parent is most general, in particular for CMake subproject with Fortran file\n # having an `include 'foo.f'` statement.\n if incfile.suffix.lower()[1:] in compiler.file_suffixes:\n mod_files.extend(_scan_fortran_file_deps(incfile, srcdir, dirname, tdeps, compiler))\n # modules\n usematch = usere.match(line)\n if usematch is not None:\n usename = usematch.group(1).lower()\n if usename == 'intrinsic': # this keeps the regex simpler\n continue\n if usename not in tdeps:\n # The module is not provided by any source file. This\n # is due to:\n # a) missing file/typo/etc\n # b) using a module provided by the compiler, such as\n # OpenMP\n # There's no easy way to tell which is which (that I\n # know of) so just ignore this and go on. 
Ideally we\n # would print a warning message to the user but this is\n # a common occurrence, which would lead to lots of\n # distracting noise.\n continue\n srcfile = srcdir / tdeps[usename].fname\n if not srcfile.is_file():\n if srcfile.name != src.name: # generated source file\n pass\n else: # subproject\n continue\n elif srcfile.samefile(src): # self-reference\n continue\n\n mod_name = compiler.module_name_to_filename(usename)\n mod_files.append(str(dirname / mod_name))\n else: # submodules\n submodmatch = submodre.match(line)\n if submodmatch is not None:\n parents = submodmatch.group(1).lower().split(':')\n assert len(parents) in {1, 2}, (\n 'submodule ancestry must be specified as'\n f' ancestor:parent but Meson found {parents}')\n\n ancestor_child = '_'.join(parents)\n if ancestor_child not in tdeps:\n raise MesonException(\"submodule {} relies on ancestor module {} that was not found.\".format(submodmatch.group(2).lower(), ancestor_child.split('_', maxsplit=1)[0]))\n submodsrcfile = srcdir / tdeps[ancestor_child].fname\n if not submodsrcfile.is_file():\n if submodsrcfile.name != src.name: # generated source file\n pass\n else: # subproject\n continue\n elif submodsrcfile.samefile(src): # self-reference\n continue\n mod_name = compiler.module_name_to_filename(ancestor_child)\n mod_files.append(str(dirname / mod_name))\n return mod_files", "def depends_on_file(self):\n return 'joinwith' # Join depends on the 'joinwith' attribute", "def _generate_src():\n for ext in extensions:\n yield self.src_format[ext](f=\"{}{}\".format(name, ext))", "def list_strat_dependent_files(self):\n file_dict = deepcopy(_code_dep_files)\n file_dict.update({'mlo_sha1': self.mlo_file, 'smo_sha1': self.smo_file})\n return file_dict", "def get_html_files(f_name):\n files = sorted([os.path.join(f_name, f) for f in os.listdir(f_name) if os.path.isfile(os.path.join(f_name, f))]) #extra check if is file unnecessary\n return files", "def all_changed_files(self):\n return [path_to_file_type(os.path.join(self.path, p)) for p in self.changed_paths() if p]" ]
[ "0.6667309", "0.649749", "0.64170736", "0.63219726", "0.6255717", "0.61782795", "0.6177389", "0.61124706", "0.60708463", "0.60316014", "0.6030461", "0.6025385", "0.60198873", "0.598662", "0.59268177", "0.5915987", "0.58921176", "0.58828634", "0.58777714", "0.5809368", "0.5792631", "0.57712483", "0.57639813", "0.571586", "0.5712797", "0.56780356", "0.56753504", "0.56672317", "0.56671107", "0.5650557", "0.5621721", "0.5618622", "0.5599616", "0.5578899", "0.5574374", "0.5569746", "0.5550369", "0.55494523", "0.5539612", "0.5527859", "0.55251944", "0.55155927", "0.5505614", "0.550401", "0.54945356", "0.5491226", "0.54854095", "0.5481805", "0.5475028", "0.5474311", "0.5467609", "0.5463588", "0.54607755", "0.5451606", "0.54418784", "0.5438029", "0.5436207", "0.54277146", "0.54124314", "0.54122686", "0.5373818", "0.5362526", "0.5355331", "0.53530073", "0.53492683", "0.5347167", "0.53450537", "0.53351074", "0.5332632", "0.53181714", "0.5317963", "0.53176326", "0.5316434", "0.53110445", "0.5308857", "0.5304979", "0.5304963", "0.5299704", "0.5293175", "0.52930695", "0.5292125", "0.5288152", "0.5285921", "0.5278182", "0.52776384", "0.5271895", "0.5271895", "0.5271895", "0.5269081", "0.52656037", "0.5265314", "0.5262677", "0.5257339", "0.5250529", "0.52485424", "0.52478987", "0.5247274", "0.5246217", "0.52307624", "0.5227267" ]
0.6414305
3
Distributes an archive to your web servers
def do_deploy(archive_path):
    if not os.path.exists(archive_path):
        return False
    else:
        try:
            put(archive_path, "/tmp/")
            filename = archive_path.split('/')
            no_ext = filename[-1].split('.')
            archive = no_ext[0]
            run("mkdir -p /data/web_static/releases/" + archive + "/")
            run("tar -zxf /tmp/" + filename[1] + " -C /data/web_static/releases/" + archive + "/")
            run("rm /tmp/" + filename[1])
            run("mv /data/web_static/releases/" + archive + "/web_static/* /data/web_static/releases/" + archive + "/")
            run("rm -rf /data/web_static/releases/" + archive + "/web_static")
            run("rm -rf /data/web_static/current")
            run("ln -s /data/web_static/releases/" + archive + "/ /data/web_static/current")
            print("New version deployed!")
            return True
        except:
            return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def do_pack():\n\n now = datetime.now()\n # format the name of the file with the timestamps\n now_year = now.year\n now_month = now.month\n now_day = now.day\n now_hour = now.hour\n now_minute = now.minute\n now_second = now.second\n # apply the format\n file_name = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(\n now_year, now_month, now_day, now_hour, now_minute, now_second\n )\n # All archives must be stored in the folder versions\n local('mkdir -p versions')\n # execute locally the compression of the folder\n command = local(\"tar -cvzf \" + file_name + \" ./web_static/\")\n # return the archive path if the archive has been correctly generated\n if command.succeeded:\n return file_name\n else:\n return None", "def do_pack():\n date = datetime.datetime.now()\n archive = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(date.year,\n date.month,\n date.day,\n date.hour,\n date.minute,\n date.second)\n local('mkdir -p versions')\n check = local('tar -cvzf {} web_static'.format(archive))\n if check.failed:\n return None\n else:\n return archive", "def do_pack():\n\n now = datetime.now()\n time_now = now.strftime(\"%Y%m%d%H%M%S\")\n archive_name = \"versions/web_static_\" + time_now + \".tgz\"\n local('mkdir -p versions')\n archive_command = local(\"tar -zcvf \" + archive_name + \" web_static\")\n\n if archive_command.succeeded:\n return archive_name\n\n return None", "def do_deploy(archive_path):\n if path.exists(archive_path):\n\n # File name without .tgz\n file_ext = archive_path.split('/')[1]\n file_alone = file_ext.split(\".\")[0]\n curr_release = \"/data/web_static/releases/\" + file_alone + '/'\n\n result = True\n\n # Deploy compressed file to the server /tmp/ directory\n upload = put(archive_path, \"/tmp/\")\n if upload.failed:\n result = False\n\n # Make dir to store the release\n dir_release = run(\"sudo mkdir -p \" + curr_release)\n if dir_release.failed:\n result = False\n\n # Uncompress file inside the folder created\n uncompress = run(\"sudo tar -xzf \" + \"/tmp/\\\n\" + file_ext + \" -C \" + curr_release)\n if uncompress.failed:\n result = False\n\n # Move all files from web_static to folder release\n move_info = run(\"sudo mv \" + curr_release + \"\\\nweb_static/* \" + curr_release)\n if move_info.failed:\n result = False\n\n # Remove empty web_static directory\n rm_empty = run(\"sudo rm -rf \" + curr_release + \"\\\nweb_static/\")\n if rm_empty.failed:\n result = False\n\n # Remove symbolic link current\n rm_link = run(\"sudo rm -rf /data/\\\nweb_static/current\")\n if rm_link.failed:\n result = False\n\n # Make new symbolic link\n new_link = run(\"sudo ln -s \" + curr_release + \" /data/\\\nweb_static/current\")\n if new_link.failed:\n result = False\n\n return result\n else:\n return False", "def do_pack():\n now = datetime.now().strftime(\"%Y%m%d%H%M%S\")\n local('mkdir -p versions')\n result = local('tar -czvf versions/web_static_{}.tgz web_static'\n .format(now))\n if result.failed:\n return None\n else:\n return result", "def do_pack():\n now = datetime.datetime.now()\n path = 'versions/web_static_' +\\\n '{}{}{}{}{}{}'.format(now.year, now.month,\n now.day, now.hour,\n now.minute, now.second) + '.tgz'\n\n local('mkdir -p versions')\n success = local('tar -cvzf {:s} web_static'.format(path), capture=True)\n if success.return_code == 0:\n return path", "def do_pack():\n\n sd = '{0:%Y%m%d%H%M%S}'.format(datetime.now())\n fname = 'versions/web_static_' + sd + '.tgz'\n local('mkdir -p versions')\n rs = local('tar -cvzf ' + fname + ' web_static')\n\n if rs.succeeded:\n 
return fname\n return None", "def do_pack():\n d = datetime.now()\n local(\"mkdir -p versions\")\n file_name = 'versions/web_static_{}{}{}{}{}{}.tgz\\\n'.format(d.year, d.month, d.day, d.hour, d.minute, d.second)\n status = local(\"tar -cvzf\" + file_name + \" ./web_static/\", capture=True)\n if status.succeeded:\n return file_name\n return None", "def do_pack():\n local(\"sudo mkdir -p versions\")\n date_time = datetime.now().strftime(\"%Y%m%d%H%M%S\")\n name_file = \"versions/web_static{}.tgz\".format(date_time)\n local(\"sudo tar -cvzf {} web_static\".format(name_file))\n return name_file", "def do_pack():\n files = 'versions/web_static_{}{}{}{}{}{}.tgz'\\\n .format(T.year, T.month, T.day, T.hour, T.minute, T.second)\n local('mkdir -p versions')\n execute = local(\"tar -cvzf \" + files + \" ./web_static/\")\n if execute.succeeded:\n return files\n return None", "def do_pack():\n a = datetime.now()\n file_name = \"versions/web_static_{}{}{}{}{}{}.tgz\\\n\".format(a.year if a.year > 999 else \"0\" + str(a.year),\n a.month if a.month > 9 else \"0\" + str(a.month),\n a.day if a.day > 9 else \"0\" + str(a.day),\n a.hour if a.hour > 9 else \"0\" + str(a.hour),\n a.minute if a.minute > 9 else \"0\" + str(a.minute),\n a.second if a.second > 9 else \"0\" + str(a.second))\n try:\n print(\"Packing web_static to \" + file_name)\n local(\"mkdir -p versions\")\n\n local(\"tar -cvzf \" + file_name + \" web_static\")\n return file_name\n except:\n return None", "def do_pack():\n time_test = datetime.now().strftime(\"%Y%m%d%H%M%S\")\n file_name = \"versions/web_static_\" + time_test + \".tgz\"\n command1 = \"mkdir -p versions\"\n command2 = \"tar -czvf \" + file_name + \" web_static\"\n local(command1)\n com = local(command2)\n if com.return_code == 0:\n return file_name\n else:\n return None", "def do_deploy(archive_path):\n if os.path.isfile('{}'.format(archive_path)) is False:\n return False\n tgz_file = archive_path.split('/')[-1]\n storage_location = '/tmp/'\n new_location = '/data/web_static/releases/'\n upload = put('{}'.format(archive_path), storage_location)\n if upload is False:\n return False\n create_dir = run('mkdir -p {}'.format(new_location +\n tgz_file.replace('.tgz', '')))\n if create_dir is False:\n return False\n uncompress = run('tar -xzvf {} -C {}'.format(storage_location +\n tgz_file, new_location +\n tgz_file.replace('.tgz', '')))\n if uncompress is False:\n return False\n delete_file = run('rm -f {}'.format(storage_location + tgz_file))\n if delete_file is False:\n return False\n new_route = \"{}\".format(new_location + tgz_file.replace('.tgz', ''))\n move_files = run('mv {}/web_static/* {}'.format(new_route, new_route))\n if move_files is False:\n return False\n delete_folder = run('rm -rf {}/web_static'.format(new_route))\n if delete_folder is False:\n return False\n sym_link_name = '/data/web_static/current'\n delete_sym = run('rm -f {}'.format(sym_link_name))\n if delete_sym is False:\n return False\n create_sym = run('ln -sT {} {}'.format(new_location +\n tgz_file.replace('.tgz', ''),\n sym_link_name))\n if create_sym is False:\n return False\n return True", "def do_pack():\n from os import mkdir, path\n\n filename = \"web_static_{}.tgz\".format(now.strftime(\"%Y%m%d%H%M%S\"))\n filepath = \"versions/{}\".format(filename)\n\n try:\n mkdir('./versions')\n except FileExistsError:\n pass\n\n print(\"Packing web_static to {}\".format(filepath))\n cmd = local('tar -cvzf {} web_static'.format(filepath))\n if (cmd.return_code == 0):\n filesize = path.getsize(filepath)\n 
print(\"web_static packed: {} -> {}Bytes\".format(filepath, filesize))\n return filepath\n return None", "def do_deploy(archive_path):\n if not path.exists(archive_path) and not path.isfile(archive_path):\n return False\n\n myspath = archive_path.split('/')[1].split(\".\")\n i = myspath[0]\n\n try:\n # Upload the archive to the /tmp/ directory of the web server\n put(archive_path, '/tmp')\n\n # create dir\n run(\"sudo mkdir -p /data/web_static/releases/\" + i + \"/\")\n\n # Uncompress the archive to the folder\n # /data/web_static/releases/<archive filename\n # without extension> on the web server\n run(\"sudo tar -xzf /tmp/\" + i + \".tgz\" +\n \" -C /data/web_static/releases/\" + i + \"/\")\n\n # Delete the archive from the web server\n run(\"sudo rm /tmp/\" + i + \".tgz\")\n\n run(\"sudo mv /data/web_static/releases/\" + i +\n \"/web_static/* /data/web_static/releases/\" + i + \"/\")\n\n # Delete the symbolic link\n run(\"sudo rm -rf /data/web_static/releases/\" + i + \"/web_static\")\n run(\"sudo rm -rf /data/web_static/current\")\n\n # Create a new the symbolic link\n run(\"sudo ln -s /data/web_static/releases/\" + i +\n \"/ /data/web_static/current\")\n except Exception:\n return False\n\n return True", "def do_pack():\n time = datetime.now()\n file = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(\n time.year,\n time.month,\n time.day,\n time.hour,\n time.minute,\n time.second\n )\n local('mkdir -p versions')\n if local('tar -cvzf ' + file + ' web_static').succeeded:\n return file\n return None", "def do_pack():\n a = datetime.now()\n file_name = \"versions/web_static_{}{}{}{}{}{}.tgz\".format(a.year,\n a.month,\n a.day,\n a.hour,\n a.minute,\n a.second)\n try:\n print(\"Packing web_static to \" + file_name)\n local(\"mkdir -p versions\")\n\n local(\"tar -cvzf \" + file_name + \" web_static\")\n return file_name\n except:\n return None", "def do_deploy(archive_path):\n try:\n test = put(archive_path, \"/tmp/\")\n lista = archive_path.split('/')\n folder = lista[-1][:lista[-1].find(\".\")]\n dest = \"/data/web_static/releases/\" + folder\n run(\"mkdir -p {}\".format(dest))\n run(\"tar -xzf {} -C {}\".format(test[0], dest))\n run(\"rm {}\".format(test[0]))\n run(\"mv /data/web_static/releases/{}/web_static/* \\\n /data/web_static/releases/{}/\".format(folder, folder))\n run(\"rm -rf /data/web_static/releases/{}/web_static\".format(folder))\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -s /data/web_static/releases/{} /data/web_static/current\".\n format(folder))\n return True\n except Exception:\n return False", "def do_pack():\n makedirs('versions', exist_ok=True)\n date = 'versions/web_static_{}{}{}{}{}{}.tgz'.format(\n time.year, time.month, time.day, time.minute, time.second)\n check = local(\"tar -cvzf \" + date + \" ./web_static/\")\n if check.succeeded:\n return date\n return None", "def do_deploy(archive_path):\n if not os.path.isfile(archive_path):\n return False\n\n try:\n file_name = archive_path[9:]\n file_n_short = file_name[:-4]\n curr_path = os.getcwd()\n full_path = curr_path + \"/\" + archive_path\n put(full_path, \"/tmp/\")\n run(\"mkdir -p /data/web_static/releases/\" + file_n_short)\n run(\"tar -xzf /tmp/\" + file_name + \" -C /data/web_static/releases/\" +\n file_n_short + \"/\")\n run(\"rm /tmp/\" + file_name)\n run(\"mv /data/web_static/releases/\" + file_n_short +\n \"/web_static/* /data/web_static/releases/\" + file_n_short + \"/\")\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -s /data/web_static/releases/\" +\n file_n_short + \" 
/data/web_static/current\")\n return True\n except:\n print(\"error\")\n return False", "def do_pack():\n now = datetime.now()\n file_name = \"web_static_{}{}{}{}{}{}.tgz\".format(\n now.year,\n now.month,\n now.day,\n now.hour,\n now.minute,\n now.second\n )\n try:\n local(\"sudo tar -cvzf {} ./web_static\".format(file_name))\n local(\"sudo mkdir -p versions\")\n local(\"sudo mv ./{} versions/\".format(file_name))\n except:\n return (None)\n return (\"versions/{}\".format(file_name))", "def do_pack():\n time_f = '%Y%m%d%H%M%S'\n try:\n if not os.path.exists('versions'):\n local('mkdir versions')\n to = 'versions/web_static_{}.tgz'.format(\n datetime.now().strftime(time_f))\n\n local('tar -cvzf {} web_static'.format(to))\n return(to)\n except:\n return (None)", "def do_pack():\n with settings(warn_only=True):\n res = local(\"mkdir -p versions\")\n date = dt.now()\n pathname = \"versions/web_static_\"\n pathname += str(date.year)\n pathname += str(date.month)\n pathname += str(date.day)\n pathname += str(date.hour)\n pathname += str(date.minute)\n pathname += str(date.second)\n pathname += \".tgz\"\n res2 = local(\"tar -cvzf \" + pathname + \" web_static\")\n if res2.return_code == 0:\n return pathname", "def do_pack():\n try:\n date = datetime.now().strftime(\"%Y%m%d%H%M%S\")\n if isdir(\"versions\") is False:\n local(\"mkdir versions\")\n file_name = \"versions/web_static_{}.tgz\".format(date)\n local(\"tar -cvzf {} web_static\".format(file_name))\n return file_name\n except BaseException:\n return None", "def do_deploy(archive_path):\n if (not archive_path or not os.path.exists(archive_path)):\n return False\n try:\n put(archive_path, \"/tmp/\")\n file_name = archive_path.split('/')[1]\n folder_name = file_name.split('.')[0]\n path = \"/data/web_static/releases/\" + folder_name\n run(\"mkdir -p \" + path)\n run(\"tar -xzvf /tmp/\" + file_name + \" -C \" + path)\n run(\"mv \" + path + \"/web_static/* \" + path)\n run(\"rm -rf \" + path + \"/web_static/\")\n run(\"rm /tmp/\" + file_name)\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -s \" + path + \" /data/web_static/current\")\n return True\n except:\n return False", "def do_pack():\n\n local(\"mkdir -p versions\")\n current = dt.now()\n current = current.now()\n tgz = \"web_static_{}.tgz\".format(current.strftime(\"%Y%m%d%H%M%S\"))\n working = local(\"tar -cavf versions/{} web_static\".format(tgz))\n\n if working.failed:\n return None\n else:\n return \"versions/{}\".format(tgz)", "def do_pack():\n try:\n if not os.path.exists(\"versions\"):\n local(\"mkdir versions\")\n date = datetime.now()\n date = date.strftime(\"%Y%m%d%H%M%S\")\n new_versions = \"versions/web_static_{}.tgz\".format(date)\n local(\"tar -cvzf {} web_static\".format(new_versions))\n return new_versions\n except:\n return None", "def do_deploy(archive_path):\n if not os.path.exists(archive_path):\n return False\n\n file_name = os.path.basename(archive_path)\n tmp_path = \"/tmp/{}\".format(file_name)\n no, wout = os.path.splitext(file_name)\n dest = \"/data/web_static/releases/{}\".format(no)\n curr = \"/data/web_static/current\"\n try:\n put(archive_path, tmp_path)\n run(\"sudo mkdir -p {}\".format(dest))\n run(\"sudo tar -xzf {} -C {}\".format(tmp_path, dest))\n run(\"sudo rm {}\".format(tmp_path))\n run(\"sudo mv {}/web_static/* {}/\".format(dest, dest))\n run(\"sudo rm -rf {}/web_static\".format(dest))\n run(\"sudo rm -rf {}\".format(curr))\n run(\"sudo ln -s {} {}\".format(dest, curr))\n return True\n except:\n return False", "def do_pack():\n with 
api.settings(warn_only=True):\n isdir = os.path.isdir('versions')\n if not isdir:\n mkdir = api.local('mkdir versions')\n if mkdir.failed:\n return False\n suffix = datetime.now().strftime('%Y%m%d%M%S')\n path = 'versions/web_static_{}.tgz'.format(suffix)\n tar = api.local('tar -cvzf {} web_static'.format(path))\n if tar.failed:\n return False\n size = os.stat(path).st_size\n print('web_static packed: {} -> {}Bytes'.format(path, size))\n return path", "def do_pack():\n date = (datetime.strftime(datetime.now(), \"%Y%m%d%H%M%S\"))\n name = \"versions/web_static_{}.tgz\".format(date)\n\n if not os.path.exists(\"./versions/\"):\n os.makedirs(\"./versions/\")\n try:\n local(\"tar -cvzf {} web_static\".format(name))\n return (name)\n except:\n return (None)", "def do_pack():\n time = datetime.utcnow().strftime('%Y%m%d%H%M%S')\n file_name = \"versions/web_static_{}.tgz\".format(time)\n try:\n local(\"mkdir -p ./versions\")\n local(\"tar --create --verbose -z --file={} ./web_static\"\n .format(file_name))\n return file_name\n except:\n return None", "def do_pack():\n with api.settings(warn_only=True):\n isdir = os.path.isdir(\"versions\")\n if not isdir:\n mkdir = api.local(\"mkdir versions\")\n if mkdir.failed:\n return None\n sfx = datetime.now().strftime(\"%Y%m%d%M%S\")\n path = \"versions/web_static_{:s}.tgz\".format(sfx)\n tar = api.local(\"tar -cvzf {:s} web_static\".format(path))\n if tar.failed:\n return None\n size = os.stat(path).st_size\n print(\"wb_static packed: {} -> {}Bytes\".format(path, size))\n return path", "def do_deploy(archive_path):\n if not os.path.exists(archive_path):\n return False\n try:\n file_name = os.path.basename(archive_path)\n put('file_name', '/tmp/{}'.format(file_name))\n spaces = file_name.replace('.', ' ')\n without_ext = spaces.split(' ')\n without_ext1 = with_ext[-2]\n run('mkdir -p /data/web_static/releases/{}'.format(without_ext1))\n run('tar -xzf /tmp/{} -C /data/web_static/releases/{}/'.format(file_name, without_ext1))\n run('rm /tmp/{}'.format(file_name))\n run('mv /data/web_static/releases/{}/web_static/* /data/web_static/releases/{}/'.format(without_ext1, without_ext1))\n run('rm -rf /data/web_static/releases/{}/web_static'.format(without_ext1))\n run('rm -rf /data/web_static/current')\n run('ln -s /data/web_static/releases/{}/ /data/web_static/current'.format(without_ext1))\n return True\n except:\n return False", "def do_pack():\n local(\"mkdir -p versions\")\n time = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S\")\n file = local(\"tar -czvf versions/web_static_%s.tgz web_static\" % time)\n if file:\n return \"versions/web_static_{}.tgz\".format(time)\n else:\n return None", "def do_deploy(archive_path):\n if not path.exists(archive_path):\n return False\n try:\n file_name = archive_path.split(\"/\")[-1]\n just_name = file_name.split(\".\")[0]\n api.put(archive_path, \"/tmp/\")\n api.run(\"mkdir -p /data/web_static/releases/{}\".format(just_name))\n api.run(\"tar -xzf /tmp/{} -C /data/web_static/releases/{}\".format(\n file_name, just_name))\n api.run(\"rm /tmp/{}\".format(file_name))\n path_r = \"/data/web_static/releases/\"\n api.run('mv {0}{1}/web_static/* {0}{1}/'.format(path_r, just_name))\n api.run('rm -rf /data/web_static/releases/{}/web_static'.format(\n just_name))\n api.run(\"rm -rf /data/web_static/current\")\n api.run(\"ln -s /data/web_static/releases/{} \\\n /data/web_static/current\".format(just_name))\n return True\n except:\n return False", "def deploy():\n archive_path = do_pack()\n if archive_path is None:\n print(\"pass\")\n 
return False\n return do_deploy(archive_path)", "def do_deploy(archive_path):\n\n if os.path.exists(archive_path):\n\n put(archive_path, \"/tmp/\")\n filename = os.path.basename(archive_path)\n (file, ext) = os.path.splitext(filename)\n rel_path = \"/data/web_static/releases/\"\n run(\"mkdir -p {}{}/\".format(rel_path, file))\n run(\"tar -xzvf /tmp/{} -C {}{}/\".format(filename, rel_path, file))\n run(\"rm -f /tmp/{}\".format(filename))\n run(\"mv {}{}/web_static/* {}{}/\".format(rel_path, file,\n rel_path, file))\n run(\"rm -rf {}{}/web_static\".format(rel_path, file))\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -sf {}{}/ /data/web_static/current\".format(rel_path, file))\n\n return True\n return False", "def do_pack():\n try:\n now = time.strftime(\"%Y%m%d%H%M%S\")\n local('mkdir -p ./versions')\n local('tar -cvzf versions/web_static_{}.tgz web_static'.format(now))\n return(\"versions/web_static_{}.tgz\".format(now))\n except:\n return None", "def do_deploy(archive_path):\n if not path.exists(archive_path):\n return False\n splitted = archive_path.split(\"/\")\n noexten = path.splitext(splitted[1])[0]\n\n try:\n put(archive_path, \"/tmp/\")\n run(\"mkdir -p /data/web_static/releases/{}\".format(noexten))\n run(\"tar -xzf /tmp/{} -C /data/web_static/releases/{}/\"\n .format(splitted[1], noexten))\n run(\"rm /tmp/{}\".format(splitted[1]))\n run(\"mv /data/web_static/releases/{}/web_static/* \\\n /data/web_static/releases/{}/\".format(noexten, noexten))\n run(\"rm -rf /data/web_static/releases/{}/web_static\".format(noexten))\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -s /data/web_static/releases/{}/\\\n /data/web_static/current\".format(noexten))\n except Exception:\n return False", "def do_pack():\n try:\n if isdir('versions') is False:\n local(\"mkdir versions\")\n tgz_file = \"versions/web_static_{}.tgz\".format(\n time.strftime(\"%Y%m%d%H%M%S\"))\n local(\"tar -cvzf {} web_static\".format(tgz_file))\n return tgz_file\n except:\n return None", "def do_pack():\n try:\n if os.path.isdir(\"versions\") is False:\n os.mkdir(\"versions\")\n time = datetime.datetime.now().strftime(\"%Y%m%d%H%M%S\")\n packed = 'versions/web_static_' + time + '.tgz'\n fabric.api.local(\"tar -cvzf {} web_static\".format(packed))\n return packed\n except:\n return None", "def do_deploy(archive_path):\n if not os.path.exists(archive_path):\n return False\n\n file_ext = archive_path[archive_path.find('/') + 1:]\n file_name = archive_path[archive_path.find('/') + 1: -4]\n\n result = put(archive_path, '/tmp/' + file_ext)\n if result.failed:\n return False\n\n result = run('mkdir -p /data/web_static/releases/' + file_name + '/')\n if result.failed:\n return False\n\n result = run('tar -xzf /tmp/' + file_ext +\n ' -C /data/web_static/releases/' + file_name + '/')\n if result.failed:\n return False\n\n result = run('rm /tmp/' + file_ext)\n if result.failed:\n return False\n\n result = run('mv /data/web_static/releases/' + file_name +\n '/web_static/* /data/web_static/releases/' + file_name + '/')\n if result.failed:\n return False\n\n result = run('rm -rf /data/web_static/releases/' + file_name +\n '/web_static')\n if result.failed:\n return False\n\n result = run('rm -rf /data/web_static/current')\n if result.failed:\n return False\n\n result = run('ln -s /data/web_static/releases/' +\n file_name + '/ /data/web_static/current')\n if result.failed:\n return False\n\n print('New version deployed!')\n return True", "def do_pack():\n local(\"mkdir -p versions\", capture=True)\n time = datetime.now()\n 
date = time.strftime(\"%Y%m%d%H%M%S\")\n path = \"versions/web_static_{}.tgz\".format(date)\n if local(\"tar -czvf {} web_static/\".format(path), capture=False):\n return path\n else:\n return None", "def do_pack():\n\n datenow = datetime.now()\n full_date = datenow.strftime(\"%Y%m%d%H%M%S\")\n\n try:\n if not os.path.isdir(\"versions\"):\n local(\"mkdir versions\")\n local_command = local(\"tar -cvzf versions/web_static_{}.tgz web_static\"\n .format(full_date))\n return local_command\n except Exception:\n return None", "def do_deploy(archive_path):\n if not os.path.isfile(archive_path):\n return False\n with api.cd(\"/tmp\"):\n basename = os.path.basename(archive_path)\n root, ext = os.path.splitext(basename)\n opath = \"/data/web_static/releases/{}\".format(root)\n try:\n ppath = api.put(archive_path)\n if files.exists(opath):\n api.run(\"rm -rdf {}\".format(opath))\n api.run(\"mkdir -p {}\".format(opath))\n api.run(\"tar -xzf {} -C {}\".format(ppath[0], opath))\n api.run(\"rm -f {}\".format(ppath[0]))\n api.run(\"mv -u {}/web_static/* {}\".format(opath, opath))\n api.run(\"rm -rf {}/web_static\".format(opath))\n api.run(\"rm -rf /data/web_static/current\")\n api.run(\"ln -s {} /data/web_static/current\".format(opath))\n print(\"New version deployed!\")\n except:\n return False\n else:\n return True", "def do_deploy(archive_path):\n if not os.path.isfile(archive_path):\n return False\n with api.cd('/tmp'):\n basename = os.path.basename(archive_path)\n root, ext = os.path.splitext(basename)\n outpath = '/data/web_static/releases/{}'.format(root)\n try:\n putpath = api.put(archive_path)\n if files.exists(outpath):\n api.run('rm -rdf {}'.format(outpath))\n api.run('mkdir -p {}'.format(outpath))\n api.run('tar -xzf {} -C {}'.format(putpath[0], outpath))\n api.run('rm -f {}'.format(putpath[0]))\n api.run('mv -u {}/web_static/* {}'.format(outpath, outpath))\n api.run('rm -rf {}/web_static'.format(outpath))\n api.run('rm -rf /data/web_static/current')\n api.run('ln -s {} /data/web_static/current'.format(outpath))\n print('New version deployed!')\n except:\n return False\n else:\n return True", "def archive(ctx, config):\n log.info('Creating archive directory...')\n archive_dir = misc.get_archive_dir(ctx)\n run.wait(\n ctx.cluster.run(\n args=[\n 'install', '-d', '-m0755', '--', archive_dir,\n ],\n wait=False,\n )\n )\n\n try:\n yield\n except Exception:\n # we need to know this below\n set_status(ctx.summary, 'fail')\n raise\n finally:\n passed = get_status(ctx.summary) == 'pass'\n if ctx.archive is not None and \\\n not (ctx.config.get('archive-on-error') and passed):\n log.info('Transferring archived files...')\n logdir = os.path.join(ctx.archive, 'remote')\n if (not os.path.exists(logdir)):\n os.mkdir(logdir)\n for rem in ctx.cluster.remotes.iterkeys():\n path = os.path.join(logdir, rem.shortname)\n misc.pull_directory(rem, archive_dir, path)\n # Check for coredumps and pull binaries\n fetch_binaries_for_coredumps(path, rem)\n\n log.info('Removing archive directory...')\n run.wait(\n ctx.cluster.run(\n args=[\n 'rm',\n '-rf',\n '--',\n archive_dir,\n ],\n wait=False,\n ),\n )", "def do_deploy(archive_path):\n if not os.path.isfile(archive_path):\n return False\n with api.cd(\"/tmp\"):\n basename = os.path.basename(archive_path)\n root, ext = os.path.splitext(basename)\n opath = \"/data/web_static/releases/{}\".format(root)\n try:\n ppath = api.put(archive_path)\n if files.exists(opath):\n api.run(\"rm -rdf {}\".format(opath))\n api.run(\"mkdir -p {}\".format(opath))\n api.run(\"tar -xzf {} 
-C {}\".format(ppath[0], opath))\n api.run(\"rm -f {}\".format(ppath[0]))\n api.run(\"mv -u {}/web_static/* {}\".format(opath, opath))\n api.run(\"rm -rf {}/web_static\".format(opath))\n api.run(\"rm -rf /data/web_static/current\")\n api.run(\"ln -sf {} /data/web_static/current\".format(opath))\n print(\"New version deployed!\")\n except:\n return False\n else:\n return True", "def do_deploy(archive_path):\n if (path.isfile(archive_path) != 1):\n return False\n try:\n file = archive_path.split('/')[-1]\n new_folder = '/data/web_static/releases/{}'.format(file.split('.')[0])\n put(archive_path, '/tmp/')\n run('mkdir -p {}'.format(new_folder))\n run('tar -xzf /tmp/{} -C {}'.format(file, new_folder))\n run('mv {}/web_static/* {}'.format(new_folder, new_folder))\n run('rm -rf {}/web_static'.format(new_folder))\n run('rm /tmp/{}'.format(file))\n run('rm -rf /data/web_static/current')\n run('ln -sf {} /data/web_static/current'.format(new_folder))\n return True\n except:\n return False", "def do_deploy(archive_path):\n\n if not os.path.exists(archive_path):\n return False\n\n ret = True\n\n tmpfolder = put(archive_path, '/tmp/')\n\n if tmpfolder.failed:\n ret = False\n\n dirc = archive_path.replace(\".tgz\", \"\").replace(\"versions/\", \"\")\n dest = run('mkdir -p /data/web_static/releases/' + dirc + '/')\n\n if dest.failed:\n ret = False\n\n unpack = run('tar -xzf /tmp/' + dirc + '.tgz' +\n ' -C /data/web_static/releases/' + dirc + '/')\n\n if unpack.failed:\n ret = False\n\n clean = run('rm /tmp/' + dirc + '.tgz')\n\n if clean.failed:\n ret = False\n\n move = run('mv /data/web_static/releases/' + dirc +\n '/web_static/* /data/web_static/releases/' + dirc + '/')\n\n if move.failed:\n ret = False\n\n cleanfolder = run('rm -rf /data/web_static/releases/' + dirc +\n '/web_static')\n\n if cleanfolder.failed:\n ret = False\n\n rmold = run('rm -rf /data/web_static/current')\n\n if rmold.failed:\n ret = False\n\n new = run('ln -sf /data/web_static/releases/' + dirc +\n '/' + ' /data/web_static/current')\n\n if new.failed:\n ret = False\n\n if ret:\n print(\"New version deployed!\")\n\n return ret", "def dist():\n PackCommandExecutor().pack()\n DistCommandExecutor().dist()", "def do_pack():\n\n local('mkdir -p versions')\n\n time = datetime.now().strftime(\"%Y%m%d%H%M%S\")\n file_time = 'versions/web_static_{}.tgz'.format(time)\n\n compressed = local(\"tar -cvzf \" + file_time + \" web_static/\")\n\n if compressed.succeeded:\n return file_time\n return None", "def publish():\n fab.local(\"env/bin/python setup.py sdist\")\n tar_filename = fab.local(\n \"env/bin/python setup.py --fullname\", capture=True\n )\n dist_filename = \"dist/{}.tar.gz\".format(tar_filename)\n fab.put(dist_filename, PYREPO_DIR)", "def do_deploy(archive_path):\n if not archive_path:\n return False\n if not os.path.exists(archive_path):\n return False\n\n filename = archive_path.split(\"/\")[-1]\n put(archive_path, \"/tmp/{}\".format(filename))\n\n run(\"sudo mkdir -p /data/web_static/releases/{}\".format(filename))\n run(\"sudo tar -xzf /tmp/{} -C /data/web_static/releases/{}\"\n .format(filename, filename))\n run(\"sudo rm /tmp/{}\".format(filename))\n run(\"sudo mv /data/web_static/releases/{}/web_static/*\"\n \" /data/web_static/releases/{}\"\n .format(filename, filename))\n run(\"sudo rm -rf /data/web_static/releases/{}/web_static\"\n .format(filename))\n run(\"sudo rm -rf /data/web_static/current\")\n run(\"sudo ln -s /data/web_static/releases/{}/ /data/web_static/current\"\n .format(filename))\n print(\"New version 
successfully deployed!\")", "def deploy():\n archive_path = do_pack()\n\n if not archive_path:\n return False\n\n return do_deploy(archive_path)", "def do_deploy(archive_path):\n if (os.path.isfile(archive_path) is False):\n print(\"wtf\")\n return False\n\n arch_name = archive_path.split('/')[-1]\n folder = (\"/data/web_static/release/\" + arch_name.split(\".\")[0])\n try:\n put(archive_path, \"/tmp/\")\n run(\"mkdir -p {}/\".format(folder))\n run(\"tar -xzf /tmp/{} -C {}\".format(arch_name, folder))\n run(\"rm /tmp/{}\".format(arch_name))\n run(\"mv {}/web_static/* {}/\".format(folder, folder))\n run(\"rm -rf {}/web_static\".format(folder))\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -s {}/ /data/web_static/current\".format(folder))\n print(\"New version deployed!\")\n return (True)\n except:\n print(\"Not Deployed\")\n return (False)", "def do_deploy(archive_path):\n if not exists(archive_path):\n return False\n fileNameExt = archive_path.split('/')[-1]\n fileName = fileNameExt.split(\".\")[0]\n result = put(archive_path, '/tmp/{}'.format(fileNameExt))\n if result.failed:\n return False\n result = run(\"rm -rf /data/web_static/releases/{}/\".format(fileName))\n if result.failed:\n return False\n result = run(\"mkdir -p /data/web_static/releases/{}/\".format(fileName))\n if result.failed:\n return False\n result = run(\"tar -xzf /tmp/{} -C /data/web_static/releases/{}/\"\n .format(fileNameExt, fileName))\n if result.failed:\n return False\n result = run(\"rm /tmp/{}\".format(fileNameExt))\n if result.failed:\n return False\n input = \"mv /data/web_static/releases/{}/web_static/*\\\n /data/web_static/releases/{}/\".format(fileName, fileName)\n result = run(input)\n if result.failed:\n return False\n result = run(\"rm -rf /data/web_static/releases/{}/web_static\"\n .format(fileName))\n if result.failed:\n return False\n result = run(\"rm -rf /data/web_static/current\")\n if result.failed:\n return False\n result = run(\"ln -s /data/web_static/releases/{}/ /data/web_static/current\"\n .format(fileName))\n if result.failed:\n return False\n print(\"New version deployed!\")\n return True", "def deploy():\n\n project_dir = '/home/gastosabertos/gastos_abertos_website'\n with cd(project_dir):\n local('tar -cvzf build.tar.gz build')\n run('cp -r build build-old')\n put('build.tar.gz', '.')\n run('tar -xvf build.tar.gz')", "def deploy():\n new_archive = do_pack()\n\n if new_archive is None:\n return False\n\n res = do_deploy(new_archive)\n return res", "def do_deploy(archive_path):\n\n if not os.path.exists(archive_path):\n return(False)\n try:\n put(archive_path, \"/tmp/\")\n folder_path = \"/data/web_static/releases/\" + archive_path[9:-4]\n name_file = archive_path[9:]\n name_folder = archive_path[9:-4]\n date = archive_path[21:-4]\n releases = \"/data/web_static/releases/\"\n\n run(\"mkdir -p {}\".format(folder_path))\n run(\"tar -xzf /tmp/{} -C {}\".format(name_file, folder_path))\n run(\"rm /tmp/{}\".format(name_file))\n run(\"mv {}{}/web_static/* {}{}/\"\n .format(releases, name_folder, releases, name_folder))\n run(\"rm -rf {}{}/web_static\".format(releases, name_folder))\n run(\"rm -rf /data/web_static/current\")\n run(\"ln -s {} /data/web_static/current\".format(folder_path))\n print(\"New version deployed!\")\n\n return(True)\n except BaseException:\n return (False)", "def publish():\n reset()\n compress()\n build()\n s3deploy()\n log_success()", "def do_deploy(archive_path):\n if path.exists(archive_path) is False:\n print(\"pass\")\n return False\n\n try:\n file_ = 
archive_path.split(\"/\")[1]\n filename = file_.split(\".\")[0]\n put(archive_path, \"/tmp/\")\n run(\"sudo mkdir -p /data/web_static/releases/\" + filename)\n run(\"sudo tar -zxf /tmp/{}.tgz -C {}\".format(\n filename, \"/data/web_static/releases/\" + filename))\n run(\"sudo rm /tmp/{}\".format(file_))\n run('sudo mv /data/web_static/releases/{}/web_static/* /data/web_static\\\n/releases/{}'.format(filename, filename))\n run(\"sudo rm -rf /data/web_static/current\")\n run(\"sudo ln -sf /data/web_static/releases/{}\\\n /data/web_static/current\".format(filename))\n print(\"New version deployed!\")\n return True\n except:\n return False", "def deploy():\n archive_path = do_pack()\n if archive_path is False:\n return false\n\n deploy_return = do_deploy(archive_path)\n return deploy_return", "def deploy():\n\n archive_path = do_pack()\n\n if archive_path is None:\n return False\n\n return do_deploy(archive_path)", "def pack():\n clean_local()\n build()\n copy_json()\n optimize()\n tarball()", "def sdist():\n pass", "def do_deploy(archive_path):\n from os import path\n\n if not path.exists(archive_path):\n print(\"No archive path given!\")\n return False\n\n filename = archive_path.split('/')[1]\n dest_path = \"/data/web_static/releases/{}/\".format(filename.split('.')[0])\n\n try:\n print(\"Executing task do_deploy\")\n put(archive_path, \"/tmp/{}\".format(filename))\n run('mkdir -p {}'.format(dest_path))\n run('tar -xzf /tmp/{} -C {}'.format(filename, dest_path))\n run('rm /tmp/{}'.format(filename))\n run('mv {}web_static/* {}'.format(dest_path, dest_path))\n run('rm -rf {}web_static'.format(dest_path))\n run('rm -rf /data/web_static/current')\n run('ln -s {} /data/web_static/current'.format(dest_path))\n print(\"New version deployed!\")\n return True\n except Exception:\n return False", "def deploy():\n build()\n copy()\n install()", "def files_distribute(self):\n self._post('files/distribute')", "def deploy():\n myfile = do_pack()\n if myfile is None:\n return False\n return do_deploy(myfile)", "def master_archive(f, e):\n template = e.get_template(TEMPLATES['archive'])\n write_file(\"archives.html\", template.render(entries=f))", "def web_archive():\n\n try:\n auth_check()\n except Exception as e:\n return flask.redirect(str(e))\n\n return flask.render_template('archive.html', user = flask.session['user'],\n archives = db_get_archives())", "def archive(mongo_backup_file):\r\n filename = get_archive_filename()\r\n tar = tarfile.open(filename, \"w|gz\")\r\n tar.add(mongo_backup_file)\r\n tar.close()\r\n\r\n return filename", "def deploy_to_s3():\n env.gzip_path = '%(path)s/repository/gzip/assets/' % env\n run(('s3cmd -P --add-header=Content-encoding:gzip --guess-mime-type --rexclude-from=%(path)s/repository/s3exclude sync %(gzip_path)s s3://%(s3_bucket)s/%(project_name)s/') % env)", "def publish(self, filename):\n # 1) Encrypt file\n # 2) Publish to remote cloud server\n # 3) Wait for the result\n # 4) Store results in files located inside RAM folder", "def copy():\n put(os.path.join('dist', get_egg_name()), remote_egg_dir)", "def package(self, outfile, update=False, local=True, remote=True):\n log.debug(\"Packaging and streaming %s\" % self.name)\n with TarPackaging(outfile) as tar:\n self._build(tar, update, local, remote, True)\n log.debug(\"Packaged %s\" % self.name)", "def deploy():\n filepath = do_pack()\n if (filepath is None):\n return False\n return do_deploy(filepath)", "def deploy():\n return do_deploy(do_pack())", "def deploy():\n return do_deploy(do_pack())", "def deploy():\n 
return do_deploy(do_pack())", "def create_zip_file():\n shutil.make_archive(os.path.join(DIST_DIR, \"build\"), \"zip\", BUILD_DIR)", "def publish():\n if sys.argv[-1] == 'publish':\n os.system('python setup.py sdist')\n os.system('twine upload dist/*')\n sys.exit()", "def extract_to_disk(self):\n archive_name, extension = os.path.splitext(os.path.basename(self.file.name))\n if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):\n os.mkdir(archive_name)\n os.chdir(archive_name)\n for filename, data in self.extract().items():\n f = open(filename, 'wb')\n f.write(data or b'')\n f.close()", "def deploy():", "def deploy():\n require('hosts', provided_by=[prod])\n require('whole_path', provided_by=[prod])\n require('code_root')\n upload_tar_from_git(env.whole_path)\n install_requirements()\n symlink_current_release()\n migrate()\n restart_webservers()\n setup_permissions()\n collectstatic()", "def move_packages (name, stage_dir, package_dir):\n\n print (\"Storing packages for \", name)\n\n # Take care of the zip file\n print (\"\\tZip file...\")\n target_file = join (package_dir, name + \".zip\")\n shutil.copy (join (stage_dir, \"zip-archive.zip\"), target_file)\n ex (\"md5sum \" + target_file + \" > \" + target_file + \".md5\")\n\n\n tar_file = join (stage_dir, \"tar-archive.tar\")\n target_file = join (package_dir, name + \".tar\")\n\n # bzip\n print (\"\\tBzip2 file.....\")\n shutil.copy (tar_file, target_file)\n ex (\"bzip2 \" + target_file)\n ex (\"md5sum \" + target_file + \".bz2 > \" + target_file + \".bz2.md5\")\n\n print (\"\\tgzip file.....\")\n shutil.copy (tar_file, target_file)\n ex (\"gzip \" + target_file)\n ex (\"md5sum \" + target_file + \".gz > \" + target_file + \".gz.md5\")", "def download_result_archive(run_id):\n from robflask.service import service\n with service() as api:\n ioBuffer = api.runs().get_result_archive(run_id=run_id)\n return send_file(\n ioBuffer.open(),\n as_attachment=True,\n attachment_filename='run.tar.gz',\n mimetype='application/gzip'\n )", "def download(request):\n \n\n def make_archive(source, destination):\n print(source, destination)\n base = os.path.basename(destination)\n name = base.split('.')[0]\n format = base.split('.')[1]\n archive_from = os.path.dirname(source)\n archive_to = os.path.basename(source.strip(os.sep))\n print(source, destination, archive_from, archive_to)\n shutil.make_archive(name, format, archive_from, archive_to)\n shutil.move('%s.%s' % (name, format), destination)\n\n user_id = request.session['user_id']\n user_root = request.session['user_root']\n search_id = request.session['search_id']\n logger = Logger(user_root,user_id)\n logger.write(\"start compressing images..\")\n t_start_zip=time.time()\n zip_target = os.path.join(user_root, search_id)\n zip_path = os.path.join(user_root, search_id, \"Color_images.zip\")\n make_archive(zip_target, zip_path)\n print(\"finish zip.\")\n zip_file = open(zip_path, '+rb')\n response = HttpResponse(zip_file, content_type='application/zip')\n response[\n 'Content-Disposition'] = 'attachment; filename=%s' % \"dataset.zip\"\n response['Content-Length'] = os.path.getsize(zip_path)\n zip_file.close()\n logger.write(\"compressing images finished (\"+convert_duration_time(time.time(),t_start_zip)+\"s)\")\n\n return response", "def extract_file(self):\n# path_destination = os.path.join(\n# self.root, self.resources.replace(\".zip\", \"\"))\n# os.makedirs(path_destination, exist_ok=True)\n shutil.unpack_archive(os.path.join(\n self.root, self.resources), self.root)\n 
os.remove(os.path.join(self.root, self.resources))", "def zip_repo(src_path, dest_path):\n tar = tarfile.open(dest_path, \"w:gz\")\n for file_name in glob.glob(os.path.join(src_path, \"*\")):\n tar.add(file_name, os.path.basename(file_name))\n\n tar.close()", "def dist(context):\n context.run(\"python setup.py sdist\")\n context.run(\"python setup.py bdist_wheel\")", "def create_archive(filelist):\n\t\n\n\ttmp = tempfile.NamedTemporaryFile()\n\t# with tempfile.SpooledTemporaryFile() as tmp:\n\twith zipfile.ZipFile(tmp, 'w', zipfile.ZIP_DEFLATED) as archive:\n\t\tarcname = './docs/'\n\t\tfor x in filelist:\n\t\t\tfilename = os.path.basename(x[1])\n\t\t\t_file = x[0]\n\t\t\t# make sure we're at the start...\n\t\t\t_file.seek(0)\n\t\t\tarchive.write(_file.name, arcname=os.path.join(arcname, filename))\n\n\t# Reset file pointer\n\ttmp.seek(0)\n\n\treturn tmp\n\n\t\t# Write file data to response\n\t\t# return HttpResponse(tmp.read(), content_type='application/x-zip-compressed')", "def _download_archive(self):\n _logger.debug('Downloading archive...')\n response = urlopen(self.url)\n\n with open(self._archive_full_path, 'wb') as archive_file:\n chunk_size = 1024 * 1024 # 1 MB\n chunk = response.read(chunk_size)\n\n while chunk:\n archive_file.write(chunk)\n chunk = response.read(chunk_size)\n\n _logger.debug('Archive {name} has been successfully downloaded.'.format(name=self.archive_name))", "def archive(self):\n logging.info(_('Creating compressed archive...'))\n\n report_file_ext = 'bz2'\n compressor = 'bzip2'\n caller = Caller({})\n try:\n caller.call('xz --version')\n report_file_ext = 'xz'\n compressor = 'xz'\n except Exception:\n logging.debug('xz compression not available')\n\n if not os.path.exists(self.conf[\"output\"]):\n os.makedirs(self.conf[\"output\"])\n\n self.conf[\"path\"] = os.path.join(\n self.conf[\"output\"],\n \"sosreport-%s-%s.tar.%s\" % (\n 'LogCollector',\n time.strftime(\"%Y%m%d%H%M%S\"),\n report_file_ext\n )\n )\n\n if self.conf[\"ticket_number\"]:\n self.conf[\"path\"] = os.path.join(\n self.conf[\"output\"],\n \"sosreport-%s-%s-%s.tar.%s\" % (\n 'LogCollector',\n self.conf[\"ticket_number\"],\n time.strftime(\"%Y%m%d%H%M%S\"),\n report_file_ext\n )\n )\n\n config = {\n 'report': os.path.splitext(self.conf['path'])[0],\n 'compressed_report': self.conf['path'],\n 'compressor': compressor,\n 'directory': self.conf[\"local_tmp_dir\"],\n 'rname': os.path.basename(self.conf['path']).split('.')[0],\n }\n caller.configuration = config\n shutil.move(\n os.path.join(\n self.conf[\"local_tmp_dir\"],\n 'working'\n ),\n os.path.join(\n self.conf[\"local_tmp_dir\"],\n config[\"rname\"]\n ),\n )\n caller.call(\"tar -cf '%(report)s' -C '%(directory)s' '%(rname)s'\")\n shutil.rmtree(self.conf[\"local_tmp_dir\"])\n caller.call(\"%(compressor)s -1 '%(report)s'\")\n os.chmod(self.conf[\"path\"], stat.S_IRUSR | stat.S_IWUSR)\n sha256_out = caller.call(\"sha256sum '%(compressed_report)s'\")\n checksum = sha256_out.split()[0]\n with open(\"%s.sha256\" % self.conf[\"path\"], 'w') as checksum_file:\n checksum_file.write(sha256_out)\n\n msg = ''\n if os.path.exists(self.conf[\"path\"]):\n archiveSize = float(os.path.getsize(self.conf[\"path\"])) / (1 << 20)\n\n size = '%.1fM' % archiveSize\n\n msg = _(\n 'Log files have been collected and placed in {path}\\n'\n 'The sha256 for this file is {checksum} and its size is {size}'\n ).format(\n path=self.conf[\"path\"],\n size=size,\n checksum=checksum,\n )\n\n if archiveSize >= 1000:\n msg += _(\n '\\nYou can use the following filters -c, -d, -H 
in the '\n 'next execution to limit the number of Datacenters,\\n'\n 'Clusters or Hosts that are collected in order to '\n 'reduce the archive size.'\n )\n return msg", "def sdist(options):\r\n pass", "def deploy():\n require(\"hosts\", provided_by=[production, staging])\n env.release = time.strftime(\"%Y-%m-%d_%H:%M:%S\")\n upload_tar_from_git()\n install_requirements()\n setup_webserver()\n symlink_current_release()\n restart_webserver()", "def update_rootfs_archive(self):\n logging.info(\"starting to update rootfs archive\")\n\n # Remove existing archive before generating the new one\n try:\n if os.path.isfile(self.project.archive_filename):\n logging.info(\"removing previous archive file : \" + self.project.archive_filename)\n os.remove(self.project.archive_filename)\n\n # Catch file removal exceptions\n except OSError as exception:\n logging.critical(\"Error: %s - %s.\", exception.filename, exception.strerror)\n self.cleanup_installation_files()\n exit(1)\n\n # Create the new archive\n cache_archive = tarfile.open(self.project.archive_filename)\n cache_archive.add(name=self.project.rootfs_mountpoint)\n cache_archive.close()", "def zipfiles (downloadable, name):\n\n print \"compressing files. almost done.\"\n import zipfile\n for book in downloadable:\n if (os.path.exists(os.path.join(name, book[1]))):\n files = os.listdir(os.path.join(name, book[1]))\n cbz = zipfile.ZipFile(os.path.join(name, name + '-' + book[1] + '.cbz'), 'w')\n for file in files:\n cbz.write(os.path.join(name, book[1],file))\n cbz.close()", "def backup(self):\n if self.url is not None:\n\n # zip backup folder\n zipapp.create_archive(self.logs_directory, self.send_zip)\n\n # then send zipped folder to the URL\n try:\n requests.post(self.url, files={\n 'uploaded_file': (os.path.basename(self.send_zip), open(self.send_zip, 'rb')),\n })\n except requests.exceptions.ConnectionError as error:\n print(error)" ]
[ "0.7481866", "0.7308439", "0.717865", "0.70625347", "0.70540977", "0.6997107", "0.69896996", "0.6985268", "0.6972793", "0.6966511", "0.6944373", "0.69133264", "0.68920314", "0.6885152", "0.68583626", "0.68551016", "0.6854341", "0.68525314", "0.68385327", "0.681433", "0.6787846", "0.67804325", "0.6770692", "0.6757624", "0.6738277", "0.6732619", "0.67149985", "0.671126", "0.66907954", "0.66871643", "0.667895", "0.667712", "0.6676645", "0.6665134", "0.6657859", "0.66436195", "0.6641538", "0.663876", "0.66356134", "0.66198075", "0.6618094", "0.66142863", "0.658926", "0.65892524", "0.6557225", "0.6557087", "0.6544556", "0.6544389", "0.6544134", "0.6541009", "0.6539012", "0.64698863", "0.64509815", "0.64494324", "0.6441969", "0.6432309", "0.64295495", "0.63977903", "0.6392973", "0.63713247", "0.63622797", "0.6341991", "0.6330732", "0.6325364", "0.6318153", "0.62888366", "0.62639666", "0.6251908", "0.61226726", "0.6115114", "0.61098564", "0.60883516", "0.607029", "0.60384434", "0.6021044", "0.59841007", "0.5982305", "0.5976102", "0.5966733", "0.5966733", "0.5966733", "0.59328496", "0.5919376", "0.5912116", "0.58950835", "0.58948493", "0.58907795", "0.58644897", "0.5852633", "0.5804433", "0.579209", "0.5790288", "0.57677186", "0.57490194", "0.57259214", "0.5724002", "0.572249", "0.571782", "0.5706398", "0.5702486" ]
0.6309926
65
To search and replace the text config string in the file
def search_and_replace_config(param_dict, config_file):
    if param_dict is None:
        raise ValueError('could not find parameters to update the configuration: %s' % param_dict)
    if config_file is None:
        raise ValueError('could not find config file to update the configuration: %s' % config_file)
    search_users_config = param_dict['search_users_config']
    replace_users_config = param_dict['replace_users_config']
    search_processes_config = param_dict['search_processes_config']
    replace_processes_config = param_dict['replace_processes_config']
    search_users_config_ubuntu = param_dict['search_users_config_ubuntu']
    replace_users_config_ubuntu = param_dict['replace_users_config_ubuntu']
    search_processes_config_ubuntu = param_dict['search_processes_config_ubuntu']
    replace_processes_config_ubuntu = param_dict['replace_processes_config_ubuntu']
    print ("File to perform search and replace on: %s" % config_file)
    modify_config_file(param_dict, config_file, search_users_config, replace_users_config)
    modify_config_file(param_dict, config_file, search_processes_config, replace_processes_config)
    modify_config_file(param_dict, config_file, search_users_config_ubuntu, replace_users_config_ubuntu)
    modify_config_file(param_dict, config_file, search_processes_config_ubuntu, replace_processes_config_ubuntu)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def modify_config_file(config_file, search_config, replace_config):\n with open(config_file, 'r+') as f:\n content = f.read()\n f.seek(0)\n f.write(content.replace(search_config, replace_config))\n f.truncate()\n f.close()", "def replace_includes(self, file_name):\n\n indexBegin = 0\n indexEnd = 0\n text = self.dir_helper.read_file(file_name)\n while indexBegin != -1:\n indexBegin = text.find('\\input{', indexBegin+1)\n indexEnd = text.find('}', indexBegin+1)\n text_to_replace = text[indexBegin:indexEnd+1]\n if indexBegin != -1:\n # print 'text_to_replace : ' + text_to_replace\n new_path = self.construct_path(text_to_replace)\n new_text = self.replace_includes(file_name = new_path)\n text = text.replace(text_to_replace, new_text)\n\n return text", "def config_edits(configfile):\n try:\n\n # Read in the file\n filedata = None\n with open(configfile, 'r') as file:\n filedata = file.read()\n\n # Replace the target string\n filedata = filedata.replace(\n '/home/scratch01/sradanov/A2C2/NCEP/', '').replace('/home/estimr2/sradanov/Operational/', '')\n\n # Write the file out again\n with open(configfile, 'w') as file:\n file.write(filedata)\n\n LOGGER.info('configfile modified')\n except Exception:\n LOGGER.exeption('Failed to modify configfile:')\n\n return configfile", "def replace(file,original_text,replacement_text):\n with open(file, \"rt\") as fin:\n with open(str(file+\"temp\"), \"wt\") as fout:\n for line in fin:\n fout.write(line.replace(original_text,replacement_text))\n os.rename(str(file+\"temp\"),file)\n return", "def _config_file_content_substitute(self, filename, keys):\n # Open the file for substitution\n try:\n f = open(filename, \"r\")\n lines = f.readlines()\n f.close()\n except:\n self.log.warning(\"Error reading from parameter file \" + filename + \".\")\n raise\n\n try:\n # Backup the original file\n f = open(filename + \".original\", \"w\")\n f.write(\"\".join(lines))\n f.close()\n except:\n self.log.warning(\"Error making a backup file of \" + filename + \". Skipped.\")\n\n # Define the fields within the file\n fields = 2\n keyfield = 0\n datafield = 1\n if \"dacParameters\" in filename or \"tbmParameters\" in filename or \"tbParameters\" in filename:\n fields = 3\n keyfield = 1\n datafield = 2\n\n keys_replaced = []\n # iterate over all lines\n for i in range(len(lines)):\n line = lines[i].strip()\n if len(line) == 0 or line[0] == '-' or line[0] == '#':\n continue\n line = line.split(None, fields - 1)\n if len(line) != fields:\n continue\n # check whether this line matches a key\n if not line[keyfield] in keys:\n continue\n line[datafield] = keys[line[keyfield]]\n keys_replaced.append(line[keyfield])\n if line[datafield].startswith('DTB') and line[keyfield] == 'id':\n lines[i] = \" : \".join(line)\n lines[i] += '\\n'\n else:\n lines[i] = \" \".join(line)\n lines[i] += '\\n'\n try:\n RequireTestParametersExisting = self.init.get('VerifyTestParameters', 'CheckExistence').strip().lower() == 'true'\n except:\n RequireTestParametersExisting = False\n\n for key in keys:\n if not key in keys_replaced:\n WarningMessage = \"Warning: key '%s' in file '%s' does not exist! 
Update '%s' file in parameters directory or 'Tests *' section in ini file!\"%(key, filename, filename)\n self.log.warning(WarningMessage)\n if RequireTestParametersExisting:\n raise Exception(WarningMessage)\n\n try:\n # Write the new file\n f = open(filename, \"w\")\n f.write(\"\".join(lines))\n f.close()\n except:\n self.log.warning(\"Error saving parameters in \" + filename + \".\")\n raise", "def config_replace(context,target,filename):\n\n result = context.get_operation('config_replace')\n return result", "def edit_toml(file, key_word, phrase_to_replace):\n f = open(file, \"r+\")\n\n phrase = \"\"\n while True:\n line = f.readline()\n if not line:\n break\n if line.count(key_word) > 0:\n print('%s found in the code, about to change with given phrase' % key_word)\n phrase += phrase_to_replace+'\\n'\n else:\n phrase += line\n f.close()\n f = open(file, \"w+\")\n f.write(phrase)\n print(\"Successfully replaced with given phrase!\")\n f.close()", "def replace_in_file(file_path, find, replace):\n\tcontent = read_file(file_path)\n\tcontent = content.replace(find, replace)\n\twrite_file(file_path, content)", "def search_replace(path, f):\n\n base, name = os.path.split(path)\n\n while True:\n tmp_path = '%s.%s.sr~' % (path, str(time.time()).replace('.', ''))\n if not os.path.exists(tmp_path):\n break\n\n data = f.read()\n\n if search_line(data):\n print path\n\n if config.explain:\n return\n\n # Read and create new data\n data = config.search.sub(config.replace, data)\n\n try:\n open(tmp_path, 'w').write(data)\n except IOError, ex:\n logging.error('Can\\'t create temporary file for %s' % path)\n return\n\n if config.backup: \n # Create backup\n backup_path = None\n for count in xrange(999):\n test_path = os.path.join(base, '.%s.%03d~' % (name, count))\n if not os.path.exists(test_path):\n backup_path = test_path\n break\n if not backup_path:\n logging.error('Can\\'t create backup for %s. File was not modified' % path)\n return\n\n try:\n if config.backup:\n shutil.copy(path, backup_path)\n else:\n pass\n # yep, we do nothing in this try block\n except IOError, ex:\n logging.error(ex)\n else:\n try:\n shutil.copy(tmp_path, path)\n except IOError, ex:\n logging.error(ex)\n finally:\n os.unlink(tmp_path)", "def replace_filevalue(file_name, orgval, newval):\n for line in fileinput.input(file_name, inplace = 1):\n print line.replace(str(orgval), str(newval)),\n fileinput.close()", "def edit_xml(file, key_word, phrase_to_replace):\n f = open(file, \"r+\")\n\n phrase = \"\"\n while True:\n line = f.readline()\n if not line:\n break\n if line.count(key_word) > 0:\n print('%s found in the code, about to change with given phrase...' % key_word)\n phrase += phrase_to_replace\n else:\n phrase += line\n f.close()\n f = open(file, \"w+\")\n f.write(phrase)\n print(\"Successfully replaced with given phrase!\")\n f.close()", "def substitute(filename, key, value):\n try:\n unmodified_file = open(filename, \"r\")\n initial_text = unmodified_file.read()\n unmodified_file.close()\n modified_text = string.replace(initial_text, key, value)\n modified_file = open(filename, \"w\")\n modified_file.write(modified_text)\n modified_file.close()\n except IOError:\n output(\"Could not substitute \\\"\" + value + \"\\\" for \\\"\" + key + \\\n \"\\\" in file \\\"\" + filename + \"\\\".\\nExiting... 
(Sorry!)\\n\")\n sys.exit(1)", "def substitute_string_in_tstest_file(file_name, replacements):\n lines = []\n infile = codecs.open(file_name, 'r', encoding='utf-16')\n for line in infile:\n for src, target in replacements.iteritems():\n line = line.replace(src, target)\n lines.append(line)\n infile.close()\n\n outfile = codecs.open(file_name, 'w', encoding='utf-16')\n outfile.writelines(lines)\n outfile.close()", "def updateTemplateFile(self, source, placeHolder, value):\n source_file = open(source).read()\n source_file = source_file.replace(placeHolder, value)\n updated_file = open(source, 'w')\n updated_file.write(source_file)\n updated_file.close()", "def replace(self, text):\n for key, val in self.env.items():\n text = text.replace(\"$%s\" % key, val)\n return text", "def replaceStringInFile():\n sel = nuke.selectedNodes()\n pane = nuke.Panel('replace string in file knob')\n pane.addSingleLineInput('replace this', '')\n pane.addSingleLineInput('by this', '')\n val = pane.show()\n\n if val and sel:\n for node in sel:\n try:\n str1 = pane.value('replace this')\n str2 = pane.value('by this')\n file = str(node['file'].value())\n newfile = file.replace(str1, str2)\n node['file'].setValue(newfile)\n print 'replacing string in', node.name()\n except:\n print 'failed on', node.name()", "def template_replace(template, replace_map, result):\n # Read content of source file.\n with open(template) as fp:\n lines = fp.readlines()\n # Replace placeholders.\n for key, value in list(replace_map.items()):\n for i, line in enumerate(lines):\n # Ignore VHDL comments\n if not line.strip().startswith('--'):\n lines[i] = line.replace(key, value)\n # Write content to destination file.\n with open(result, 'w') as fp:\n fp.write(''.join(lines))", "def loadText(self,filePath):\n ins = file(filePath,'r')\n reComment = re.compile(r\"#.*\")\n reSection = re.compile(r'@ +(srcmod|replace)',re.M)\n reReplace = re.compile(r\"(\\w[-\\w ']+)\\s*:\\s*(.+)\")\n reNewIds = re.compile(r\",\\s*\")\n mode = None\n for line in ins:\n line = reComment.sub('',line.strip())\n maSection = reSection.match(line)\n if maSection:\n mode = maSection.group(1)\n elif not line: #--Empty/comment line\n pass\n elif mode == 'srcmod':\n self.srcModName = line\n elif mode == 'replace':\n maReplace = reReplace.match(line)\n if not maReplace: continue\n oldId = maReplace.group(1)\n self.newIds[oldId.lower()] = reNewIds.split(maReplace.group(2))\n ins.close()", "def update_file(filename, sentinel, text):\n content = None\n with codecs.open(filename, 'r', encoding='utf-8') as handle:\n content = handle.read()\n\n replacement = u\"{0}\\n\\n{1}\".format(sentinel, text)\n content = content.replace(sentinel, replacement, 1)\n with codecs.open(filename, 'w', encoding='utf-8') as handle:\n handle.write(content)\n return", "def parse_file_replace(path, args):\n try:\n fisier = open(path, 'r')\n except IOError:\n print(\"Nu am putut deschide fisierul :\", path)\n return\n full_data = fisier.read()\n fisier.close()\n\n try:\n fisier = open(path, \"w+\")\n except IOError:\n print(\"Nu am putut deschide fisierul :\", path)\n return\n\n data = \"\"\n for line in full_data:\n data += line\n\n if args.ignore_case:\n pattern = re.compile(re.escape(args.pattern), re.IGNORECASE)\n pattern.sub(args.pattern, data)\n else:\n data = data.replace(args.pattern, args.replace)\n\n fisier.write(data)\n fisier.close()", "def edit_cfg(config_file):\n\n GUI().cfgEditor(config_file)", "def line_replacer(config,change_this_line,key):\n for arg in config['HyperParameter'][key]: 
\n pattern=r'{}[ ]*=.*,'.format(arg)\n replace_value=config['HyperParameter'][key][arg][counter]\n if type(replace_value) is str:\n replace_value=\"'\"+replace_value+\"'\"\n change_this_line=re.sub(pattern,\"{}= {},\".format(arg,replace_value),change_this_line)\n return change_this_line", "def load_from_string(self, config_text):\n\n for sub in self.options['full_text_sub']:\n config_text = re.sub(\n sub['search'],\n sub['replace'],\n config_text)\n\n current_section = self\n current_section.real_indent_level = -1\n most_recent_item = current_section\n indent_adjust = 0\n end_indent_adjust = []\n temp_banner = []\n in_banner = False\n banner_end_lines = ['EOF', '%', '!']\n banner_end_contains = []\n\n def end_of_banner_test(config_line):\n \"\"\"\n :param config_line: type str\n :return: boolean\n \"\"\"\n if config_line.startswith('^'):\n return True\n elif config_line in banner_end_lines:\n return True\n elif any([c in config_line for c in banner_end_contains]):\n return True\n return False\n\n for line in config_text.splitlines():\n # Process banners in configuration into one line\n if in_banner:\n if line != '!':\n temp_banner.append(line)\n\n # Test if this line is the end of a banner\n if end_of_banner_test(str(line)):\n in_banner = False\n most_recent_item = self.add_child(\n \"\\n\".join(temp_banner), True)\n most_recent_item.real_indent_level = 0\n current_section = self\n temp_banner = []\n continue\n else:\n # Test if this line is the start of a banner\n if line.startswith('banner '):\n in_banner = True\n temp_banner.append(line)\n banner_words = line.split()\n try:\n banner_end_contains.append(banner_words[2])\n banner_end_lines.append(banner_words[2][:1])\n banner_end_lines.append(banner_words[2][:2])\n except IndexError:\n pass\n continue\n\n actual_indent = len(line) - len(line.lstrip())\n line = ' ' * actual_indent + ' '.join(line.split())\n for sub in self.options['per_line_sub']:\n line = re.sub(\n sub['search'],\n sub['replace'],\n line)\n line = line.rstrip()\n\n # If line is now empty, move to the next\n if not line:\n continue\n\n # Determine indentation level\n this_indent = len(line) - len(line.lstrip()) + indent_adjust\n\n line = line.lstrip()\n\n # Walks back up the tree\n while this_indent <= current_section.real_indent_level:\n current_section = current_section.parent\n\n # Walks down the tree by one step\n if this_indent > most_recent_item.real_indent_level:\n current_section = most_recent_item\n\n most_recent_item = current_section.add_child(line, True)\n most_recent_item.real_indent_level = this_indent\n\n for expression in self.options['indent_adjust']:\n if re.search(expression['start_expression'], line):\n indent_adjust += 1\n end_indent_adjust.append(expression['end_expression'])\n break\n if end_indent_adjust and re.search(end_indent_adjust[0], line):\n indent_adjust -= 1\n del (end_indent_adjust[0])\n\n # Assert that we are not in a banner still for some reason\n assert not in_banner\n\n if self.host.os in ['ios']:\n self._remove_acl_remarks()\n self._add_acl_sequence_numbers()\n self._rm_ipv6_acl_sequence_numbers()\n\n return self", "def repl_file(self, dir, file, dirkey, filekey, txtkey):\n startloc = os.path.join(self.loc, dir, file)\n newdir = self.dictreplace(dir, dirkey)\n newfile = self.dictreplace(file, filekey)\n enddir = os.path.join(self.loc, newdir)\n endloc = os.path.join(enddir, newfile)\n if not os.path.exists(enddir):\n os.makedirs(enddir)\n if startloc != endloc:\n print(\"Reading \" + startloc)\n print(\"Writing \" + endloc)\n 
self.replace_all_vals(startloc, endloc, txtkey)", "def options_substitute(self, configfile, options):\n\n # Parse config file in-place and replace the desired options+values\n for line in fileinput.input(configfile, inplace=True, backup=\".bak\"):\n # Check if the current line contains any of the desired options\n for opt, value in options.items():\n if opt in line:\n # Now count unknown words and try to judge if this is\n # real configuration or just a comment\n unknown = 0\n for word in line.split():\n if word == '#' or word == '\\t':\n continue\n elif word == opt:\n # If a foreign word went before our option identifier,\n # we are not in code but in comments\n if unknown != 0:\n unknown = 2\n break\n else:\n unknown += 1\n\n # Only consider the line as the actual code when the keyword\n # is followed by exactly one word value. Otherwise consider this\n # line as plain comment and leave intact\n if unknown == 1:\n # Convert value into string representation in spd_val\n if isinstance(value, bool):\n if value:\n spd_val = \"1\"\n elif not value:\n spd_val = \"2\"\n elif isinstance(value, int):\n spd_val = str(value)\n else:\n spd_val = str(value)\n\n print(opt + \" \" + spd_val)\n break\n\n else:\n print(line, end=' ')", "def change_content(options):\n call_command('''grep -r -l -- '%(patrn)s' . | tr '\\\\n' '\\\\0' | xargs -0 sed -i \"s/%(patrn)s/%(repl)s/g\"''', options)", "def change_line(file_path, included_strings, excluded_strings, replacement):\n if not os.path.isfile(file_path):\n print file_path+\" file not found!\"\n return\n temp_path = file_path+\"_temp\"\n temp_file = open(temp_path, 'w')\n with open(file_path, 'r') as f:\n for line in f:\n if all([x in line for x in included_strings]) and \\\n all([x not in line for x in excluded_strings]):\n temp_file.write(replacement)\n else:\n temp_file.write(line)\n temp_file.close()\n os.system(\"mv \"+temp_path+\" \"+file_path)\n return", "def findAndReplace(replace_dict, filename_in, filename_out, test_mode=False):\n f = open(filename_in)\n lines = f.readlines()\n f.close()\n \n print \"Generating %s\"%filename_out\n for i in range(len(lines)):\n for k in replace_dict.keys():\n (lines[i], n_subs) = re.subn(\"\\$\"+k+\"\\$\", str(replace_dict[k]), lines[i])\n \n if n_subs > 0:\n print \" \\'%s\\' -> \\'%s\\'\"%(k, replace_dict[k])\n \n if not test_mode:\n print \"Writing to file %s\"%filename_out \n fo = open(filename_out, \"w\")\n fo.writelines(lines)\n fo.close()", "def test_replaceInFile(self):\n in_ = 'foo\\nhey hey $VER\\nbar\\n'\n outf = open('release.replace', 'w')\n outf.write(in_)\n outf.close()\n\n expected = in_.replace('$VER', '2.0.0')\n replaceInFile('release.replace', {'$VER': '2.0.0'})\n self.assertEquals(open('release.replace').read(), expected)\n\n\n expected = expected.replace('2.0.0', '3.0.0')\n replaceInFile('release.replace', {'2.0.0': '3.0.0'})\n self.assertEquals(open('release.replace').read(), expected)", "def test_replaceInFile(self):\n content = \"foo\\nhey hey $VER\\nbar\\n\"\n with open(\"release.replace\", \"w\") as outf:\n outf.write(content)\n\n expected = content.replace(\"$VER\", \"2.0.0\")\n replaceInFile(\"release.replace\", {\"$VER\": \"2.0.0\"})\n with open(\"release.replace\") as f:\n self.assertEqual(f.read(), expected)\n\n expected = expected.replace(\"2.0.0\", \"3.0.0\")\n replaceInFile(\"release.replace\", {\"2.0.0\": \"3.0.0\"})\n with open(\"release.replace\") as f:\n self.assertEqual(f.read(), expected)", "def _replace_config_variables(self, string, node_id, cluster_name, region):\n\n if 
node_id:\n string = string.replace(\"{instance_id}\", node_id)\n if cluster_name:\n string = string.replace(\"{cluster_name}\", cluster_name)\n if region:\n string = string.replace(\"{region}\", region)\n return string", "def str_entered(self, tf, name):\n section, option = name\n text = tf.text\n _stash.config.set(section, option, text)\n self.save()", "def repdictval(fname , paramdict , oname , ignorestrings=['#'], dictdelim = '='):\n\tf = open(fname, \"r\")\n\tline = f.readline()\n\ti = 0\n\tw = open(oname, \"w\")\n \n\n while line != '': \n tmp = line.strip()\n if tmp :\n for st in ignorestrings:\n\n tokens = tmp.split(st)\n\t\t\t\trelevant = tokens[0]\n\t\t\t\tlength =len(tokens)\n\n \tif len(relevant) >1: \n\t\t\t\t\t\t#Not a comment line\n \ttp = relevant.split(dictdelim)\n \t\tkey = tp[0].strip()\n\t\t\t\t\tval = tp[1].strip()\n\t\t\t\t\t\t#replace val\n \tmyval = paramdict[str(key)]\n\t\t\t\t\tmyline = key + ' ' + dictdelim + ' ' \n\t\t\t\t\tcomm =''\n\t\t\t\t\tif val != myval:\n\t\t\t\t\t \tcomm=\"\\n\" + ignorestrings[0]+\"replaced\"\n\t\t\t\t\tmyline += str(myval) +\"\\n\"+ str(comm) +\"\\n\" \n\t\t\t\t\tw.writelines(myline) \n\t\t\t\telse:\n\t\t\t\t\t\t#comment line, so just write \n\t\t\t\t\t#print line\n\t\t\t\t\tw.writelines(line)\n\t\t\t\t\n line=f.readline()\n \n f.close()\n\tw.close()", "def append_after(filename=\"\", search_string=\"\", new_string=\"\"):\n with open(filename, \"r+\") as txt_file:\n lines = []\n for line in txt_file:\n lines.append(line)\n if search_string in line:\n lines.append(new_string)\n with open(filename, \"w+\") as txt_file:\n txt_file.write(\"\".join(lines))", "def replace(self, pat, repl):\n re_pat = re.compile(pat)\n for infilename in self.file_names:\n infile = open(infilename, 'r')\n for line in infile:\n line = line.rstrip()\n line1 = re_pat.sub(repl, line)\n if line1 != line:\n print 'Repl: %s' % (line1, )", "def main():\n # Default values.\n configFile = None\n replaceDict = {}\n inFile = sys.stdin\n outFile = sys.stdout\n \n # Get command line options and arguments.\n try:\n opts, args = getopt.getopt(sys.argv[1:], 'c:i:o:r:h', ['configFile=',\n 'infile=', 'outfile=', 'replace=', 'help'])\n except getopt.GetoptError as e:\n print(str(e))\n print_usage()\n sys.exit(2)\n \n # Process command line options and arguments.\n for opt, arg in opts:\n if opt in ('-c', '--configFile'):\n print(\"Processed -c.\")\n \n try:\n configFile = io.open(arg, 'r')\n except Exception as e:\n print(str(e))\n raise\n \n for line in configFile:\n try:\n k, v = line.split(':')\n except Exception as e:\n continue\n k = k.strip()\n v = v.strip()\n replaceDict[k] = v\n \n if not configFile.closed:\n configFile.close()\n elif opt in ('-i', '--inFile'):\n print(\"Processed -i.\")\n try:\n inFile = open(arg, 'r')\n except Exception as e:\n print(str(e))\n raise\n elif opt in ('-o', '--outFile'):\n print(\"Processed -o.\")\n if os.path.isfile(arg):\n os.rename(arg, arg+'.bkp')\n try:\n outFile = open(arg, 'w')\n except Exception as e:\n print(str(e))\n raise\n elif opt in ('-r', '--replace'):\n try:\n k, v = arg.split(':')\n k = k.strip()\n v = v.strip()\n replaceDict[k] = v\n except Exception as e:\n continue\n elif opt in ('-h', '--help'):\n print(\"Processed -h.\")\n print_usage()\n else:\n print(\"Invalid command line option. 
Try again.\")\n print_usage()\n sys.exit(2)\n \n # Replace placeholder tags.\n replace_line = ''\n for line in inFile:\n for k in replaceDict.iterkeys():\n try:\n line = string.replace(line, k, replaceDict[k])\n except Exception as e:\n print(str(e))\n outFile.write(line)\n \n if not outFile.closed:\n outFile.close()\n if not inFile.closed:\n inFile.close()", "def get_replacement():\n run_linter_throw(\"path/to/file\",\n contents,\n FormatStyle(\"#\", \"#\", \"\"),\n whitelist=[\"file/trailing_whitespace\"])", "def search_and_add(_file, search_string, new_string):\n with open(_file, encoding='utf-8') as f:\n buf = f.readlines()\n new_array = []\n for line in buf:\n new_array.append(line)\n if line == search_string:\n new_array.append(new_string)\n\n with open(_file, 'w') as f:\n for item in new_array:\n f.write(item)", "def setconfig(filepath, param, value):\n\n with open(filepath, 'rb') as f:\n lines = f.readlines()\n with open(filepath, 'wb') as f:\n updated = False\n for line in lines:\n if line.strip().startswith('#') or '=' not in line:\n # keep comments and other non informative lines unchanged\n f.write(line)\n continue\n k, v = line.split('=', 1)\n if k.strip() == param:\n # update with new value\n f.write('%s=%s\\n' % (param, value))\n updated = True\n else:\n # keep line unchanged\n f.write(line)\n if not updated:\n # append the new param at the end of the file\n f.write('%s=%s\\n' % (param, value))", "def search_and_replace(s_word: str, r_word: str, file):\n text = read_file(file)\n for j in range(len(text)):\n words = text[j].split(' ')\n for i in range(len(words)):\n # if word is last in line and it is not empty line\n if words[i][-1:] == '\\n' and len(words[i]) > 2:\n if words[i][-2] in string.punctuation:\n sym = words[i][-2]\n if words[i][:-2] == s_word:\n words[i] = r_word + sym + '\\n'\n # if word's last symbol is punctuation\n elif words[i][-1] in string.punctuation:\n sym = words[i][-1]\n if words[i][:-1] == s_word:\n words[i] = r_word + sym\n # if last 2 symbols is 's (e.g. 
John's; cat's; land's)\n elif words[i][-2:] == '\\'s':\n if words[i][:-2] == s_word:\n words[i] = r_word + '\\'s'\n elif words[i] == s_word:\n words[i] = r_word\n text[j] = ' '.join(words)\n write_file(file, text)", "def replace_text(self,\n\t text,\n\t fname,\n\t pattern=None,\n\t expect=None,\n\t shutit_pexpect_child=None,\n\t note=None,\n\t before=False,\n\t force=False,\n\t line_oriented=True,\n\t loglevel=logging.DEBUG):\n\t\tshutit_global.shutit_global_object.yield_to_draw()\n\t\treturn self.change_text(text,\n\t\t fname,\n\t\t pattern,\n\t\t expect,\n\t\t shutit_pexpect_child,\n\t\t before,\n\t\t force,\n\t\t note=note,\n\t\t line_oriented=line_oriented,\n\t\t replace=True,\n\t\t loglevel=loglevel)", "def search_replace(filename, search, replace):\n with open(filename, 'r') as f:\n filedata = f.read()\n modified_data = re.sub(search, replace, filedata, flags=re.M)\n with open(filename, 'w') as f:\n f.write(modified_data)", "def sed(file_path, search_pattern, replace_string):\n if isinstance(file_path, str):\n edit_file = Path(file_path)\n else:\n edit_file = file_path\n\n search_pattern = re.compile(rf'{search_pattern}')\n\n if not edit_file.exists():\n logging.error(f'Given file path {file_path} does not exist')\n return 1\n\n with open(edit_file, 'r') as read_file:\n file_contents = read_file.readlines()\n\n with open(edit_file, 'w') as write_file:\n for line in file_contents:\n write_file.write(re.sub(search_pattern, replace_string, line))\n\n return 0", "def replace(self, mapping: dict):\n file = self.project_dir / self.bash_script\n\n # read file\n with open(file) as f:\n content = f.read()\n\n # change its content (flags)\n fixed = content.replace(\n mapping[\"tests\"][\"original\"], mapping[\"tests\"][\"replacement\"]\n ).replace(mapping[\"class\"][\"original\"], mapping[\"class\"][\"replacement\"])\n\n # write to file\n with open(file, \"w\") as f:\n f.write(fixed)", "def replace_text_tags(lines, file, file_provider, error_handler):\n ret = []\n info = {}\n for line in lines:\n m = re.search(r'{' + name_re + '}', line)\n if not m: continue\n try:\n filename = file['content'][m.group('name')]\n except KeyError as e: # content tag not found in manifest; error next time\n continue\n for k, v in list(extract_info(file_provider(filename)).items()):\n if k not in info:\n info[k] = v\n\n for i, line in enumerate(lines):\n m = re.search(r'{' + name_re + '}', line)\n if m:\n span = m.span()\n if re.search(r'{' + name_re + '}', line[span[1]:]):\n # TODO fix this bug for real\n error_handler('''warning: multiple replacements found on line %d of /%s, but only the first will be replaced''' % (i, file['path']))\n name = m.group('name')\n if name == '_now':\n content_lines = [str(NOW_TIME)]\n else:\n try:\n filename = file['content'][name]\n content_lines = file_provider(filename)\n got_info = False\n while re.match(info_re, content_lines[0]):\n content_lines = content_lines[1:]\n got_info = True\n if got_info:\n content_lines = content_lines[1:]\n if filename.endswith('.markdown'):\n content_lines = markdown.markdown(''.join(content_lines)).splitlines(True)\n except KeyError as e: # content tag not found in manifest\n try:\n content_lines = [info[name]]\n except KeyError as e: # no info found either\n error_handler('''warning: content tag %s found in /%s, but no replacement file or named info is specified''' % (e, file['path']))\n ret.append(line)\n continue\n ret += process_replacement_lines(line[:span[0]], line[span[1]:], content_lines)\n else:\n ret.append(line)\n return ret", "def 
load( self ):\n ini = codecs.open(self.filename,\"r\",\"utf-8\",errors=\"replace\",buffering=0)\n for l in ini:\n l = l.strip()\n if l:\n (name,value) = l.split(\"=\",1)\n self.conf[name.strip()] = value.strip()\n ini.close()", "def modifyNamed():\n try:\n nconfile = open('/etc/named.conf',\"r\")\n nconf=nconfile.readlines()\n nconfile.close()\n if (nconf.__contains__('zone \"e164.com\" in {\\n') == False):\n # Back up the original file on local host\n if (os.path.isfile('/etc/named.conf.bkup') == False):\n os.system('sudo cp /etc/named.conf /etc/named.conf.bkup')\n pos = nconf.index('include \"/etc/named.conf.include\";\\n')\n temp1 = 'zone \"e164.com\" in {\\n type master;\\n' + \\\n ' file \"e164.zone\";\\n};\\n'\n nconf.insert(pos,temp1)\n nconfile = open('/etc/named.conf',\"w\")\n nconfile.writelines(nconf)\n nconfile.close()\n else:\n log.info('File named.conf already contains e164 information')\n except Exception, e:\n msg = \"file error: %s\" % str(e)\n #32363 Modified to resolve string formatting error\n log.error('File named.conf does not exist %s' %str(msg))", "def substitute_macros(text):\n f_text = text\n for (pattern,replacement) in context.environment.items():\n replacement = replacement.replace(os.path.sep,'/')\n f_text = f_text.replace('$(%s)' % pattern.upper(), replacement)\n return f_text", "def get_replacement():\n run_linter_throw(\"path/to/file\",\n \"{s}\\n{m} Text{e}\",\n style,\n whitelist=[\"headerblock/filename\"])", "def replace_bibliography(self, text):\n\n indexBegin = 0\n indexEnd = 0\n indexBegin = text.find('\\\\bibliography{', indexBegin+1)\n indexEnd = text.find('}', indexBegin+1)\n text_to_replace = text[indexBegin:indexEnd+1]\n new_text = self.dir_helper.read_file(file_name = self.temp_dir + self.config.BBL_FILE)\n bbl_text = \"\"\n for line in new_text.split('\\n'):\n bbl_text = bbl_text + '\\t' + line + '\\n'\n text = text.replace(text_to_replace, bbl_text)\n\n return text", "def configure_template_file(outName, CONFIG_TXT):\n if os.path.isfile(outName):\n with open(outName, \"w\") as fid:\n fid.write(CONFIG_TXT)\n print('write configuration to file: {}'.format(outName))\n\n else:\n with open(outName, \"a\") as fid:\n fid.write(\"\\n\" + CONFIG_TXT)\n print('add the following to file: \\n{}'.format(outName))", "def parseConfigFindPath(stringFind,configFile):\n for line in configFile:\n if stringFind in line: # if find string specified, return pathname or specific value trying to find\n configFile.seek(0)\n return line.split()[-1].strip('\\n')\n configFile.seek(0)", "def replace_parts(file, file_out, replacements):\n # Read in original file\n with open(file, \"r\") as f:\n lines = f.readlines()\n\n # Replace lines in file\n for i, line in enumerate(lines[:]):\n # Replace file name and tag\n for key, val in replacements.items():\n if key in line:\n lines[i] = line.replace(str(key), str(val))\n\n with open(file_out, \"w\") as f:\n f.writelines(lines)", "def _change_file(file):\n\n with fileinput.FileInput(file, inplace=True, backup='.bak') as f:\n for index, line in enumerate(f):\n if index == 13:\n print(line.replace(line, line[15:]), end='')\n else:\n print(line.replace(line, line), end='')", "def substitute(files: str, pattern: str, replacement: str):\n with fileinput.input(\n files=glob.glob(files, recursive=True), inplace=True\n ) as file:\n for line in file:\n print(re.sub(pattern, replacement, line), end='')", "def test_config_from_text_wellformed_content():\n config_text = \"\"\"\n [resources]\n metadata_cache_uri = https://another-aqua.url\n 
\"\"\"\n config = Config(text=config_text)\n assert config.metadata_cache_uri == \"https://another-aqua.url\"", "def _config_regex(self):", "def changeParameterFile(speciesfolder, species):\n\twith open(\"{}/{}_parameters.cfg\".format(speciesfolder, species), \"r+\") as inFile:\n\t\tfor line in inFile:\n\t\t\tif \"generic\" in line:\n\t\t\t\tinFile.write(line.replace(\"generic\", species))\n\t\t\telse:\n\n\t\t\t\tinFile.write(line)", "def _customize_lis_config(lis_config_template, restart_dir, startdate):\n\n restart_file = _build_restart_filename(restart_dir, startdate)\n _check_restart_file(restart_file)\n\n enddate = startdate + datetime.timedelta(days=1)\n\n # Build dictionary storing replacements for certain lines\n linedict = {\n \"Start mode:\" : \"Start mode: restart\\n\",\n \"Starting year:\" : f\"Starting year: {startdate.year}\\n\",\n \"Starting month:\" : f\"Starting month: {startdate.month}\\n\",\n \"Starting day:\" : f\"Starting day: {startdate.day}\\n\",\n \"Starting hour:\" : \"Starting hour: 0\\n\",\n \"Starting minute:\" : \"Starting minute: 0\\n\",\n \"Starting second:\" : \"Starting second: 0\\n\",\n \"Ending year:\" : f\"Ending year: {enddate.year}\\n\",\n \"Ending month:\" : f\"Ending month: {enddate.month}\\n\",\n \"Ending day:\" : f\"Ending day: {enddate.day}\\n\",\n \"Ending hour:\" : \"Ending hour: 0\\n\",\n \"Ending minute:\" : \"Ending minute: 5\\n\",\n \"Ending second:\" : \"Ending second: 0\\n\",\n \"Noah.3.9 restart file:\" : \\\n f\"Noah.3.9 restart file: {restart_file}\\n\",\n }\n\n newlines = [] # List of lines for new file\n #with open(lis_config_template, \"r\", encoding=\"ascii\").readlines() as lines:\n # for line in lines:\n # for key, value in linedict.items():\n # if key in line:\n # line = value\n # newlines.append(line)\n\n with open(lis_config_template, \"r\", encoding=\"ascii\") as fobj:\n lines = fobj.readlines()\n for line in lines:\n for key, value in linedict.items():\n if key in line:\n line = value\n newlines.append(line)\n\n newfile = \"lis.config\"\n print(f\"[INFO] Writing {newfile} customized for new LIS run\")\n with open(newfile, \"w\", encoding=\"ascii\") as fobj:\n for line in newlines:\n fobj.write(line)\n fobj.close()", "def config(name, config, edit=True):\n\n configs = []\n for entry in config:\n key = next(iter(entry.keys()))\n configs.append(_parse_config(entry[key], key))\n\n # Python auto-correct line endings\n configstext = \"\\n\".join(salt.utils.data.decode(configs))\n if edit:\n with salt.utils.files.fopen(name, \"w\") as configfile:\n configfile.write(\"# This file is managed by Salt.\\n\")\n configfile.write(salt.utils.stringutils.to_str(configstext))\n return configstext", "def get_replacement():\n run_linter_throw(\"path/to/file\",\n \"{s}\\n{m} Text{e}\",\n style,\n whitelist=[\"file/newline_last_char\"])", "def editFile(self, filename, text, textSettingFunction=None):\n if filename is not None:\n alreadyExists = path.exists(filename)\n if text is not None and len(text) > 0:\n if textSettingFunction is not None:\n textSettingFunction(self.currentItem, True)\n with open(filename, 'w') as w:\n w.write(text)\n if self.manageSVN and not alreadyExists:\n Popen([\"svn\", \"add\", filename]).communicate()\n else:\n if textSettingFunction is not None:\n textSettingFunction(self.currentItem, False)\n if alreadyExists:\n if self.manageSVN:\n Popen([\"svn\", \"delete\", '--force', filename]).communicate()\n else:\n os.remove(filename)", "def use_config_file(self):\n self.config_file = self.find_config_file()\n if 
self.config_file:\n self.apply_config_file(self.config_file)", "def genconfig(infilename, definitions, outfilename):\n\t\n\twith open(infilename, \"r\") as infile:\n\t\ttext = infile.read()\n\t\n\ttemplate = string.Template(text)\n\ttext = template.safe_substitute(definitions)\n\t\n\twith open(outfilename, \"w\") as outfile:\n\t\toutfile.write(text)", "def append_after(filename=\"\", search_string=\"\", new_string=\"\"):\n with open(filename, 'r') as f:\n lines = f.readlines()\n with open(filename, 'w') as f:\n for line in lines:\n if search_string in line:\n f.write(line)\n f.write(new_string)\n else:\n f.write(line)", "def get_replacement():\n run_linter_throw(\"path/to/file\",\n \"{s}\\n{m} Text{e}\",\n style,\n whitelist=[\"headerblock/desc_space\"])", "def append_after(filename=\"\", search_string=\"\", new_string=\"\"):\n with open(filename, mode='r', encoding='utf-8') as f:\n lines = f.readlines()\n f. close()\n\n with open(filename, mode='w', encoding='utf-8') as f:\n for line in lines:\n f.write(line)\n if search_string in line:\n f.write(new_string)\n\n f.close()", "def _change_references(path, name, val):\n\n text = _open_file(path)\n for row in text.split('\\n'):\n if row.startswith(name + \"=\"):\n row = f'{name}={val}'\n yield row", "def update_conf_file():\n filepath = remote_dir + \"/apache2/conf/httpd.conf\"\n fabric.contrib.files.sed(filepath, 'myproject', project_name)", "def test_modify_file():\n\n def replace_word(line):\n return line.replace(\"World\", \"Disco\")\n\n txt_file = os.path.join(tempfile.gettempdir(), \"jade-unit-test-file.txt\")\n if os.path.exists(txt_file):\n os.remove(txt_file)\n with open(txt_file, \"w\") as f:\n f.write(\"Hello World\")\n\n modify_file(txt_file, replace_word)\n with open(txt_file, \"r\") as f:\n data = f.read()\n assert data == \"Hello Disco\"\n\n if os.path.exists(txt_file):\n os.remove(txt_file)", "def replace(mapping_dic, f_tr):\n f_opt_tr = f_tr.replace('.tr', '.opt.tr')\n f_tr = open(f_tr)\n f_opt_tr = open(f_opt_tr, 'w')\n for line in f_tr:\n if line.startswith('module: '):\n tokens = line.split(' ')\n module_name = tokens[1]\n mapping = mapping_dic[module_name].split()\n d = {}\n i = 0\n for item in mapping:\n d[int(item)] = i\n i += 1\n if 'SRC' not in line:\n f_opt_tr.write(line)\n continue\n line = re.sub(r'SRC: (\\d+)', lambda match: 'SRC: '+str(d.get(int(match.group(1)), 'SOMETHING WENT WRONG' + match.group(1))), line)\n line = re.sub(r'DST: (\\d+)', lambda match: 'DST: '+str(d.get(int(match.group(1)), 'SOMETHING WENT WRONG' + match.group(1))), line)\n f_opt_tr.write(line)", "def append_after(filename=\"\", search_string=\"\", new_string=\"\"):\n str = \"\"\n with open(filename, mode='r', encoding='utf-8') as fl:\n for line in fl:\n str += line\n if (search_string in line):\n str += new_string\n with open(filename, 'w') as fl:\n fl.write(str)", "def update_rally_regex(self, rally_conf='/etc/rally/rally.conf'):\n rconfig = configparser.RawConfigParser()\n rconfig.read(rally_conf)\n if not rconfig.has_section('openstack'):\n rconfig.add_section('openstack')\n rconfig.set('openstack', 'img_name_regex', f'^{self.image.name}$')\n with open(rally_conf, 'w', encoding='utf-8') as config_file:\n rconfig.write(config_file)", "def loadText(self,textFileName):\n #--Text File\n infoKey = None\n text = None\n texts = {}\n reHeader = re.compile('^#')\n reInfo = re.compile('@ +(\\d) +\"(.+?)\" +(\\d+)')\n reSingleQuote = re.compile('[\\x91\\x92]')\n reDoubleQuote = re.compile('[\\x93\\x94]')\n reEllipsis = re.compile('\\x85')\n 
reEolSpaces = re.compile(r' +\\r\\n')\n reExtraSpaces = re.compile(r' +')\n reIllegalChars = re.compile(r'[@#]')\n #--Read file\n textFile = file(textFileName,'rb')\n for line in textFile:\n if reHeader.match(line): continue\n maInfo = reInfo.match(line)\n if maInfo:\n infoKey = (int(maInfo.group(1)),maInfo.group(2),maInfo.group(3))\n texts[infoKey] = text = []\n else:\n text.append(line)\n textFile.close()\n #--Strip and clean texts\n updated = []\n unmatched = []\n trimmed = {}\n for infoKey in texts.keys():\n if infoKey not in self.infos:\n unmatched.append(infoKey)\n continue\n text = ''.join(texts[infoKey])\n #--Required Subs\n text = text.strip(' \\r\\n')\n text = reSingleQuote.sub('\\'',text)\n text = reDoubleQuote.sub('\"',text)\n text = reEllipsis.sub('...',text)\n text = reIllegalChars.sub('',text)\n #--Optional subs\n text = reEolSpaces.sub('\\r\\n',text)\n text = reExtraSpaces.sub(' ',text)\n #--Trim?\n if len(text) > 511:\n trimmed[infoKey] = (text[:511],text[511:])\n text = text[:511]\n info = self.infos[infoKey]\n if text != info.text:\n info.text = text\n info.setChanged()\n updated.append(infoKey)\n #--Report\n buff = cStringIO.StringIO()\n for header,infoKeys in ((_('Updated'),updated),(_('Unmatched'),unmatched)):\n if infoKeys:\n buff.write('=== %s\\n' % (header,))\n for infoKey in infoKeys:\n buff.write('* %s\\n' % (infoKey,))\n if trimmed:\n buff.write('=== %s\\n' % (_('Trimmed'),))\n for infoKey,(preTrim,postTrim) in trimmed.items():\n buff.write(`infoKey`+'\\n'+preTrim+'<<<'+postTrim+'\\n\\n')\n return buff.getvalue()", "def get_replacement():\n run_linter_throw(\"path/to/file\",\n \"{s} Text\\n{m} Text\\n{m} Text{e}\\n\\n\",\n style,\n whitelist=[\"headerblock/space_copyright\"])", "def update(self, namein, nameout):\n\t\ttext = self.dict.sub(self.readFile(namein))\n\t\tself.writeFile(nameout, text)\n\t\treturn", "def append_after(filename=\"\", search_string=\"\", new_string=\"\"):\n with open(filename, 'r', encoding='utf-8') as f:\n line_list = []\n while True:\n line = f.readline()\n if line == \"\":\n break\n line_list.append(line)\n if search_string in line:\n line_list.append(new_string)\n with open(filename, 'w', encoding='utf-8') as f:\n f.writelines(line_list)", "def add_to_local_conf(prepared_test_build, string):\n\n with open(prepared_test_build['local_conf'], \"a\") as fd:\n fd.write('\\n## ADDED BY TEST\\n')\n fd.write(\"%s\\n\" % string)", "def get_replacement():\n run_linter_throw(\"path/to/file\",\n \"{s} /path/to/file\\n{m}\\n{m} Other{e}\\n\\n\",\n style,\n whitelist=[\"headerblock/copyright\"])", "def update(snippet_original, filename):\n## THIS IS APPENDING, NOT REPLACING\n\tlogging.info(\"Searching for {} in {}\".format(snippet_original, filename))\n\tlogging.debug(\"Opening file\")\n\twith open(filename, \"r+\") as f:\n\t\treader = csv.reader(f)\n\t\twriter = csv.writer(f)\n\t\tlogging.debug(\"Searching for '{}'\".format(snippet_original))\n\t\tin_file = False\n\t\tfor row in reader:\n\t\t\tif str(row[1]) == snippet_original:\n\t\t\t\tin_file = True\n\t\t\t\tprint row\n\t\t\t\tnew_text = raw_input(\"Insert new snippet text: \")\n\t\t\t\trow = writer.writerow([str(row[0]), new_text])\n\t\t\t\tprint row\n\t\tif in_file == False:\n\t\t\tprint \"That's not in this file\"\n\tlogging.debug(\"Search complete\")\n\treturn snippet_original, filename", "def sed_like_thing(pattern, repl, path):\n\n with codecs.open(path, 'rb', 'utf8') as inf:\n data = inf.read()\n\n data = re.sub(pattern, repl, data)\n\n with codecs.open(path, 'wb+', 'utf8') as outf:\n 
outf.write(data)", "def _apply_patch_odoo(self):\n paths = [os.path.join('openerp', 'tools', 'translate.py'),\n os.path.join('odoo', 'tools', 'translate.py')]\n for path in paths:\n s_file = os.path.join(self._server_path, path)\n if not os.path.isfile(s_file):\n continue\n cmd = [\"sed\", \"-i\", \"-e\",\n r\"s/translation'] = src/translation'] = ''/g\",\n s_file]\n print \" \".join(cmd)\n subprocess.call(cmd)", "def settings(self, text):\n rules = Settings().extract(text)\n first_line = 0\n if rules:\n if \"delimiters\" in rules:\n line = rules[\"delimiters\"][0].split(',')\n self.register_delimiters(*[x.strip() for x in line])\n if \"macro\" in rules:\n for line in rules[\"macro\"]:\n key, body = line.split(maxsplit=1)\n self._macros.append(Macro(key, body))\n first_line = rules['end']\n\n return text[first_line:] # cuts off the settings block", "def convert_text(s):\n for d in config.repl: # loaded from config.py\n if \"flags\" in d:\n s = re.sub(d[\"ptrn\"], d[\"repl\"], s, flags=d[\"flags\"])\n else:\n s = re.sub(d[\"ptrn\"], d[\"repl\"], s)\n return s", "def replace(file, current_line, new_line):\n with fileinput.input(file, inplace=True) as f:\n for line in f:\n if current_line in line:\n line = new_line\n sys.stdout.write(line)", "def replace(self, key, val) :\n \n # search for a line '<key> : <val>' \n # loop over lines in output file:\n found = False\n for iline in range(len(self.outfile)) :\n # extract:\n line = self.outfile[iline]\n # skip lines that are no key:value pair for sure ...\n if ':' not in line : continue\n # split once at first ':'\n k, v = line.split(':', 1)\n # match ?\n if k.strip() == key :\n # replace line in original file:\n self.outfile[iline] = '%s : %s\\n' % (k, str(val))\n # replace value:\n self.values[key] = val\n # set flag:\n found = True\n # found, thus no need to continue:\n break\n #endif\n #endfor # lines\n # not found ?\n if not found :\n logging.error('could not replace key : %s' % key)\n raise Exception\n #endif\n \n # ok\n return", "def __PerformSubstitutions(self, text):\n\n for substitution in self.substitutions:\n pattern, replacement = self.SplitValue(substitution)\n text = re.compile(pattern,re.M).sub(replacement, text)\n return text", "def update_feature(selfs, k, v, cfg_path):\n with open(cfg_path, 'r') as cfg:\n file_dict = yaml.safe_load(cfg)\n # overprint the entries with the new config_dict\n file_dict['{}'.format(k)] = v\n with open(cfg_path, 'w') as w_file:\n w_file.write(yaml.dump(file_dict))", "def update_config_file(commands_to_add, commands_to_remove):\r\n\r\n # Parse the config.txt file contents\r\n config_file_contents = {}\r\n if os.path.exists(utils.CONFIG_FILE_PATH):\r\n config_file_string = utils.get_config_file()\r\n first_line = True\r\n for line in config_file_string.split('\\n'):\r\n if first_line:\r\n first_line = False\r\n continue\r\n if not line.strip(): continue\r\n if not line.startswith('#'):\r\n line = line.split('\\t')\r\n config_file_contents[line[0]] = line\r\n\r\n # Remove the specified contents\r\n for cmd in commands_to_remove:\r\n config_file_contents.pop(cmd, None)\r\n\r\n # Add the specified contents\r\n for cmd in commands_to_add:\r\n config_file_contents[cmd] = [cmd] + ['none']*3\r\n\r\n # Archive old config.txt\r\n if os.path.exists(utils.CONFIG_FILE_PATH):\r\n current_time = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')\r\n obs_ending = '-obsolete-{0}.txt'.format(current_time)\r\n obs_path = obs_ending.join(utils.CONFIG_FILE_PATH.rsplit('.txt',1))\r\n os.rename(utils.CONFIG_FILE_PATH, 
obs_path)\r\n\r\n # Print new sorted config.txt\r\n out_handle = open(utils.CONFIG_FILE_PATH, 'w')\r\n out_handle.write('cmd_name\\texecute\\tload_module\\tunload_module')\r\n out_handle.write('\\n')\r\n for cmd, line in sorted(config_file_contents.iteritems()):\r\n out_handle.write('\\t'.join(line))\r\n out_handle.write('\\n')\r\n out_handle.close()", "def load_replace_candidate(self, filename=None, config=None):\n raise NotImplementedError", "def Modify_lines(oldFile, newFile, modifydic):\r\n fd = open(oldFile, \"r\")\r\n lines = fd.readlines()\r\n fd.close()\r\n for key in modifydic.keys():\r\n for i in range(len(lines)):\r\n if lines[i].find(key) != -1:\r\n lines[i]=lines[i].replace(key, modifydic[key])\r\n fd = open(newFile, \"w\")\r\n fd.writelines(lines)\r\n fd.close()", "def append_after(filename=\"\", search_string=\"\", new_string=\"\"):\n z = []\n with open(filename, 'r+') as x:\n \"\"\"Read to new string\"\"\"\n for eachline in x:\n z.append(eachline)\n if search_string in eachline:\n \"\"\"I love 'in' but do not like the .action format\"\"\"\n z.append(new_string)\n with open(filename, 'w+') as a:\n a.write(\"\".join(z))", "def replaceLine(oldFile, string1, string2, newString, newFile = \"TempFile\", mvFile = True):\n with open(oldFile, \"r\") as oldfile, open(newFile, \"w\") as newfile:\n oldfile_read = oldfile.readlines()\n for line in oldfile_read:\n line_number = oldfile_read.index(line)\n if string1 in line and string2 in line:\n oldfile_read[line_number] = replaceString(oldfile_read[line_number],newString)\n newfile.writelines(oldfile_read[line_number])\n else:\n newfile.writelines(oldfile_read[line_number])\n\n if mvFile == True:\n shutil.move(newFile, oldFile)", "def __replaceFiles(self):\n self.ui.showReplaceFilesDialog(self.textForFind())", "def in_place_replace(fname, text):\n\n path = os.path.dirname(fname)\n tmp = tempfile.NamedTemporaryFile(dir=path, mode=\"wt\", delete=False)\n tmp.write(text)\n tmp.close()\n move(tmp.name, fname)", "def adjust_nml_file(fname, replacements):\n f = open(fname, 'r')\n param_str = f.read()\n f.close()\n new_str = replace_keys(param_str, replacements)\n fd, path = tempfile.mkstemp()\n os.write(fd, str.encode(new_str))\n os.close(fd)\n shutil.copy(path, fname)\n os.remove(path)", "def update_file(filename, items):\n # TODO: Implement something in the templates to denote whether the value\n # being replaced is an XML attribute or a value. Perhaps move to dyanmic\n # XML tree building rather than string replacement.\n should_escape = filename.endswith('addon.xml')\n\n with open(filename, 'r') as inp:\n text = inp.read()\n\n for key, val in items.items():\n if should_escape:\n val = saxutils.quoteattr(val)\n text = text.replace('{%s}' % key, val)\n output = text\n\n with open(filename, 'w') as out:\n out.write(output)", "def update_variables(old_contents):\n new_contents = []\n\n for line in old_contents:\n words = line.split()\n\n for word in words:\n # Using the whitespace split above, the keys in the yaml file will\n # have a : at the end, so we need to strip that off before\n # replacing\n if word.endswith(':'):\n word = word[:-1]\n\n if word in VAR_MAPPINGS.keys():\n line = line.replace(word, VAR_MAPPINGS[word])\n\n new_contents.append(line)\n\n return new_contents", "def _replace(yaml_config, path_to_key, replacement_value, start=0, nested_level=0):\n nested_path_to_replace = path_to_key.split(\"/\")\n\n # our regex looks for a specific number of spaces to ensure correct\n # level of nesting. 
It matches to the end of the line\n search_string = (\n \" \" * nested_level + \".*\" + nested_path_to_replace[0] + \"(')?(\\\")?:.*\\n\"\n )\n matches = re.search(search_string, yaml_config[start:])\n\n # early return if we haven't found anything\n if not matches:\n return yaml_config\n\n # if we're on the last item in the path, we need to get the value and\n # replace it in the original file\n if len(nested_path_to_replace) == 1:\n # replace the current key:value with the new replacement value\n match_start = start + matches.start(0) + len(\" \" * nested_level)\n match_end = start + matches.end(0)\n yaml_config = (\n yaml_config[:match_start]\n + \"{}: {}\\n\".format(\n nested_path_to_replace[0],\n _get_yaml_replacement_value(replacement_value, nested_level),\n )\n + yaml_config[match_end:]\n )\n\n return yaml_config\n\n # set new start point to past current match and move on to next match\n start = matches.end(0)\n nested_level += 1\n del nested_path_to_replace[0]\n\n return _replace(\n yaml_config,\n \"/\".join(nested_path_to_replace),\n replacement_value,\n start,\n nested_level,\n )", "def replace_tag(tag, value, file):\r\n with open(file, \"r\") as origin:\r\n with open(file+\".replaced\", \"w\") as dest:\r\n dest.write(origin.read().replace(tag, str(value)))\r\n return file+\".replaced\"" ]
[ "0.7213954", "0.6664111", "0.6658277", "0.644173", "0.63886225", "0.6382801", "0.6320942", "0.6153013", "0.60404235", "0.6038611", "0.6027211", "0.592698", "0.5926587", "0.5894178", "0.58916545", "0.5873578", "0.58602273", "0.5843004", "0.5838578", "0.57836527", "0.57670426", "0.574493", "0.57447517", "0.5743892", "0.57411265", "0.5739366", "0.57386684", "0.571823", "0.5715496", "0.5692162", "0.5674408", "0.56641006", "0.5648631", "0.56250113", "0.5616631", "0.5609704", "0.5600434", "0.55905193", "0.55869013", "0.556512", "0.556025", "0.555939", "0.5556171", "0.55432165", "0.5529993", "0.5529808", "0.5509637", "0.5496529", "0.54891676", "0.5481689", "0.5474149", "0.5473241", "0.54665655", "0.546195", "0.5455071", "0.5447293", "0.54467875", "0.5443125", "0.5424975", "0.5419359", "0.5417846", "0.54162633", "0.5410289", "0.5405118", "0.54015476", "0.53915834", "0.5387704", "0.538485", "0.5352427", "0.5351071", "0.53469306", "0.5345213", "0.53387153", "0.5337736", "0.53369373", "0.5328714", "0.53203404", "0.5303437", "0.52968246", "0.52904415", "0.52876025", "0.528633", "0.5283084", "0.52800465", "0.5277356", "0.52705634", "0.5264551", "0.5253429", "0.5253281", "0.524996", "0.5241847", "0.52324003", "0.52196485", "0.52184224", "0.52175707", "0.5211592", "0.52072847", "0.5204258", "0.5173452", "0.5168929" ]
0.6405321
4
Read the config file, modify the threshold content, and truncate the file
def modify_config_file(config_file, search_config, replace_config):
    with open(config_file, 'r+') as f:
        content = f.read()
        f.seek(0)
        f.write(content.replace(search_config, replace_config))
        f.truncate()
        f.close()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def reten_log(path):\n try:\n file = open(path, 'r+')\n lines = file.readlines()\n if lines > 200:\n file.truncate()\n file.close()\n else:\n file.close()\n except:\n pass", "def __write_thresholds_off_config(self, path):\n self.__write_fw_config(path=path, template_name=\"thresholds_off.template\", macros={})", "def sanitize_new_config(self):\n config_log = self._load_config_log()\n if 'new' in config_log:\n for cfg in config_log['new']:\n with open(cfg, 'r+') as f:\n data = yaml.load(f)\n f.seek(0)\n yaml.safe_dump(data, f, default_flow_style=False)\n f.truncate()\n del config_log['new']\n\n self._save_config_log(config_log)", "def conf_load_skeleton_threshold(fin,skeleton):\n spec = fin.readline().split(':')\n if len(spec) != 2 or spec[0] != 'THRESHOLD':\n raise EnvironmentError(err_msg)\n skeleton.threshold = spec[1].strip()", "def read(cls):\n GC.read()\n if os.path.exists(CONFIG_OVERWRITE):\n cls.overwrite(CONFIG_OVERWRITE)", "def reload_tailed_file(self):\n try:\n self.file_ = open(self.tailed_file, \"r\")\n self.size = os.path.getsize(self.tailed_file)\n\n # Go to the head of file\n self.file_.seek(0, 1)\n\n return True\n except:\n return False", "def config_edits(configfile):\n try:\n\n # Read in the file\n filedata = None\n with open(configfile, 'r') as file:\n filedata = file.read()\n\n # Replace the target string\n filedata = filedata.replace(\n '/home/scratch01/sradanov/A2C2/NCEP/', '').replace('/home/estimr2/sradanov/Operational/', '')\n\n # Write the file out again\n with open(configfile, 'w') as file:\n file.write(filedata)\n\n LOGGER.info('configfile modified')\n except Exception:\n LOGGER.exeption('Failed to modify configfile:')\n\n return configfile", "def setup_configuration_file(self):\n\n with open(self.config_path, \"w+\") as f_config:\n\n f_config.write(get_configuration_file_form())", "def use_config_file(self):\n self.config_file = self.find_config_file()\n if self.config_file:\n self.apply_config_file(self.config_file)", "def __update(self):\n if self.__file:\n target_file = open(self.__file)\n for attr in dir(self):\n if not attr.startswith(\"_\") and \\\n (self.__overwrite or (attr not in self.__exclude)) \\\n and not self.__is_attr_callable(attr):\n try:\n delattr(self, attr)\n except AttributeError:\n pass\n pool = yaml.load(target_file)\n target_file.close()\n if pool: # could be None\n for key, val in pool.iteritems():\n if not key.startswith(\"_\") and \\\n (self.__overwrite or (key not in self.__exclude)) \\\n and not self.__is_attr_callable(key):\n setattr(self, key, val)\n if hasattr(self, 'log_config_file_changes')\\\n and self.log_config_file_changes:\n logging.getLogger(__name__).info(\"Config file has updated.\")", "def _configloader(self, configfile):\n with open(os.path.join(b3.getConfPath(), configfile), 'r') as f:\n for line in f:\n if not line.startswith('//') and not line.startswith('\\r\\n'):\n self.console.write(line, maxRetries=5)\n time.sleep(1)", "def reset_values(self):\n self.parse_config_file()", "def tmp_config_file() -> int:\n # erase anything that exists\n if config_file.is_file():\n config_file.unlink()\n\n # create a new file\n config = ConfigParser()\n config.add_section(\"sepal-ui\")\n config.set(\"sepal-ui\", \"locale\", \"fr-FR\")\n config.write(config_file.open(\"w\"))\n\n yield 1\n\n # flush it\n config_file.unlink()\n\n return", "def trimming(input_file, threshold =30):\n output_file = \"trimmed{}.fq\".format(threshold)\n if os.path.exists(output_file): \n return\n command = 'fastq_quality_trimmer -Q64 -t {} -i {} 
-o{}'\\\n .format(threshold,input_file,output_file)\n e =subprocess.check_output(command,shell = True)\n return output_file", "def write_config_file(self):\n for opt, opt_desc in self.opt_dict.iteritems():\n if 'permanent' in opt_desc and opt_desc['permanent'] == True:\n enabled = 'Always'\n else:\n enabled = opt_desc['enabled'].__str__()\n\n self.file_parser.set(opt, 'enabled', enabled)\n self.file_parser.set(opt, 'implementation',\n opt_desc['selected_imp'])\n self.file_parser.set(opt, 'optype', opt_desc['imptype'])\n\n for config, config_desc in self.config_dict.iteritems():\n enabled = config_desc['enabled'].__str__()\n self.file_parser.set(config, 'enabled', enabled)\n\n scratch_file = self.config_filename + '.scratch'\n with open(scratch_file, 'w') as cfile:\n for config in sorted(self.config_dict.keys()):\n self.write_section(cfile, config)\n\n for opt in sorted(self.opt_dict.keys()):\n self.write_section(cfile, opt)\n\n for imp in sorted(self.imp2opt_dict.keys()):\n self.write_section(cfile, imp)\n\n cfile.write(\"\\n\")\n\n os.rename(scratch_file, self.config_filename)", "def check_size():\n\n if os.path.getsize(config['LOG_PATH']) > config['LOG_MAXSIZE']:\n os.remove(config['LOG_PATH'])", "def update_tempest_conf_file(conf_file, rconfig):\n with open(TEMPEST_CONF_YAML) as yfile:\n conf_yaml = yaml.safe_load(yfile)\n if conf_yaml:\n sections = rconfig.sections()\n for section in conf_yaml:\n if section not in sections:\n rconfig.add_section(section)\n sub_conf = conf_yaml.get(section)\n for key, value in sub_conf.items():\n rconfig.set(section, key, value)\n\n with open(conf_file, 'wb') as config_file:\n rconfig.write(config_file)", "def update_tempest_conf_file(conf_file, rconfig):\n with open(TempestCommon.tempest_conf_yaml, encoding='utf-8') as yfile:\n conf_yaml = yaml.safe_load(yfile)\n if conf_yaml:\n sections = rconfig.sections()\n for section in conf_yaml:\n if section not in sections:\n rconfig.add_section(section)\n sub_conf = conf_yaml.get(section)\n for key, value in sub_conf.items():\n rconfig.set(section, key, value)\n\n with open(conf_file, 'w', encoding='utf-8') as config_file:\n rconfig.write(config_file)", "def loadThresholds (self, threshFile):\n file1 = open (threshFile, 'r')\n lignes = file1.readlines()\n file1.close()\n for sent in lignes:\n sent = sent.rstrip('\\n')\n if (len(sent) > 1):\n phone=sent.split(' ')[0]\n thresh=sent.split(' ')[1]\n # print str(len(sent)) + phone + ' ' + thresh\n self.thresholds[phone] = float(thresh)", "def clean_config(job_cfg_path, fixtures_path):\n tree = ET.parse(job_cfg_path)\n root = tree.getroot()\n inject_tree = ET.parse(fixtures_path)\n for node in inject_tree.getroot():\n srcnode = root.find(\"./%s\" % node.tag)\n if srcnode is not None:\n root.remove(srcnode)\n tree.write(job_cfg_path)", "def conf(config_file):\n\n global cf\n\n cf_parser = SafeConfigParser()\n\n # etc dir at the same level of the bin dir containing this script\n close_etc_path = os.path.realpath( os.path.realpath(os.path.dirname(__file__)) + \"/../etc\" )\n\n # Try to open configuration file (read() can get a list of files as well)\n conf_file_ok = True\n if len(cf_parser.read(config_file)) == 0:\n logging.warning(\"Cannot read configuration file %s\" % config_file)\n conf_file_ok = False\n\n for sec_name,sec_content in cf.iteritems():\n\n for key,val in sec_content.iteritems():\n\n try:\n new_val = cf_parser.get(sec_name, key) # --> [sec_name]\n try:\n new_val = float(new_val)\n except ValueError:\n pass\n cf[sec_name][key] = new_val\n 
logging.info(\"Configuration: %s.%s = %s (from file)\", sec_name, key, str(new_val))\n except Exception, e:\n logging.info(\"Configuration: %s.%s = %s (default)\", sec_name, key, str(val))\n\n return conf_file_ok", "def update_flagfile(flags_path, new_threshold):\n if abs(new_threshold) > 1:\n raise ValueError(\"Invalid new percentile for resign threshold\")\n with tf.gfile.GFile(flags_path) as f:\n lines = f.read()\n if new_threshold > 0:\n new_threshold *= -1\n if not RESIGN_FLAG_REGEX.search(lines):\n print(\"Resign threshold flag not found in flagfile {}! Aborting.\".format(flags_path))\n sys.exit(1)\n old_threshold = RESIGN_FLAG_REGEX.search(lines).groups(1)\n lines = re.sub(RESIGN_FLAG_REGEX, \"--resign_threshold={:.3f}\".format(new_threshold), lines)\n\n print(\"Updated percentile from {} to {:.3f}\".format(old_threshold, new_threshold))\n with tf.gfile.GFile(flags_path, 'w') as f:\n f.write(lines)", "def write_configuration_file(self, content):\n with open(self.configuration_file_path, 'w') as configuration_file:\n configuration_file.write(content)", "def truncate(submission_file_name, out_file_name):\n with open(submission_file_name, \"r\") as f:\n with open(out_file_name, \"w\") as f_out:\n f_out.write(\"Id,Prediction\\n\")\n for line in f:\n m = re.match(\"(.*,)(-?\\d+)\", line)\n if m == None:\n print(\"No match found for \"+ line)\n continue\n rating = int(m.group(2))\n if rating < 1:\n rating = 1\n elif rating > 5:\n rating = 5\n f_out.write(m.group(1) + str(rating)+\"\\n\")", "def truncateFile(fileNameString, tooBig=50000):\n if os.path.getsize(fileNameString) > tooBig:\n fh = open(fileNameString, 'rb+')\n fh.seek(-tooBig, 2) \n data = fh.read()\n fh.seek(0) # rewind\n fh.write(data)\n fh.truncate()\n fh.close()", "def reloadConfig():\n print \"---=== load cfg ===---\"\n cfgFile = open(\"../cfg/config.cfg\", \"r\")\n lines = cfgFile.readlines()\n \n for l in lines:\n l = l.replace(\"\\n\", \"\")\n if (l.count(\"#\") and l.index(\"#\") == 0) or l.count(\":\") == 0:\n continue\n tmp = l.split(\":\")\n tmp[0] = tmp[0].replace(\"trim\", \"\")\n tmp[0] = tmp[0].replace(\"max\", \"\")\n tmp[0] = tmp[0].replace(\"slew\", \"\")\n tmp[0] = tmp[0].replace(\"min\", \"\")\n \n if tmp[0] in var.min:\n var.min[tmp[0]] = float(tmp[1])\n print tmp[0], \" = \", tmp[1]\n if tmp[0] in var.trim:\n var.trim[tmp[0]] = int(tmp[1])\n print tmp[0], \" = \", tmp[1]\n if tmp[0] in var.max:\n var.max[tmp[0]] = int(tmp[1])\n print tmp[0], \" = \", tmp[1]\n if tmp[0] in var.slew:\n var.slew[tmp[0]] = int(tmp[1])\n print tmp[0], \" = \", tmp[1]\n \n print \"---=== loaded cfg ===---\"", "def test_large_file(self):\n\t\tfixedgenerator.GenerateFixedWidthFile().generate()\n\t\tmain.Main(['input/large.txt']).run()\n\t\tself.assertTrue(filecmp.cmp('output/output.csv', 'output/large.csv'))\n\t\tos.remove('input/large.txt')\n\t\tos.remove('output/large.csv')", "def clean():\n Log.d(DEBUG_TAG, \"Delete config file...\")\n try:\n os.remove(CONFIG_FILE)\n except os.error as e:\n Log.e(DEBUG_TAG, \"Delete config file%s error, reason:%s\"%(CONFIG_FILE, e))", "def process_config_file(config, logger):\n # if the config file was not specified, just set default config values\n if not os.path.exists(config.config_file):\n setattr(config, \"watch_masks\", [])\n setattr(config, \"ignore_list\", [])\n logger.info(\"Specified config file '%s' does not exist, using \"\n \"default values.\" % config.config_file)\n return config\n logger.info(\"Processing config file '%s' ...\" % config.config_file)\n # Raw - doesn't do any 
interpolation\n parser = RawConfigParser()\n # by default it seems that value names are converted to lower case,\n # this way they should be case-sensitive\n parser.optionxform = str\n # does not fail even on a non-existing file\n parser.read(config.config_file)\n try:\n for (name, value) in parser.items(\"general\"):\n # assumes that ',' is the separator of configuration values\n values = value.split(',')\n # trim white spaces\n val_trimmed = [val.strip() for val in values]\n # entry will always be a list\n setattr(config, name, val_trimmed)\n except (ValueError, IndexError) as ex:\n msg = \"Error while processing configuration file, reason: %s\" % ex\n helpers.print_msg_exit(msg=msg, exit_code=1)\n return config", "def _load_conf(self, conf):\n f = open(self.file, \"w\")\n f.write(conf)\n f.close()", "def remove(ctx, name, project_root):\n\n if name == 'logme':\n raise LogmeError(\"'logme' master logger configuration cannot be removed!\")\n\n with ensure_conf_exist(project_root) as logme_conf:\n\n config = read_config(logme_conf)\n config.remove_section(name)\n\n with logme_conf.open('w+') as conf:\n config.write(conf)", "def cleaning_file():\n f = open (\"report_for_judy_part2.txt\", \"w\")\n f.close()", "def test_write_config(self):\n config = Config()\n config.config = test_config\n config.config_file = \"./config\"\n config.write_config()\n with open(config.config_file) as config_file:\n data = config_file.read()\n self.assertTrue(data)\n os.remove(config.config_file)", "def trim_silence_file(file_path, noise_threshold=150):\n rate, audio = scipy.io.wavfile.read(file_path)\n trimmed_audio = trim_silence(audio, noise_threshold=noise_threshold)\n print()\n scipy.io.wavfile.write(file_path, rate, trimmed_audio)", "def test_default_config_old_data():\n clean_tables()\n config = set_configuration() \n assert config['age']['value'] == \"72\" \n assert config['retainUnsent']['value'] == \"False\"\n \n insert_into_reading()\n row_count = get_count()\n min_id, max_id = min_max_id() \n update_timestamp_values(min_id=min_id, max_id=max_id) \n last_object_id = update_last_object(min_id=min_id, max_id=max_id)\n\n total_purged, unsent_purged = purge(config, _READING_TABLE)\n log = get_log()\n \n assert total_purged == row_count\n assert total_purged == log['rowsRemoved'] \n assert unsent_purged == max_id - last_object_id\n assert unsent_purged == log['unsentRowsRemoved'] \n assert log['failedRemovals'] == 0 \n assert log['rowsRemaining'] == row_count - total_purged \n clean_tables()", "def fix_rinex(f):\n num_data = 132\n with open(f, 'r+') as file:\n d = file.readlines()\n if len(d) <= 23: # Ensure file has more than just header, otherwise delete\n os.remove(f)\n else:\n file.seek(0)\n ind = [i for i, s in enumerate(d) if '>' in s]\n ind_lastmeas = ind[-1]\n for i in d[:ind_lastmeas]: # Write all but last measurement\n file.write(i)\n try:\n numsats = int(d[ind_lastmeas].split()[-1])\n except ValueError:\n file.truncate()\n return\n # Ensure last measurement is good before writing it.\n if numsats == (len(d)-ind_lastmeas-1) and len(d[-1]) == num_data:\n for i in d[ind_lastmeas:]:\n file.write(i)\n file.truncate() # Delete unwritten data", "def _update_config_from_file(config, cfg_file):\n config.defrost()\n with open(cfg_file, 'r') as infile:\n yaml_cfg = yaml.load(infile, Loader=yaml.FullLoader)\n for cfg in yaml_cfg.setdefault('BASE', ['']):\n if cfg:\n _update_config_from_file(\n config, os.path.join(os.path.dirname(cfg_file), cfg)\n )\n config.merge_from_file(cfg_file)\n config.freeze()", 
"def set_threshold(self, cat, t):\n self.con.execute(\"update ct set threshold=%f where category='%s'\" \n % (t, cat))", "def read_config(self, config_filename):", "def write(self, fn):\n with open(fn, 'w') as f:\n self.config.write(f)", "def xml_truncate(index, original, tmp):\n \n with open(original, 'r') as xmlfile:\n content = xmlfile.read().splitlines()\n truncated_content = '\\n'.join(content[:-index or None])\n with open(tmp, 'w') as xmlfile:\n xmlfile.write(str(truncated_content))\n\n return", "def add_fixed_parameters_from_config_file(self, config_file):\n pass", "def _prepare_input_file(self, filename, numlines, maxvalue):\n with open(filename, 'a') as f:\n for _ in range(numlines):\n f.write(str(randrange(maxvalue)) + '\\n')\n self.filepath = f.name", "def read_config_file(path, warn=False):\n lineno = 0\n result = {}\n fh = open(path, \"r\")\n for line in fh:\n lineno += 1\n try:\n record = pfilter(line)\n except CfgLineIsGarbage as g:\n if warn and not g.isblank:\n print >> sys.stderr, (\"Debug: %s: line %d: column %d: \"\n \"garbage in\\n%s\" % (path, lineno, g.col, g))\n continue\n result.update(record)\n fh.close()\n return result", "def update_config(config_file, config_base=None):\n if config_base is None:\n config_base = def_config_file\n assert(os.path.isfile(config_base))\n if not os.path.isfile(config_file):\n shutil.copy(config_base, config_file)\n cp = CisConfigParser()\n cp.read(config_file)\n miss = []\n if platform._is_win: # pragma: windows\n miss += update_config_windows(cp)\n with open(config_file, 'w') as fd:\n cp.write(fd)\n for sect, opt, desc in miss: # pragma: windows\n warnings.warn((\"Could not locate option %s in section %s.\"\n + \"Please set this in %s to: %s\")\n % (opt, sect, config_file, desc))", "def write_config(self):\n logging.debug(\"Writing configuration file: %s\" % self.config_file)\n f = open(self.config_file, \"w\")\n self.config.write(f)\n f.close()", "def clean_file(file_to_clean):\n\n logging.info(f'Cleaning file = {file_to_clean}')\n new_file_name = (file_to_clean.split('/')[-1]).split('.')[0]\n text = open(f\"{new_file_name}\", \"w\")\n lecture = open(f\"{file_to_clean}\", \"r\")\n for line in lecture:\n if not ('[' in line):\n line = line.replace('#', '')\n text.write(line)\n text.close()\n lecture.close()\n logging.info(f'File = {file_to_clean} Cleaned')\n synthesis_file(new_file_name)", "def overwrite(cls, config_file_overwrite: str):\n conf_overwrite: dict = GC.read_conf(config_file_overwrite)\n for sec, attr in conf_overwrite.items():\n for key, val in attr.items():\n try:\n _ = GC.conf[sec][key]\n GC.conf[sec][key] = val\n except KeyError:\n print(\"Overwrite config file has section/key that \"\n \"don't exist in base config!!! 
Abort!!!\")\n sys.exit(1)", "def update(self):\n self.save_config_file()", "def apply_config(self, filename=None, verify_nginx=True):\n self.write_config(filename=filename)\n\n app_path = f\"{self._nginx_path}/sbin/nginx\"\n if verify_nginx:\n NginxUtil.nginx_config_verify(self._node, app_path)", "def write(self, config_path=CONFIG_PATH):\n\n with open(self.full_path(config_path), 'w') as conf_fh:\n conf_fh.write(self.local_config)", "def set_config(self, file_path_name):\n level = logging.DEBUG\n format = '%(asctime)s %(levelname)-8s %(message)s' \n datefmt = '%a, %d %b %Y %H:%M:%S'\n filemode = 'a'\n \n\n logging.basicConfig(level = level,\n format = format,\n datefmt = datefmt,\n filename = file_path_name,\n filemode = filemode)", "def truncate(self):\n for file_name in os.listdir(self.path):\n if file_name[0:4] == 'data':\n os.remove(self.path + '/' + file_name)\n self.current_row = 0", "def set_rsyslog_new_configuration():\n with open(rsyslog_conf_path, \"rt\") as fin:\n with open(\"tmp.txt\", \"wt\") as fout:\n for line in fin:\n if \"imudp\" in line or \"imtcp\" in line:\n # Load configuration line requires 1 replacement\n if \"load\" in line:\n fout.write(line.replace(\"#\", \"\", 1))\n # Port configuration line requires 2 replacements\n elif \"port\" in line:\n fout.write(line.replace(\"#\", \"\", 2))\n else:\n fout.write(line)\n else:\n fout.write(line)\n command_tokens = [\"sudo\", \"mv\", \"tmp.txt\", rsyslog_conf_path]\n write_new_content = subprocess.Popen(command_tokens, stdout=subprocess.PIPE)\n time.sleep(3)\n o, e = write_new_content.communicate()\n if e is not None:\n handle_error(e,\n error_response_str=\"Error: could not change Rsyslog.conf configuration in -\" + rsyslog_conf_path)\n return False\n print_ok(\"Rsyslog.conf configuration was changed to fit required protocol - \" + rsyslog_conf_path)\n return True", "def write_config_file(new_config_file=None):\n global config_file\n if new_config_file:\n config_file = new_config_file\n try:\n logging.debug(\"Writing config to %s\", config_file_fp)\n fd = file(config_file_fp, \"w\")\n rcp.write(fd)\n fd.close()\n \n except IOError, err:\n logging.error(\"Unable to write config file %s\", config_file_fp)\n logging.error(str(err))", "def threshold(self,thresholdValue):\n # TO DO\n pass", "def test_invalid_crawl_interval_configuration(self):\n self.write_configuration_file(\n '[spider]\\n'\n 'crawl_interval: 0\\n'\n ) \n with self.assertRaises(mini_spider.ConfigurationException):\n mini_spider.parse_configuration(self.configuration_file_path)", "def adjust_threshold(file_bw, buffer_db, threshold_base):\n rf_decimation = int(constants.FILE_FS / file_bw)\n fft_bin_decimation = constants.OBS_INPUT_NFFT / rf_decimation / constants.OBS_OUTPUT_NFFT\n\n threshold_scale_factor = constants.OBS_INPUT_NFFT / constants.OBS_OOB_NFFT / fft_bin_decimation\n\n threshold = (threshold_base + 10 * np.log10(constants.FILE_FS)\n - 20*np.log10(threshold_scale_factor) + buffer_db)\n\n return threshold", "async def write_config(\n self,\n path: str | os.PathLike[str],\n applyall: bool = False,\n poweron: bool = False,\n timeout: float = 1,\n notifier: Optional[Callable[[str], None]] = None,\n ):\n notifier = notifier or (lambda x: None)\n\n notifier(\"Reading configuration file\")\n\n if not os.path.exists(path):\n raise ArchonError(f\"File {path} does not exist.\")\n\n c = configparser.ConfigParser()\n c.read(path)\n if not c.has_section(\"CONFIG\"):\n raise ArchonError(\"The config file does not have a CONFIG section.\")\n\n # Undo the INI 
format: revert \\ to / and remove quotes around values.\n config = c[\"CONFIG\"]\n lines = list(\n map(\n lambda k: k.upper().replace(\"\\\\\", \"/\") + \"=\" + config[k].strip('\"'),\n config,\n )\n )\n\n notifier(\"Clearing previous configuration\")\n if not (await self.send_command(\"CLEARCONFIG\", timeout=timeout)).succeeded():\n self.status = ControllerStatus.ERROR\n raise ArchonError(\"Failed running CLEARCONFIG.\")\n\n notifier(\"Sending configuration lines\")\n\n cmd_strs = [f\"WCONFIG{n_line:04X}{line}\" for n_line, line in enumerate(lines)]\n done, failed = await self.send_many(cmd_strs, max_chunk=200, timeout=timeout)\n if len(failed) > 0:\n ff = failed[0]\n self.status = ControllerStatus.ERROR\n raise ArchonError(f\"Failed sending line {ff.raw!r} ({ff.status.name})\")\n\n notifier(\"Sucessfully sent config lines\")\n\n if applyall:\n notifier(\"Sending APPLYALL\")\n cmd = await self.send_command(\"APPLYALL\", timeout=5)\n if not cmd.succeeded():\n self.status = ControllerStatus.ERROR\n raise ArchonError(f\"Failed sending APPLYALL ({cmd.status.name})\")\n\n if poweron:\n notifier(\"Sending POWERON\")\n cmd = await self.send_command(\"POWERON\", timeout=timeout)\n if not cmd.succeeded():\n self.status = ControllerStatus.ERROR\n raise ArchonError(f\"Failed sending POWERON ({cmd.status.name})\")\n\n self.status = ControllerStatus.IDLE", "def remove_templates_under_threshold(path, identity_file, threshold):\n\n path_to_pbds = path + 'Modeling/cleaned_template_pdbs/'\n path_to_fastas = path + 'Modeling/cleaned_template_fastas/'\n path_to_alignment = path + 'Modeling/fasta_alns_and_identities/' + identity_file\n to_remove = []\n with open(path_to_alignment) as identities:\n identity = 'empty'\n while identity[0]:\n identity = identities.readline().strip().split(' ')\n if identity[0]:\n if float(identity[1]) < threshold:\n to_remove.append(identity[0])\n for i in to_remove:\n os.remove(path_to_pbds + i + '.pdb')\n os.remove(path_to_fastas + i + '.fasta')", "def clean_sfd(sfd_to_clean, nkeep=30):\n with sfd_to_clean.open(sfd_to_clean.filename) as f:\n txt = f.readlines()\n # remove all lines after nkeep lines\n txt2 = txt[:nkeep]\n # add note to end of file\n txt2 += [u'WARNING: REST OF FILE WAS CLEANED SO SAVE SPACE!!!\\n']\n # overwrite file\n with sfd_to_clean.open(sfd_to_clean.filename, 'w') as fnew:\n fnew.writelines(txt2)", "def setThreshold(self, threshold): # real signature unknown; restored from __doc__\n pass", "async def cleanup_file(self, short_text, file_name):\n await asyncio.sleep(1)\n # cache short texts\n if len(short_text) > self.config.get('cache_max_letters', 100):\n os.remove(file_name)", "def write_config(self, filename):\n self.config.filename = filename\n self.config.write()", "def checkConfig(self, tClass, tConfig, filePath):\n tConfig.save(filePath)\n loadConfig = tConfig.__class__()\n loadConfig.load(filePath)\n transform = tClass(loadConfig)\n self.checkBasics(transform)", "def update_config_file(**kwargs):\n config_file = try_read_file()\n config_file.update(kwargs)\n config_file = {key: value for key, value in config_file.items() if value is not None}\n logging.info('open config file %s', config_file_path)\n with open(config_file_path, 'w') as f:\n logging.info('begin io %s', config_file_path)\n json.dump(config_file, f, indent=4)\n logging.info('end io %s', config_file_path)", "def load_file(self, update=True): # type: (bool) -> None\n if os.path.isfile(self._file):\n logging.info('Cyra is reading your config from %s' % self._file)\n\n with 
open(self._file, 'r') as f:\n toml_str = f.read()\n self.load_toml(toml_str)\n else:\n self._modified = True\n\n # Write file if non existent or modified\n if update:\n self.save_file()", "def test_invalid_crawl_timeout_configuration(self):\n self.write_configuration_file(\n '[spider]\\n'\n 'crawl_timeout: 0\\n'\n ) \n with self.assertRaises(mini_spider.ConfigurationException):\n mini_spider.parse_configuration(self.configuration_file_path)", "def nextcloud_rescue_mode(*, enable=True):\n nextcloud_config_file = f'{NEXTCLOUD_ROOT_FOLDER}/config/config.php'\n nextcloud_back_up_file = nextcloud_config_file + '.old'\n\n with open(nextcloud_config_file, 'r') as f: # Open for read\n lines = f.readlines() # Read file into memory\n\n # Back up the file in case the rest operations go wrong\n with open(nextcloud_back_up_file, 'w') as recovery:\n recovery.writelines(lines)\n\n for index, line in enumerate(lines):\n # Look for the file's line with property 'maintenance'\n if line.find('maintenance') != -1:\n if enable:\n text, replace = 'false', 'true'\n else:\n text, replace = 'true', 'false'\n\n lines[index] = line.replace(text, replace) # Change property\n break # Exit loop\n\n with open(nextcloud_config_file, 'w') as f: # Open for write\n f.writelines(lines) # Write from memory into the file\n\n if f.closed: # Remove back up file if everything run successfully\n os.remove(nextcloud_back_up_file)", "def clear_config():\n check_config()\n fs.truncate(PYWS_DIR_BIN)", "def monitor(self, filename):\n self.do_tail(filename, 0)", "def read_file_simple(self,filename):\n\n freqlim = config.cutoff*self.cutoff\n exceed_freqlim = False\n freqfile = open(filename)\n freqfile.readline() # skip head\n mode_temp = []\n for line in freqfile:\n line = line.strip()\n columns = line.split()\n n = int(columns[1])\n freq = utilities.to_float(columns[2])\n # remove frequencies above AIMS_configure.cutoff*nu_{cut-off}\n if (freq > freqlim):\n exceed_freqlim = True\n continue\n if (config.npositive and (n < 0)): continue # remove g-modes if need be\n mode_temp.append((n,int(columns[0]),freq,utilities.to_float(columns[4])))\n freqfile.close()\n self.modes = np.array(mode_temp,dtype=modetype)\n\n return exceed_freqlim", "def reload(self):\n self.read(self._cfg_path)", "def on_load_configuration(self, config):\n logger.info(\"On 'load configuration' callback.\")\n \n # Log location\n try:\n self._log_location = config.get(self.GENERAL_CONFIG_SECTION,\n self.GENERAL_LOG_LOCATION_CONFIG_PROP)\n except Exception:\n pass\n if not self._log_location:\n raise Exception(\n \"Log location not found in configuration file: {0}\"\n .format(self._app_config_path))\n \n logger.info(\"Vormetric log file location=\" + self._log_location)\n\n # Log check interval\n try:\n self._log_check_interval = int(config.get(self.GENERAL_CONFIG_SECTION,\n self.GENERAL_LOG_CHECK_INTERVAL_CONFIG_PROP))\n except Exception:\n pass\n if not self._log_check_interval:\n raise Exception(\n \"Log check interval not found in configuration file: {0}\"\n .format(self._app_config_path))\n \n logger.info(\"Vormetric log check interval=\" + str(self._log_check_interval))", "def clean_conf(self):\r\n return self._arm.clean_conf()", "def copy(fname):\n outfname = fname + '.NEW.csv'\n bakfname = fname + '.BAK'\n permfname = fname.rsplit('.',1)[0] + '_cropped.csv'\n inf = open(fname, 'rt')\n outf = open(outfname, 'w')\n truncated = False\n print '\\n\\nProcessing %s ...' 
% (fname)\n\n title = inf.readline()\n outf.write(title)\n\n headers = inf.readline()\n extractor = ValueExtractor(headers)\n outf.write(headers)\n\n # copy until we reach bad data\n for line in inf:\n if not line:\n continue # skip blanks\n ts, temp, rh, batt = extractor.extract(line)\n if batt and batt < MIN_VOLTAGE:\n print 'Low voltage detected, truncating. ' + line,\n truncated = True\n break\n if rh and rh <= 1.0:\n print 'Bad RH detected, truncating. ' + line,\n truncated = True\n break\n if temp is not None and temp > MAX_TEMP:\n print 'High temperature detected, truncating. ' + line,\n truncated = True\n break\n if temp is not None and temp < MIN_TEMP:\n print 'Low temperature detected, truncating. ' + line,\n truncated = True\n break\n outf.write(line)\n\n # back up original and rename new truncated file\n outf.close()\n inf.close()\n if not truncated:\n os.remove(outfname)\n else:\n print 'Renaming truncated %s to %s .' % (os.path.basename(fname), os.path.basename(permfname))\n os.rename(fname, bakfname)\n os.rename(outfname, permfname)", "def test_default_config():\n clean_tables() \n config = set_configuration() \n assert config['age']['value'] == \"72\"\n assert config['retainUnsent']['value'] == \"False\" \n \n insert_into_reading()\n row_count = get_count() \n min_id, max_id = min_max_id() \n update_last_object(min_id=min_id, max_id=max_id)\n total_purged, unsent_purged = purge(config, _READING_TABLE)\n\n log = get_log() \n\n assert total_purged == 0\n assert total_purged == log['rowsRemoved']\n assert unsent_purged == 0 \n assert unsent_purged == log['unsentRowsRemoved'] \n assert log['failedRemovals'] == 0 \n assert log['rowsRemaining'] == row_count - total_purged \n clean_tables()", "def merge_configs(configFile:str, oldSampleFile:str, newSampleFile:str, unsafeAttributesFile:str, filetype:str):\n upgrade_config(configFile, oldSampleFile, newSampleFile, unsafeAttributesFile, filetype)", "def Truncate(self, f, fcut, below=True):\n fout = copy.copy(f)\n ind = thresh(f,fcut)\n if below:\n fout = fout[0:ind]\n else:\n fout = fout[ind:]\n \n keys=['Gxx','Gyy','Gxy']\n\n for curkey in keys:\n curitem = colwise(getattr(self,curkey))\n\n if below:\n curitem = curitem[0:ind,:]\n else:\n curitem = curitem[ind:,:]\n \n setattr(self,curkey,squeeze(curitem))\n return fout", "def overwrite_file(self):\n\n new_file = open(self.temp_filename, 'r')\n file = open(self.filename, 'w')\n file.writelines(new_file.readlines())\n new_file.close()\n file.close()\n os.remove(self.temp_filename)", "def write_config_file():\n\tif not config_parser:\n\t\tprint \"Config module not loaded. I don't save anything.\"\n\t\treturn\n\n\tf = file(config_file, \"w\")\n\tconfig_parser.write(f)\n\tf.close()", "def setup_metrics_file(self):\n\n with open(self.metrics_path, \"w+\") as f_metrics:\n\n f_metrics.write(get_metrics_file_form())", "async def _msgvote_threshold(self, ctx, threshold: int):\n\n if threshold < 0:\n await self.bot.say(\"Invalid threshold. 
Must be a positive \"\n \"integer, or 0 to disable.\")\n elif threshold == 0:\n self.settings[\"threshold\"] = threshold\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Message deletion disabled.\")\n else:\n self.settings[\"threshold\"] = threshold\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Messages will be deleted if [downvotes - \"\n \"upvotes] reaches {}.\".format(threshold))", "def conf_update(self):\n pass", "def load_and_clean_file(self, path):\n pass", "def write_config(self, config_file):\n \n # write root paths\n \n # write reference data\n \n # write tool paths\n \n pass", "def _write_elasticluster_config(config, out_file):\n orig_file = os.path.join(sys.prefix, \"share\", \"bcbio-vm\", \"elasticluster\", \"config\")\n if not os.path.exists(os.path.dirname(out_file)):\n os.makedirs(os.path.dirname(out_file))\n if os.path.exists(out_file):\n bak_file = out_file + \".bak%s\" % datetime.datetime.now().strftime(\"%Y-%m-%d-%H-%M-%S\")\n shutil.move(out_file, bak_file)\n with open(orig_file) as in_handle:\n with open(out_file, \"w\") as out_handle:\n for line in in_handle:\n if line.startswith(tuple(config.keys())):\n name, val = line.strip().split(\"=\")\n out_handle.write(\"%s=%s\\n\" % (name, config[name]))\n else:\n out_handle.write(line)\n return out_file", "def tokenize_descriptions_with_threshold(input_file_path, output_file_path):\n if os.path.exists(output_file_path):\n print(\"Tokenized descriptions found. Will not be generated.\")\n return\n\n print(\"Generating tokenized descriptions\")\n f = open(output_file_path, 'a')\n with open(input_file_path, 'r') as file:\n word_count_threshold = 4\n word_counts = {}\n sequences = []\n for line in file:\n if line.strip():\n sequence = line.strip().replace(\" '\",\"'\").split()\n sequence[1:] = clean_tokens(sequence[1:])\n sequence.insert(1, '<START>')\n sequence.append('<END>')\n sequences.append(sequence)\n for w in sequence[1:]:\n word_counts[w] = word_counts.get(w, 0) + 1\n vocab = [w for w in word_counts if word_counts[w] >= word_count_threshold]\n for sequence in sequences:\n sequence[1:] = ['<UNK>' if x not in vocab else x for x in sequence[1:]]\n f.write(\",\".join(sequence) + \"\\n\") \n f.close()\n print(\"Finished generating tokenized descriptions\")", "def save_config() -> None:\n with open(_config_file, \"w\", newline=\"\") as config_file:\n json.dump(_config, config_file, indent=4)\n config_file.truncate()", "def new_config_file(filepath):\n config_container = gather_default()\n ind = \" \"\n try:\n\n if _pix.write_data_to_file(config_container, filepath, ind):\n return filepath\n\n except PermissionError:\n\n # NOTE: as config.txt is crucial for running blender tools we have to warn user (even in 3D viewport)\n # so he can not miss the problem with creation of config file; solution also provided in message\n lprint(\"E Cannot create configuration file (permission denied), please ensure read/write permissions for:\\n\\t %r\\n\\n\\t \"\n \"Without configuration file Blender Tools might not work as expected!\",\n (os.path.dirname(filepath),),\n report_errors=1,\n report_warnings=1)\n\n return None", "async def before_cleanup(self, invoker: PluginInvoker):\n config_file = invoker.files[\"config\"]\n try:\n config_file.unlink()\n except FileNotFoundError:\n pass\n logging.debug(f\"Deleted configuration at {config_file}\")", "def apply(self, opened_file):", "def update_from_file(self):\n config_path = os.environ.get('MINDINSIGHT_CONFIG', '')\n if not 
config_path:\n return\n\n config_module = None\n\n # python:full.path.for.config.module\n if config_path.startswith('python:'):\n config_module = import_module(config_path[len('python:'):])\n\n # file:full/path/for/config.py\n elif config_path.startswith('file:'):\n config_path = config_path[len('file:'):]\n module_name = '__mindinsightconfig__'\n config_module = types.ModuleType(module_name)\n machinery = import_module('importlib.machinery')\n loader = machinery.SourceFileLoader(module_name, config_path)\n loader.exec_module(config_module)\n\n if config_module is None:\n return\n\n for setting in dir(config_module):\n if setting.isupper() and setting in self._default_settings:\n setting_value = getattr(config_module, setting)\n setattr(self, setting, setting_value)\n self._explicit_settings.add(setting)", "def read_config_file(self, filename):\n self.LO1_pwr = []\n self.LO1_freq_pwr = []\n fd = open(filename, \"r\")\n for line in fd:\n if line[0:8] == \"LO1pwr.N\":\n trash, num = line.split(\"=\")\n self.num_pwr_readings = string.atoi(num)\n break\n for i in range(0, self.num_pwr_readings):\n self.LO1_pwr.append(0)\n self.LO1_freq_pwr.append(0)\n\n for line in fd:\n if line[0:7] == \"LO1pwr[\":\n ind = line.find(\"frequency\")\n if ind != -1:\n trash, num = line.split(\"[\")\n index, trash = num.split(\"]\")\n trash, freq = line.split(\"=\")\n index = string.atoi(index)\n freq.lstrip()\n self.LO1_freq_pwr[index] = string.atof(freq)\n else:\n ind = line.find(\"power\")\n if ind != -1:\n trash, num = line.split(\"[\")\n index, trash = num.split(\"]\")\n index = string.atoi(index)\n trash, pwr = line.split(\"=\")\n pwr.lstrip()\n self.LO1_pwr[index] = string.atof(pwr)", "def setconfig(filepath, param, value):\n\n with open(filepath, 'rb') as f:\n lines = f.readlines()\n with open(filepath, 'wb') as f:\n updated = False\n for line in lines:\n if line.strip().startswith('#') or '=' not in line:\n # keep comments and other non informative lines unchanged\n f.write(line)\n continue\n k, v = line.split('=', 1)\n if k.strip() == param:\n # update with new value\n f.write('%s=%s\\n' % (param, value))\n updated = True\n else:\n # keep line unchanged\n f.write(line)\n if not updated:\n # append the new param at the end of the file\n f.write('%s=%s\\n' % (param, value))", "def _write_config(self, config_path: Path):\n with open(config_path, \"w\") as f:\n json.dump(self.config_overrides, f)", "def test_writing(self):\n with contextlib.closing(RiggedDailyLogFile(self.name, self.dir)) as log:\n log.write(\"123\")\n log.write(\"456\")\n log.flush()\n log.write(\"7890\")\n\n with open(self.path) as f:\n self.assertEqual(f.read(), \"1234567890\")", "def refresh_config(self):\n with open(config_name, 'rb') as f:\n self.CONFIG = simplejson.load(f)\n\n return self", "def file_and_malware_syslog_config(self, file_and_malware_syslog_config):\n\n self._file_and_malware_syslog_config = file_and_malware_syslog_config", "def handle_file(self, f, settings):\n ext = splitext(f.dest_file)[1][1:]\n if not ext.lower() == \"xml\": # Not a XML file\n # Ignore the file. 
It is not an XML file.\n return\n\n path = f.get_patched_file_path()\n if not path: # Ignore the file.\n return\n\n notAllowedWords = settings['ReservedWords'].split(',')\n attributesToCheckLenght = settings['AttributesToCheck'].split(',')\n maxLength = settings['MaxLength']\n headerIdentifierRegex = settings['LiquibaseHeaderRegex']\n isLiquibaseFile = False\n\n with open(path, 'rb') as content_test:\n for line in content_test:\n if re.findall(headerIdentifierRegex, line):\n isLiquibaseFile = True\n if not isLiquibaseFile:\n # isNotALiquibaseFile\n return\n with open(path, 'rb') as content:\n line_num = 0\n for line in content:\n line_num += 1\n self.checkLine(f, line, line_num, attributesToCheckLenght,\n notAllowedWords, maxLength)" ]
[ "0.63270384", "0.60728574", "0.57777673", "0.5709095", "0.52786064", "0.51557845", "0.5135187", "0.5053703", "0.5041154", "0.5027755", "0.5015283", "0.5009702", "0.49824095", "0.49750274", "0.49732378", "0.49518126", "0.49454418", "0.49447972", "0.49405065", "0.49233353", "0.49216795", "0.4916753", "0.49041644", "0.4902163", "0.4885355", "0.4884826", "0.4877091", "0.48757622", "0.48717347", "0.48696283", "0.4863006", "0.48627517", "0.48447827", "0.48403448", "0.4834889", "0.48313788", "0.48297983", "0.48233858", "0.48036334", "0.47951692", "0.47937337", "0.47924054", "0.47869846", "0.47862282", "0.47828266", "0.4779825", "0.47712123", "0.47630313", "0.47613415", "0.47588754", "0.47512126", "0.47508568", "0.474808", "0.47360313", "0.47180617", "0.47148475", "0.4712992", "0.46986407", "0.46964324", "0.4693388", "0.4684574", "0.46820784", "0.46803665", "0.46603006", "0.46591735", "0.4655229", "0.465177", "0.46491504", "0.46460867", "0.4629997", "0.46263924", "0.46251214", "0.46177134", "0.46164605", "0.461281", "0.4611691", "0.46002126", "0.45968542", "0.45917505", "0.4589358", "0.45872706", "0.4586634", "0.45831543", "0.45739353", "0.45726126", "0.4567329", "0.45630276", "0.4560371", "0.45561662", "0.4553101", "0.45501444", "0.45480505", "0.45420265", "0.4541693", "0.45318258", "0.45300898", "0.4525396", "0.4523285", "0.45223218", "0.4521187" ]
0.55548596
4
Return any vowels found in a supplied word.
def search4vowels(word):
    vowels = set('aeiou')
    found = vowels.intersection(set(word))
    #return found
    for vowels in found:
        print(vowels)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _rv_standard(self, word, vowels):\n rv = \"\"\n if len(word) >= 2:\n if word[1] not in vowels:\n for i in range(2, len(word)):\n if word[i] in vowels:\n rv = word[i + 1 :]\n break\n\n elif word[0] in vowels and word[1] in vowels:\n for i in range(2, len(word)):\n if word[i] not in vowels:\n rv = word[i + 1 :]\n break\n else:\n rv = word[3:]\n\n return rv", "def find_words_using_all_vowels():\n pass", "def find_vowels(s):\n \"*** YOUR CODE HERE ***\"", "def censor_vowels(word):\n\n chars = []\n\n for letter in word:\n if letter in \"aeiou\":\n chars.append(\"*\")\n chars.append(letter)\n\n return \"\".join(vowels)", "def censor_vowels(word):\n\n chars = []\n\n for letter in word:\n if letter in \"aeiou\":\n chars.append(\"*\")\n else:\n chars.append(letter)\n \n return \"\".join(chars)", "def replace_vowels(word):\n variants = []\n for c in word:\n if c in vowels:\n for vowel in vowels:\n variants.append(word.replace(c, vowel))\n return variants", "def fry(word):\n\n # looks for a Y or y which will be (captured) followed and ended by an 'ou'\n match_you = re.match('([Yy])ou$', word)\n\n # First group will be the (captured) group so either 'Y' or 'y'\n if match_you:\n return match_you.group(1) + \"'all\"\n\n # looks for anyword ending in 'ing'\n match_ing = re.search('(.+)ing$', word)\n\n # checks if vowel exists before the 'ing'\n if match_ing:\n vowel_check = re.search('[aeiouy]', match_ing.group(1))\n # First group will be the (captured) group so everything before the 'ing'\n if vowel_check:\n return match_ing.group(1) + \"in'\"\n\n return word", "def main():\n word = input(\"Give me a word! \\n\\n\")\n vowels = ['a', 'e', 'i', 'o', 'u']\n if word[0].lower() in vowels:\n print(f\"\\n\\nPig latin: {word}way\")\n else:\n print(f\"\\n\\nPig latin: {word[1:]}{word[0]}ay\")", "def stem(self, word):\n word = word.lower()\n\n if word in self.__special_words:\n return self.__special_words[word]\n\n # Map the different apostrophe characters to a single consistent one\n word = (word.replace(u(\"\\u2019\"), u(\"\\x27\"))\n .replace(u(\"\\u2018\"), u(\"\\x27\"))\n .replace(u(\"\\u201B\"), u(\"\\x27\")))\n\n if word.startswith(u(\"\\x27\")):\n word = word[1:]\n\n if word.startswith(\"y\"):\n word = \"\".join((\"Y\", word[1:]))\n\n for i in range(1, len(word)):\n if word[i - 1] in self.__vowels and word[i] == \"y\":\n word = \"\".join((word[:i], \"Y\", word[i + 1:]))\n\n step1a_vowel_found = False\n step1b_vowel_found = False\n\n r1 = \"\"\n r2 = \"\"\n\n if word.startswith((\"gener\", \"commun\", \"arsen\")):\n if word.startswith((\"gener\", \"arsen\")):\n r1 = word[5:]\n else:\n r1 = word[6:]\n\n for i in range(1, len(r1)):\n if r1[i] not in self.__vowels and r1[i - 1] in self.__vowels:\n r2 = r1[i + 1:]\n break\n else:\n r1, r2 = self._r1r2_standard(word, self.__vowels)\n\n # STEP 0\n for suffix in self.__step0_suffixes:\n if word.endswith(suffix):\n word = word[:-len(suffix)]\n r1 = r1[:-len(suffix)]\n r2 = r2[:-len(suffix)]\n break\n\n # STEP 1a\n for suffix in self.__step1a_suffixes:\n if word.endswith(suffix):\n\n if suffix == \"sses\":\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n\n elif suffix in (\"ied\", \"ies\"):\n if len(word[:-len(suffix)]) > 1:\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n else:\n word = word[:-1]\n r1 = r1[:-1]\n r2 = r2[:-1]\n\n elif suffix == \"s\":\n for letter in word[:-2]:\n if letter in self.__vowels:\n step1a_vowel_found = True\n break\n\n if step1a_vowel_found:\n word = word[:-1]\n r1 = r1[:-1]\n r2 = r2[:-1]\n break\n\n # STEP 1b\n for suffix 
in self.__step1b_suffixes:\n if word.endswith(suffix):\n if suffix in (\"eed\", \"eedly\"):\n\n if r1.endswith(suffix):\n word = \"\".join((word[:-len(suffix)], \"ee\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ee\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ee\"))\n else:\n r2 = \"\"\n else:\n for letter in word[:-len(suffix)]:\n if letter in self.__vowels:\n step1b_vowel_found = True\n break\n\n if step1b_vowel_found:\n word = word[:-len(suffix)]\n r1 = r1[:-len(suffix)]\n r2 = r2[:-len(suffix)]\n\n if word.endswith((\"at\", \"bl\", \"iz\")):\n word = \"\".join((word, \"e\"))\n r1 = \"\".join((r1, \"e\"))\n\n if len(word) > 5 or len(r1) >= 3:\n r2 = \"\".join((r2, \"e\"))\n\n elif word.endswith(self.__double_consonants):\n word = word[:-1]\n r1 = r1[:-1]\n r2 = r2[:-1]\n\n elif ((r1 == \"\" and len(word) >= 3 and\n word[-1] not in self.__vowels and\n word[-1] not in \"wxY\" and\n word[-2] in self.__vowels and\n word[-3] not in self.__vowels)\n or\n (r1 == \"\" and len(word) == 2 and\n word[0] in self.__vowels and\n word[1] not in self.__vowels)):\n\n word = \"\".join((word, \"e\"))\n\n if len(r1) > 0:\n r1 = \"\".join((r1, \"e\"))\n\n if len(r2) > 0:\n r2 = \"\".join((r2, \"e\"))\n break\n\n # STEP 1c\n if (len(word) > 2\n and word[-1] in \"yY\"\n and word[-2] not in self.__vowels):\n word = \"\".join((word[:-1], \"i\"))\n if len(r1) >= 1:\n r1 = \"\".join((r1[:-1], \"i\"))\n else:\n r1 = \"\"\n\n if len(r2) >= 1:\n r2 = \"\".join((r2[:-1], \"i\"))\n else:\n r2 = \"\"\n\n # STEP 2\n for suffix in self.__step2_suffixes:\n if word.endswith(suffix):\n if r1.endswith(suffix):\n if suffix == \"tional\":\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n\n elif suffix in (\"enci\", \"anci\", \"abli\"):\n word = \"\".join((word[:-1], \"e\"))\n\n if len(r1) >= 1:\n r1 = \"\".join((r1[:-1], \"e\"))\n else:\n r1 = \"\"\n\n if len(r2) >= 1:\n r2 = \"\".join((r2[:-1], \"e\"))\n else:\n r2 = \"\"\n\n elif suffix == \"entli\":\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n\n elif suffix in (\"izer\", \"ization\"):\n word = \"\".join((word[:-len(suffix)], \"ize\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ize\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ize\"))\n else:\n r2 = \"\"\n\n elif suffix in (\"ational\", \"ation\", \"ator\"):\n word = \"\".join((word[:-len(suffix)], \"ate\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ate\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ate\"))\n else:\n r2 = \"e\"\n\n elif suffix in (\"alism\", \"aliti\", \"alli\"):\n word = \"\".join((word[:-len(suffix)], \"al\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"al\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"al\"))\n else:\n r2 = \"\"\n\n elif suffix == \"fulness\":\n word = word[:-4]\n r1 = r1[:-4]\n r2 = r2[:-4]\n\n elif suffix in (\"ousli\", \"ousness\"):\n word = \"\".join((word[:-len(suffix)], \"ous\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ous\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ous\"))\n else:\n r2 = \"\"\n\n elif suffix in (\"iveness\", \"iviti\"):\n word = \"\".join((word[:-len(suffix)], \"ive\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ive\"))\n else:\n r1 = \"\"\n\n if len(r2) >= 
len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ive\"))\n else:\n r2 = \"e\"\n\n elif suffix in (\"biliti\", \"bli\"):\n word = \"\".join((word[:-len(suffix)], \"ble\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ble\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ble\"))\n else:\n r2 = \"\"\n\n elif suffix == \"ogi\" and word[-4] == \"l\":\n word = word[:-1]\n r1 = r1[:-1]\n r2 = r2[:-1]\n\n elif suffix in (\"fulli\", \"lessli\"):\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n\n elif suffix == \"li\" and word[-3] in self.__li_ending:\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n break\n\n # STEP 3\n for suffix in self.__step3_suffixes:\n if word.endswith(suffix):\n if r1.endswith(suffix):\n if suffix == \"tional\":\n word = word[:-2]\n r1 = r1[:-2]\n r2 = r2[:-2]\n\n elif suffix == \"ational\":\n word = \"\".join((word[:-len(suffix)], \"ate\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ate\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ate\"))\n else:\n r2 = \"\"\n\n elif suffix == \"alize\":\n word = word[:-3]\n r1 = r1[:-3]\n r2 = r2[:-3]\n\n elif suffix in (\"icate\", \"iciti\", \"ical\"):\n word = \"\".join((word[:-len(suffix)], \"ic\"))\n\n if len(r1) >= len(suffix):\n r1 = \"\".join((r1[:-len(suffix)], \"ic\"))\n else:\n r1 = \"\"\n\n if len(r2) >= len(suffix):\n r2 = \"\".join((r2[:-len(suffix)], \"ic\"))\n else:\n r2 = \"\"\n\n elif suffix in (\"ful\", \"ness\"):\n word = word[:-len(suffix)]\n r1 = r1[:-len(suffix)]\n r2 = r2[:-len(suffix)]\n\n elif suffix == \"ative\" and r2.endswith(suffix):\n word = word[:-5]\n r1 = r1[:-5]\n r2 = r2[:-5]\n break\n\n # STEP 4\n for suffix in self.__step4_suffixes:\n if word.endswith(suffix):\n if r2.endswith(suffix):\n if suffix == \"ion\":\n if word[-4] in \"st\":\n word = word[:-3]\n r1 = r1[:-3]\n r2 = r2[:-3]\n else:\n word = word[:-len(suffix)]\n r1 = r1[:-len(suffix)]\n r2 = r2[:-len(suffix)]\n break\n\n # STEP 5\n if r2.endswith(\"l\") and word[-2] == \"l\":\n word = word[:-1]\n elif r2.endswith(\"e\"):\n word = word[:-1]\n elif r1.endswith(\"e\"):\n if len(word) >= 4 and (word[-2] in self.__vowels or\n word[-2] in \"wxY\" or\n word[-3] not in self.__vowels or\n word[-4] in self.__vowels):\n word = word[:-1]\n\n word = word.replace(\"Y\", \"y\")\n return word", "def count_syllables(word):\n vowels = \"aeiouy\"\n count = 0\n last_was_vowel = False\n for letter in word:\n found_vowel = False\n for v in vowels:\n if v == letter:\n if not last_was_vowel: count += 1 # don't count diphthongs\n found_vowel = last_was_vowel = True\n break\n if not found_vowel: # If full cycle and no vowel found, set last_was_vowel to false\n last_was_vowel = False\n\n\n if len(word) > 2 and word[-2:] == \"es\" and count > 1: # Remove es - it's \"usually\" silent (?)\n count -= 1\n\n if len(word) > 4 and word[-1:] == \"e\": # remove silent e\n count -= 1\n\n if len(word) > 1 and word[-2:] == \"ee\": # adds 1 for na\n count += 1\n\n if len(word) > 1 and word[-2:] == \"na\": # adds 1 for na\n count += 1\n\n # Check for special case words\n special_case = ['eloise','i']\n if word in special_case:\n count += 1\n\n return count", "def remove_vowels(phrase):\n vowels = ['a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U']\n cons_word = \"\".join([char for char in phrase if char not in vowels])\n return cons_word", "def count_vowels(word):\n\n vowels = re.compile(r'[aeouy]', re.IGNORECASE)\n return 
len(vowels.findall(word))", "def get_possible_vowels(self, word_set):\r\n \r\n vowels = \"\"\r\n for word in word_set:\r\n # Check if existing vowel is in word.\r\n if any(vowel in word for vowel in vowels):\r\n continue\r\n # Find most common letter and assume it's a vowel\r\n vowel, probability = '', 0\r\n for c in word:\r\n _, number = self.letters.get_value(c)\r\n if number > probability:\r\n vowel = c\r\n probability = number\r\n vowels += vowel\r\n return vowels", "def find_vowel(text: str) -> str:\r\n\r\n vowel = text.count('a') + text.count('o') + text.count('u') +\\\r\n text.count('i') + text.count('e') + text.count(\"y\") +\\\r\n text.count('A') + text.count('O') + text.count('U') +\\\r\n text.count('I') + text.count('E') + text.count('Y')\r\n\r\n return(vowel)", "def get_vowel_names():", "def calculate_construction(self, word):\r\n \r\n construction = \"\"\r\n for c in word.lower():\r\n if c in self.vowels:\r\n construction += \"v\"\r\n elif c in letters:\r\n construction += \"c\"\r\n return construction", "def needs_aou(word):\n return re.search(\"([aouAOU])[^yäöYÄÖ]*$\", word)", "def countsyllables_nlde(word):\r\n\tresult = 0\r\n\tprev_was_vowel = word[0] in VOWELS\r\n\tfor char in word[1:]:\r\n\t\tis_vowel = char in VOWELS\r\n\t\tif prev_was_vowel and not is_vowel:\r\n\t\t\tresult += 1\r\n\t\tprev_was_vowel = is_vowel\r\n\r\n\tif (len(word) > 1 and word[0] in VOWELS\r\n\t\t\tand word.endswith('e') and not word[-2] in VOWELS):\r\n\t\tresult += 1\r\n\treturn result or 1", "def find_words_no_vowels():\n f = open('session09/words.txt')\n num_no_e = 0\n num_words = 0\n for line in f:\n num_words += 1\n word = line.strip()\n if avoids(word, 'aeiou'):\n # print(word)\n num_no_e += 1\n # print(num_no_e, num_words)\n return num_no_e/num_words", "def search4vowels(phrase: str) -> set:\n vowels = set('aeiou')\n found = vowels.intersection(set(phrase))\n for vowel in found:\n print(vowel)", "def starts_with_vowel(word):\n return True if word[0] in 'aeiou' else False", "def _r1r2_standard(self, word, vowels):\n r1 = \"\"\n r2 = \"\"\n for i in range(1, len(word)):\n if word[i] not in vowels and word[i - 1] in vowels:\n r1 = word[i + 1 :]\n break\n\n for i in range(1, len(r1)):\n if r1[i] not in vowels and r1[i - 1] in vowels:\n r2 = r1[i + 1 :]\n break\n\n return (r1, r2)", "def analyse_vowels(self, source):\r\n\r\n word_set = set()\r\n with open(source) as f:\r\n for line in f:\r\n words = [word.lower().strip() for word in line.split()]\r\n for word in words:\r\n map(self.parse_character, word)\r\n stripped = ''.join(c for c in word if c in letters)\r\n if stripped:\r\n word_set.add(stripped)\r\n vowels = self.get_possible_vowels(word_set)\r\n return self.filter_vowels(vowels, word_set)", "def translate(self):\n\t\tvowels = \"aeiou\"\n\n\t\tif (self.word[0] not in vowels) and (self.word[1] in vowels):\n\t\t\tnew_word = self.word[1:] + self.word[0] + \"ay\"\n\t\telif self.word[0] in vowels:\n\t\t\tnew_word = self.word + \"way\"\n\t\telse:\n\t\t\tnew_word = self.word[2:] + self.word[:2] + \"ay\"\n\n\t\tprint(new_word)", "def estimate(word):\n parts = re.split(r'[^aeiouy]+', word)\n valid_parts = []\n\n for part in parts:\n if part != '':\n valid_parts.append(part)\n\n syllables = 0\n\n for p in re_subsyllables:\n if p.match(word):\n syllables -= 1\n\n for p in re_addsyllables:\n if p.match(word):\n syllables += 1\n\n syllables += len(valid_parts)\n\n if syllables <= 0:\n syllables = 1\n\n return syllables", "def step1c(self, word):\r\n\r\n if word.endswith('y'):\r\n result = 
word.rfind('y')\r\n base = word[:result]\r\n if self.containsVowel(base):\r\n word = base\r\n word += 'i'\r\n return word", "def disemvowel(string):\n to_return = ''\n for char in string:\n if char not in 'aeiouAEIOU':\n to_return += char\n return to_return", "def search_for_vowels(phrase:str) -> set:\n vowels = set('aeiou')\n return vowels.intersection(set(phrase))", "def equal_vowel_and_consonant(words):\n # print(words)\n vowel_consonant_equal = []\n for word in words:\n vowel_counts, consonant_counts = 0, 0\n for char in word:\n if char in 'aeiou':\n vowel_counts += 1\n elif char in 'bcdfghjklmnpqrstvwxyz':\n consonant_counts += 1\n\n if vowel_counts == consonant_counts:\n vowel_consonant_equal.append(word)\n return vowel_consonant_equal", "def search4vowels(phrase:str) -> set:\n vowels = set('aeiou')\n #word = input('Enter a word to search for vowels:')\n found = vowels.intersection(set(phrase))\n #for vowel in found:\n #print(vowel)\n\n return (found)", "def word_syllables(word):\n\n count = 0\n endings = '!@#$%^&*()_+[]{}:;,.eE\"'+\"'\"\n\n while word[-1] in endings:\n word = word[: -1]\n\n if len(word) <= 3:\n return 1\n\n vows = 'aeiouAEIOU'\n prev_char_vow = False\n for char in word:\n if char in vows:\n if not prev_char_vow:\n count = count + 1\n prev_char_vow = True\n else:\n prev_char_vow = False\n\n if word[-1] in 'Yy':\n count = count + 1\n\n return count", "def num_syllables(self, word):\n \"\"\"\n using the logic of vowel counting, count all vowels in the pronunciations\n \"\"\"\n dictionary = self._pronunciations;\n # check if word is present in the CMU dictionary\n if word in dictionary :\n word_pronunciations = dictionary[word.lower()]\n else :\n return 1\n \n vowels = ['A', 'E', 'I', 'O', 'U']\n \n ## find the shorter pronunciation for word\n shorter_arr = [];\n for pronunciation in word_pronunciations :\n if len(pronunciation) > len(shorter_arr) : shorter_arr = pronunciation\n \n num_length = 0\n \n for phoneme in shorter_arr :\n if phoneme[:1] in vowels : num_length += 1\n \n return num_length", "def most_repeating_vowels(words):\n return ' '.join([x for x in WORDS if len(x) == len(sv_count(words)[-1])])", "def has_more_vowels(word):\n\n# If the phrase is over half vowels, it should return True:\n\n # intialize a vowel count variable \n # Loop through the letters of the word:\n # if the letter is in the set of vowels, increment the vowel count\n # if vowel count is greater than length of the word divided by 2, return True\n # else return false\n\n\n vowel_count = 0\n\n for letter in word:\n if letter.lower() in {\"a\", \"e\", \"i\", \"o\", \"u\"}:\n vowel_count += 1\n\n if vowel_count > (len(word) / 2):\n return True\n\n return False", "def get_nyx(words, vowels):\n nyx_list = \"\"\n for word in words:\n word = word.lower()\n if not bool(vowels.intersection(word)) and (\"x\" in word or \"y\" in word) and \"'\" not in word:\n nyx_list += word.strip() + \", \"\n return nyx_list", "def stem(self, word):\n word = word.lower()\n\n step1_success = False\n\n # All acute accents are replaced by grave accents.\n word = (word.replace(u(\"\\xE1\"), u(\"\\xE0\"))\n .replace(u(\"\\xE9\"), u(\"\\xE8\"))\n .replace(u(\"\\xED\"), u(\"\\xEC\"))\n .replace(u(\"\\xF3\"), u(\"\\xF2\"))\n .replace(u(\"\\xFA\"), u(\"\\xF9\")))\n\n # Every occurrence of 'u' after 'q'\n # is put into upper case.\n for i in range(1, len(word)):\n if word[i - 1] == \"q\" and word[i] == \"u\":\n word = \"\".join((word[:i], \"U\", word[i + 1:]))\n\n # Every occurrence of 'u' and 'i'\n # between vowels is put 
into upper case.\n for i in range(1, len(word) - 1):\n if word[i - 1] in self.__vowels and word[i + 1] in self.__vowels:\n if word[i] == \"u\":\n word = \"\".join((word[:i], \"U\", word[i + 1:]))\n elif word[i] == \"i\":\n word = \"\".join((word[:i], \"I\", word[i + 1:]))\n\n r1, r2 = self._r1r2_standard(word, self.__vowels)\n rv = self._rv_standard(word, self.__vowels)\n\n # STEP 0: Attached pronoun\n for suffix in self.__step0_suffixes:\n if rv.endswith(suffix):\n if rv[-len(suffix) - 4:-len(suffix)] in (\"ando\", \"endo\"):\n word = word[:-len(suffix)]\n r1 = r1[:-len(suffix)]\n r2 = r2[:-len(suffix)]\n rv = rv[:-len(suffix)]\n\n elif (rv[-len(suffix) - 2:-len(suffix)] in\n (\"ar\", \"er\", \"ir\")):\n word = \"\".join((word[:-len(suffix)], \"e\"))\n r1 = \"\".join((r1[:-len(suffix)], \"e\"))\n r2 = \"\".join((r2[:-len(suffix)], \"e\"))\n rv = \"\".join((rv[:-len(suffix)], \"e\"))\n break\n\n # STEP 1: Standard suffix removal\n for suffix in self.__step1_suffixes:\n if word.endswith(suffix):\n if suffix == \"amente\" and r1.endswith(suffix):\n step1_success = True\n word = word[:-6]\n r2 = r2[:-6]\n rv = rv[:-6]\n\n if r2.endswith(\"iv\"):\n word = word[:-2]\n r2 = r2[:-2]\n rv = rv[:-2]\n\n if r2.endswith(\"at\"):\n word = word[:-2]\n rv = rv[:-2]\n\n elif r2.endswith((\"os\", \"ic\")):\n word = word[:-2]\n rv = rv[:-2]\n\n elif r2 .endswith(\"abil\"):\n word = word[:-4]\n rv = rv[:-4]\n\n elif (suffix in (\"amento\", \"amenti\",\n \"imento\", \"imenti\") and\n rv.endswith(suffix)):\n step1_success = True\n word = word[:-6]\n rv = rv[:-6]\n\n elif r2.endswith(suffix):\n step1_success = True\n if suffix in (\"azione\", \"azioni\", \"atore\", \"atori\"):\n word = word[:-len(suffix)]\n r2 = r2[:-len(suffix)]\n rv = rv[:-len(suffix)]\n\n if r2.endswith(\"ic\"):\n word = word[:-2]\n rv = rv[:-2]\n\n elif suffix in (\"logia\", \"logie\"):\n word = word[:-2]\n rv = word[:-2]\n\n elif suffix in (\"uzione\", \"uzioni\",\n \"usione\", \"usioni\"):\n word = word[:-5]\n rv = rv[:-5]\n\n elif suffix in (\"enza\", \"enze\"):\n word = \"\".join((word[:-2], \"te\"))\n rv = \"\".join((rv[:-2], \"te\"))\n\n elif suffix == u(\"it\\xE0\"):\n word = word[:-3]\n r2 = r2[:-3]\n rv = rv[:-3]\n\n if r2.endswith((\"ic\", \"iv\")):\n word = word[:-2]\n rv = rv[:-2]\n\n elif r2.endswith(\"abil\"):\n word = word[:-4]\n rv = rv[:-4]\n\n elif suffix in (\"ivo\", \"ivi\", \"iva\", \"ive\"):\n word = word[:-3]\n r2 = r2[:-3]\n rv = rv[:-3]\n\n if r2.endswith(\"at\"):\n word = word[:-2]\n r2 = r2[:-2]\n rv = rv[:-2]\n\n if r2.endswith(\"ic\"):\n word = word[:-2]\n rv = rv[:-2]\n else:\n word = word[:-len(suffix)]\n rv = rv[:-len(suffix)]\n break\n\n # STEP 2: Verb suffixes\n if not step1_success:\n for suffix in self.__step2_suffixes:\n if rv.endswith(suffix):\n word = word[:-len(suffix)]\n rv = rv[:-len(suffix)]\n break\n\n # STEP 3a\n if rv.endswith((\"a\", \"e\", \"i\", \"o\", u(\"\\xE0\"), u(\"\\xE8\"),\n u(\"\\xEC\"), u(\"\\xF2\"))):\n word = word[:-1]\n rv = rv[:-1]\n\n if rv.endswith(\"i\"):\n word = word[:-1]\n rv = rv[:-1]\n\n # STEP 3b\n if rv.endswith((\"ch\", \"gh\")):\n word = word[:-1]\n\n word = word.replace(\"I\", \"i\").replace(\"U\", \"u\")\n return word", "def syllable_count(word):\n # Count the vowels in the word\n # Subtract one vowel from every dipthong\n count = len(re.findall(r'([aeiouyAEIOUY]+)', word))\n # Subtract any silent vowels\n if len(word) > 2:\n if word[-1] == 'e' and \\\n not is_vowel(word[-2]) and \\\n is_vowel(word[-3]):\n count = count - 1\n return count", "def 
remove_vowels(string: str) -> str:\n return \" \".join([word for word in string.split() if word[0].lower() not in ['a', 'e', 'i', 'o', 'u']])", "def pig_latinify(word):\n\n first_letter = word[0]\n\n if first_letter in VOWELS:\n output_word = word + \"yay\"\n else:\n #scan for vowel if word starts with a consonant\n for i in range(len(word)):\n individual_letter = word[i]\n if individual_letter in VOWELS:\n output_word = word[i:] + word[:i] + \"ay\"\n break\n else:\n continue\n\n return output_word", "def makePigLatin(word): \n m = len(word)\n vowels = \"a\", \"e\", \"i\", \"o\", \"u\", \"y\" \n # short words are not converted \n if m<3 or word==\"the\":\n return word\n else:\n for i in vowels:\n if word.find(i) < m and word.find(i) != -1:\n m = word.find(i)\n if m==0:\n return word+\"way\" \n else:\n return word[m:]+word[:m]+\"ay\"", "def inner(word):\n return word + '!!!'", "def pig_word(self, original):\n word = original.lower()\n if word[0] in \"aeiou\":\n new_word = word + 'ay'\n else:\n new_word = word[1:] + word[0] + 'ay'\n return new_word", "def _get_vowels(sequence: str) -> list:\n vowels = []\n for char in sequence:\n if char in VOWELS:\n vowels.append(char)\n return vowels", "def disemvowel(val):\n c_list = list(val)\n for char in list(val):\n if char.lower() in [\"a\", \"e\", \"i\", \"o\", \"u\"]:\n c_list.remove(char)\n return \"\".join(c_list)", "def search(self, word):", "def stem(self, word):\n word = word.lower()\n\n if word in self.stopwords:\n return word\n\n step1_success = False\n\n r1, r2 = self._r1r2_standard(word, self.__vowels)\n rv = self._rv_standard(word, self.__vowels)\n\n # STEP 0: Attached pronoun\n for suffix in self.__step0_suffixes:\n if not (word.endswith(suffix) and rv.endswith(suffix)):\n continue\n\n if (\n rv[: -len(suffix)].endswith(\n (\n \"ando\",\n \"ar\",\n \"er\",\n \"iendo\",\n \"ir\",\n )\n )\n ) or (\n rv[: -len(suffix)].endswith(\"yendo\")\n and word[: -len(suffix)].endswith(\"uyendo\")\n ):\n\n word = self.__replace_accented(word[: -len(suffix)])\n r1 = self.__replace_accented(r1[: -len(suffix)])\n r2 = self.__replace_accented(r2[: -len(suffix)])\n rv = self.__replace_accented(rv[: -len(suffix)])\n break\n\n # STEP 1: Standard suffix removal\n for suffix in self.__step1_suffixes:\n if not word.endswith(suffix):\n continue\n\n if suffix == \"amente\" and r1.endswith(suffix):\n step1_success = True\n word = word[:-6]\n r2 = r2[:-6]\n rv = rv[:-6]\n\n if r2.endswith(\"iv\"):\n word = word[:-2]\n r2 = r2[:-2]\n rv = rv[:-2]\n\n if r2.endswith(\"at\"):\n word = word[:-2]\n rv = rv[:-2]\n\n elif r2.endswith((\"os\", \"ic\", \"ad\")):\n word = word[:-2]\n rv = rv[:-2]\n\n elif r2.endswith(suffix):\n step1_success = True\n if suffix in (\n \"adora\",\n \"ador\",\n \"acion\",\n \"adoras\",\n \"adores\",\n \"aciones\",\n \"ante\",\n \"antes\",\n \"ancia\",\n \"ancias\",\n ):\n word = word[: -len(suffix)]\n r2 = r2[: -len(suffix)]\n rv = rv[: -len(suffix)]\n\n if r2.endswith(\"ic\"):\n word = word[:-2]\n rv = rv[:-2]\n\n elif suffix in (\"logia\", \"logias\"):\n word = suffix_replace(word, suffix, \"log\")\n rv = suffix_replace(rv, suffix, \"log\")\n\n elif suffix in (\"ucion\", \"uciones\"):\n word = suffix_replace(word, suffix, \"u\")\n rv = suffix_replace(rv, suffix, \"u\")\n\n elif suffix in (\"encia\", \"encias\"):\n word = suffix_replace(word, suffix, \"ente\")\n rv = suffix_replace(rv, suffix, \"ente\")\n\n elif suffix == \"mente\":\n word = word[: -len(suffix)]\n r2 = r2[: -len(suffix)]\n rv = rv[: -len(suffix)]\n\n if r2.endswith((\"ante\", 
\"able\", \"ible\")):\n word = word[:-4]\n rv = rv[:-4]\n\n elif suffix in (\"idad\", \"idades\"):\n word = word[: -len(suffix)]\n r2 = r2[: -len(suffix)]\n rv = rv[: -len(suffix)]\n\n for pre_suff in (\"abil\", \"ic\", \"iv\"):\n if r2.endswith(pre_suff):\n word = word[: -len(pre_suff)]\n rv = rv[: -len(pre_suff)]\n\n elif suffix in (\"ivo\", \"iva\", \"ivos\", \"ivas\"):\n word = word[: -len(suffix)]\n r2 = r2[: -len(suffix)]\n rv = rv[: -len(suffix)]\n if r2.endswith(\"at\"):\n word = word[:-2]\n rv = rv[:-2]\n else:\n word = word[: -len(suffix)]\n rv = rv[: -len(suffix)]\n break\n\n # STEP 2a: Verb suffixes beginning 'y'\n if not step1_success:\n for suffix in self.__step2a_suffixes:\n if rv.endswith(suffix) and word[-len(suffix) - 1 : -len(suffix)] == \"u\":\n word = word[: -len(suffix)]\n rv = rv[: -len(suffix)]\n break\n\n # STEP 2b: Other verb suffixes\n for suffix in self.__step2b_suffixes:\n if rv.endswith(suffix):\n word = word[: -len(suffix)]\n rv = rv[: -len(suffix)]\n if suffix in (\"en\", \"es\", \"eis\", \"emos\"):\n if word.endswith(\"gu\"):\n word = word[:-1]\n\n if rv.endswith(\"gu\"):\n rv = rv[:-1]\n break\n\n # STEP 3: Residual suffix\n for suffix in self.__step3_suffixes:\n if rv.endswith(suffix):\n word = word[: -len(suffix)]\n if suffix in (\"e\", \"\\xE9\"):\n rv = rv[: -len(suffix)]\n\n if word[-2:] == \"gu\" and rv.endswith(\"u\"):\n word = word[:-1]\n break\n\n word = self.__replace_accented(word)\n\n return word", "def vowel_with_for(character):\r\n\tif character in vowels:\r\n\t\tprint(\"Entered character is vowel..!\")\r\n\telse:\r\n\t\tprint(\"Not a Vowel\")", "def replaceOOV(word):\n if word in self.vocab: return word\n else: return self.oov", "def generate_vowel():\n return random.sample(['a', 'e', 'i', 'o', 'u', 'y'], 1)", "def vowels(self):\n vas = []\n file = self.read()\n words = re.sub(\"[aeiouAEIOU]\",\" \", file).split(\" \")\n for h_u in words:\n if h_u != \"\":\n vas.append(h_u)\n self.print(vas)\n self.write(vas)\n logging.debug(\"Starting with to\")\n return vas", "def number_of_vowels(find_vowels):\n v = [\"a\", \"e\", \"i\", \"o\", \"u\", \"y\"]\n new_vowels = find_vowels.lower()\n vowels = \"\"\n for x in new_vowels:\n if x in v:\n vowels += x\n return len(vowels)", "def avg_vowels(self, text):\n val = 0\n if text:\n text = text.replace(\"\\n\", \"\")\n text = text.replace(\",\", \"\")\n text = text.replace(\"'\", \"\")\n it = (map(text.lower().count, \"aeiouyæøå\"))\n word_count = len(text.split(\" \"))\n it_sum = 0\n for x in it:\n it_sum += +x\n if word_count == 0:\n return 0\n val = round(it_sum/word_count, 2)\n print(\"avg vowels returned\", val)\n return val", "def print_upper_words_e(words):\n\n for word in words:\n if(word[0] == 'e' or word[0] == 'E'):\n print(word.upper())", "def print_upper_words2(words):\n for word in words:\n if word.startswith('e') or word.startswith('E'):\n print(word.upper())", "def form_ing(word):\n\n # last char of the word\n last = word[-1]\n\n if last == 'e':\n return word[:-1] + 'ing'\n elif last == 'r':\n if word[-2] == 'a': \n return word + \"ring\"\n elif last in ['b', 'd', 'g', 'm', 'n', 'p', 't']:\n if _is_vowel(word[-2]) and not (_is_vowel(word[-3])):\n return word + word[-1] + \"ing\"\n\n return word + \"ing\"", "def vowelcount(s):\n s = s.lower()\n nv = 0\n for v in 'aeiou':\n nv += s.count(v)\n return nv", "def pig_latinify(word):\n result = \"\"\n if len(word) > 0 and word.isalpha():\n first = word[0]\n if is_vowel(first): # starts with a vowel\n result = str(word) + \"yay\"\n else: # starts 
with non-vowel\n cut = position_of_vowel(word) # where to cut the word\n if cut > 0: # \"street\"-->\"eet+str+ay\"\n result = word[cut:] + word[:cut] + \"ay\"\n else: # no vowel found\n result = word + \"ay\"\n else:\n result = 'Only letters allowed!'\n\n return result", "def filter_vowels(self, vowels, word_set, iterations=10):\r\n \r\n true_vowels = vowels\r\n for i in range(iterations):\r\n vowels = true_vowels\r\n # Go backwards as the last ones are least likely.\r\n for vowel in vowels[::-1]:\r\n uses = 0\r\n for word in word_set:\r\n if vowel not in word or len(word) < 4:\r\n continue\r\n word_ = word.replace(vowel, '')\r\n # Check if no other vowels are in this word.\r\n if not any(v in word_ for v in true_vowels):\r\n uses += 1\r\n if uses > i:\r\n break\r\n else:\r\n true_vowels = true_vowels.replace(vowel, '')\r\n return vowels", "def process_word(self, word: str) -> list[str]:\n d = self.d\n if not d:\n return None\n if d.check(word):\n return None\n # Speed doesn't matter here. The more we find, the more convenient.\n # Remove all digits.\n word = ''.join([i for i in word if not i.isdigit()])\n if d.check(word) or d.check(word.lower()):\n return None\n if word.find('_') > -1:\n # Snake case.\n words = word.split('_')\n for word2 in words:\n if not d.check(word2) and not d.check(word2.lower()):\n return d.suggest(word)\n return None\n words = g.unCamel(word)\n if words:\n for word2 in words:\n if not d.check(word2) and not d.check(word2.lower()):\n return d.suggest(word)\n return None\n return d.suggest(word)", "def process_word(self, word: str) -> list[str]:\n d = self.d\n if not d:\n return None\n if d.check(word):\n return None\n # Speed doesn't matter here. The more we find, the more convenient.\n # Remove all digits.\n word = ''.join([i for i in word if not i.isdigit()])\n if d.check(word) or d.check(word.lower()):\n return None\n if word.find('_') > -1:\n # Snake case.\n words = word.split('_')\n for word2 in words:\n if not d.check(word2) and not d.check(word2.lower()):\n return d.suggest(word)\n return None\n words = g.unCamel(word)\n if words:\n for word2 in words:\n if not d.check(word2) and not d.check(word2.lower()):\n return d.suggest(word)\n return None\n return d.suggest(word)", "async def owoify(self, ctx: Message, *, owome: str = None):\n\t\tif owome == None:\n\t\t\treturn await self.send(\n\t\t\t f\"{ctx.author.mention} Missing Required Argument - Usage: d!owoify <message> • Example: d!owoify dont hurt me!\"\n\t\t\t)\n\t\telse:\n\t\t\towome = owome.replace(\"r\", \"w\")\n\t\t\towome = owome.replace(\"l\", \"w\")\n\t\t\towome = owome.replace(\"g\", \"w\")\n\t\t\towome = owome.replace(\"R\", \"W\")\n\t\t\towome = owome.replace(\"L\", \"W\")\n\t\t\towome = owome.replace(\"G\", \"W\")\n\t\t\towome = owome.replace(\"ove\", \"uv\")\n\n\t\t\treplacing_words = 'aeiou'\n\t\t\tupperreplacing_words = 'AEIOU'\n\t\t\treplacewithuwu = '!?/_.+'\n\n\t\t\tuwufaces = [\n\t\t\t \"ᓀ˵▾˵ᓂ\", \">_<\", \"^▽^\", \"❛ ᴗ ❛\", \"UwU\", \"OwO\", \"QwQ\", \"≧▽≦\"\n\t\t\t]\n\t\t\tfor i in owome:\n\t\t\t\tif i in replacewithuwu:\n\t\t\t\t\towomee = owome.replace(i, random.choice(uwufaces))\n\n\t\t\tfor i in owome:\n\t\t\t\tif i in upperreplacing_words:\n\t\t\t\t\towomee = owome.replace(i, f\"Y{i}\")\n\n\t\t\tfor i in owome:\n\t\t\t\tif i in replacing_words:\n\t\t\t\t\towomee = owome.replace(i, f\"y{i}\")\n\n\t\t\tawait self.send(f\"{owomee}ㅤㅤ• {ctx.author.mention}\")", "def countsyllables_en(word):\r\n\tif not word:\r\n\t\treturn 0\r\n\r\n\t# Remove final silent 'e'\r\n\tif word[-1] == 
\"e\":\r\n\t\tword = word[:-1]\r\n\r\n\t# Check for a cached syllable count\r\n\tif word in fallback_cache:\r\n\t\treturn fallback_cache[word]\r\n\r\n\t# Count vowel groups\r\n\tresult = 0\r\n\tprev_was_vowel = False\r\n\tfor char in word:\r\n\t\tis_vowel = char in VOWELS or char == 'y'\r\n\t\tif is_vowel and not prev_was_vowel:\r\n\t\t\tresult += 1\r\n\t\tprev_was_vowel = is_vowel\r\n\r\n\t# Add & subtract syllables\r\n\tfor r in fallback_addsyl:\r\n\t\tif r.search(word):\r\n\t\t\tresult += 1\r\n\tfor r in fallback_subsyl:\r\n\t\tif r.search(word):\r\n\t\t\tresult -= 1\r\n\r\n\t# Cache the syllable count\r\n\tfallback_cache[word] = result\r\n\r\n\treturn result", "def replace_vowels(chars):\n\n replaced = []\n vowels = {'a', 'e', 'i', 'o', 'u'}\n\n for char in chars:\n if char.lower() in vowels:\n replaced.append('*')\n else:\n replaced.append(char)\n\n return replaced", "def is_english_vowel(c):\n # y was included in the vowel set guided by the tests.\n return c in 'aeiouyAEIOUY'", "def is_vowel(text):\n return text.lower() in AVRO_VOWELS", "def check(word):\n if 'ie' in word:\n print('{} doesn\\'t follow the rule'.format(word))\n elif 'cie' in word:\n print('{} doesn\\'t follow the rule'.format(word))\n else:\n print('{} does follow the rule'.format(word))", "def test_one_disemvowel_code_wars():\n from disemvowel_trolls import disemvowel\n tests = [(\"This website is for losers LOL!\", \"Ths wbst s fr lsrs LL!\"),\n (\"No offense but,\\nYour writing is among the worst I've everread\",\n \"N ffns bt,\\nYr wrtng s mng th wrst 'v vrrd\"),\n (\"What are you, a communist?\", \"Wht r y, cmmnst?\")]\n\n for case in tests:\n assert disemvowel(case[0]) == case[1]", "def removeVowels(self, S: str) -> str:\n \n vowel_list = ['a', 'e', 'i', 'o', 'u']\n output = \"\"\n \n for letter in S:\n \n if letter not in vowel_list:\n \n output += letter\n \n else:\n continue\n \n return output", "def eval_word(word):\n\tbasis = Generators.standard_basis((2,1))\n\tproduct = Automorphism(basis, basis)\n\tassert product.is_identity()\n\twhile len(word) >= 2:\n\t\tletter, sub, word = word[0], int(word[1]), word[2:]\n\t\taut = genrs[sub]\n\t\tif letter == 'y':\n\t\t\taut = ~aut\n\t\tproduct *= aut\n\treturn product", "def count_vowels(string):\n \n vowel = 0\n \n for i in string:\n if i.lower() in 'aeiou':\n vowel = vowel + 1\n \n return vowel", "def upper_vowel(s):\n for k, v in REPLACED_MAP.iteritems():\n s = s.replace(k, v)\n return s", "def position_of_vowel(s):\n for i in range(len(s)):\n if is_vowel(s[i]):\n return i\n return -1 # no vowel at all", "def does_it_have_3_vowels(word):\n vowel_count = 0\n vowels = \"aeiou\"\n for char in word:\n if char in vowels:\n vowel_count += 1\n\n if vowel_count > 2:\n return True\n else:\n return False", "def get_value(word):\n letters = 'abcdefghijklmnopqrstuvwxyz'\n sum = 0\n for letter in word:\n letter_value = letters.find(letter)\n if letter_value == -1:\n letter_value = 0\n sum += letter_value\n return sum", "def count_syllables_in_word(word):\n\n count = 0\n\n endings = '!,;.?:'\n last_char = word[-1]\n\n if last_char in endings:\n processed_word = word[0:-1]\n else:\n processed_word = word\n\n\n if len(processed_word) <= 3:\n return 1\n if processed_word[-1] in 'Ee':\n processed_word = processed_word[0:-1]\n\n vowels = 'aeiouAEIOU'\n prev_char_was_vowel = False\n\n for char in processed_word:\n if char in vowels:\n if not prev_char_was_vowel:\n count += 1\n prev_char_was_vowel = True\n\n else:\n prev_char_was_vowel = False\n\n if processed_word[-1] in 'yY':\n 
count += 1\n \n\n return count", "def a_or_an(s):\n if s[0].lower() in 'aeiou':\n return 'an'\n return 'a'", "def get_word(w):\n return ''.join(c for c in w if c.isalpha()).lower()", "def disambiguate(self, word):\n matches = re.match(r'^pen([cdjz])(.*)$', word)\n if matches:\n return matches.group(1) + matches.group(2)", "def filter(self, word):\n \n word = word.lower()\n try:\n self.engine.fetch(word)\n except socket.error:\n raise LemmaAPIError\n part_of_speeches = self.engine.part_of_speeches\n\n \n self.basic_form = word\n for part in part_of_speeches:\n if part == 'verb':\n if self.engine.is_verb_conjugated():\n if not self.conEngine.is_verb_regular(word, self.engine.get_basic_verb()):\n self.basic_form = self.engine.get_basic_verb()\n return word\n else:\n self.basic_form = self.engine.get_basic_verb()\n\n elif part == 'noun':\n if self.engine.is_noun_plural():\n if not self.conEngine.is_noun_regular(word, self.engine.get_singular_noun()):\n self.basic_form = self.engine.get_singular_noun() \n return word\n else:\n self.basic_form = self.engine.get_singular_noun()\n\n return self.basic_form", "def vowelcount (x):\n x.lower ()\n print (x.count('a')+x.count('e')+x.count('i')+x.count('o')+x.count('u'))", "def num_vowels(s):\n if s == '':\n return 0\n else:\n num_in_rest = num_vowels(s[1:])\n if s[0] in 'aeiou':\n return 1 + num_in_rest\n else:\n return 0 + num_in_rest", "def learn_vowels(self, data=None):\n #pdb.set_trace()\n if not data:\n data = self.memory\n # find acoustic prototypes by clustering over stored acoustic reps\n raw_data = data.reshape(4 * len(self.stems), 2)\n ac_vowels, ac_spread = vq.kmeans(raw_data, 4)\n # find articulatory reps by comparing synthesized output vowels to\n # acoustic prototypes\n # start with candidate list of \"all possible\" articulations\n tmp_ar = N.empty((1, 3))\n rd = 0.0\n for hi in [0.0, 1.0]:\n for bk in [0.0, 1.0]:\n tmp_ar = N.vstack((tmp_ar, N.array([hi, bk, rd])))\n tmp_ar = tmp_ar[1:]\n while len(self.vowel_map) < 4:\n # no noise (since this shouldn't be running through the \"mouth\")\n tmp_ac = self.perceive(self.acoustify(tmp_ar))\n for v in ac_vowels:\n dists = N.sqrt(N.sum((v - tmp_ac)**2, axis=1))\n d = 0\n while True:\n if dists[d] < (2 * ac_spread):\n # found an articulatory prototype\n self.vowel_map[tuple(v)] = tmp_ar[d]\n # remove it from the candidate list\n tmp_ar = N.vstack((tmp_ar[:d], tmp_ar[d + 1:]))\n tmp_ac = N.vstack((tmp_ac[:d], tmp_ac[d + 1:]))\n break\n d += 1\n if d == len(dists):\n # take the best of the bad ones\n index = N.argmin(dists)\n self.vowel_map[tuple(v)] = tmp_ar[index]\n break\n self.vowel_spread = ac_spread\n return self.vowel_map", "def last_char_to_vowel(word):\n assert isinstance(word, str)\n # We iterate over characters of the word, because the last might be a\n # punctuation, perhaps.\n for last in reversed(word):\n last = last.lower()\n for ch, prev in ((\"a\", \"a/+£\"),\n (\"e\", \"eébcçdgptvwz&*:.\"),\n (\"o\", \"ohk€å\"),\n (\"ä\", \"äflmnrsx§\"),\n (\"ö\", \"ö\"),\n (\"i\", \"ij%$\"),\n (\"u\", \"uq,\"),\n (\"y\", \"yü\")):\n if last in prev:\n return ch\n return \"e\"", "def main():\n sample_text = \"\\/\\/ |-| @ -|- ! $ -|- |-| ! 
$ ?\"\n vowel_frequencies = get_vowel_frequency(sample_text)\n print(vowel_frequencies)", "def reverse_vowels(s):\n\n phrase = \"\"\n vowels = []\n for letter in s:\n if letter.lower() in \"aeiou\":\n phrase += \"~\"\n vowels.append(letter)\n else: \n phrase += letter\n \n index = 0\n new_phrase = \"\"\n vowels = vowels[-1:-len(vowels)-1:-1]\n \n for letter in phrase:\n\n if letter == \"~\":\n new_phrase += vowels[index]\n index += 1\n else:\n new_phrase += letter\n\n return new_phrase", "def test_convert_single_vowel():\n for vowel in \"aeiou\":\n result = convert(vowel)\n assert result == vowel + \"way\"", "def good_word(self, word):\r\n return word.strip().lower()", "def map_word(self, word):\n for invariance in self.invariances:\n word = invariance.map_word(word)\n return word", "def verb_lemma(word):\n if word.endswith(\"ed\"):\n if word[:-2].endswith(\"v\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"at\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"it\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"et\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"ut\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"ac\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"i\"):\n return word[:-3].lower() + \"y\"\n elif word[:-2].endswith(\"ir\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"ag\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"nc\"):\n return word[:-2].lower() + \"e\"\n elif word[:-2].endswith(\"nu\"):\n return word[:-2].lower() + \"e\"\n else:\n return word[:-2].lower() \n elif word.endswith(\"ing\"):\n if word[:-3].endswith(\"v\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"at\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"it\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"et\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"ut\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"ac\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"i\"):\n return word[:-4].lower() + \"y\"\n elif word[:-3].endswith(\"ir\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"ag\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"nc\"):\n return word[:-3].lower() + \"e\"\n elif word[:-3].endswith(\"nu\"):\n return word[:-3].lower() + \"e\"\n else:\n return word[:-3].lower()\n elif re.match(r\"(does|did|done)\", word):\n return (\"do\")\n elif re.match(r\"(is|are|am|was|will|were|been)\", word):\n return (\"be\")\n elif word == (\"'s\"):\n return (\"be\")\n elif re.match(r\"(had|has|'ve)\", word):\n return (\"have\")\n else:\n return word.lower()", "def make_anki_cloze(sentence: str, word: str) -> str:\n\n return re.subn(\n r\"(^\\W*| \\W*)(%s)(\\W* |\\W*$)\" % word,\n r\"\\1{{c1::\\2}}\\3\",\n sentence,\n flags=re.IGNORECASE,\n )[0]", "def find_abecedarian_words():\n pass", "def calc_word_value(word):\n return sum([LETTER_SCORES.get(letter.upper(), 0) for letter in word])", "def words_with_3_or_more_vowel(words):\n return [vowel for vowel in words if re.match(r'(\\w*[aeiou]\\w*){3,}', vowel)]", "def profanity_word_handler(word):\n return word[0] + ''.join([settings.CENSOR_PROFANITY_REPLACEMENT_CHARACTER for I in range(len(word)-2)]) + word [-1]", "def test_find_word(self):\n self.assertEqual(find_word('GREEN'), [(1, 1), (1, 1), (0, 9)])\n self.assertEqual(find_word('ABSENT'), [])\n self.assertEqual(find_word('PW'), [(1, 7), (3, 
7), (0, 8)])", "def analyze_word(s):\n\n a = {}\n a['word'] = s\n a['n_letters'] = len(s)\n a['n_vowels'] = count_vowels(s)\n \n return a", "def translate_leet(phrase):", "def correctWord (w):\n\n if len(re.findall(ur\"[а-я]\",w))>len(re.findall(ur\"[a-z]\",w)):\n return w.translate(eng_rusTranslateTable)\n else:\n return w.translate(rus_engTranslateTable)", "def _process(self, word: str) -> List[str]:\n # if a blank arrives from splitting, just return an empty list\n if len(word.strip()) == 0:\n return []\n word = self.convert_consonantal_i(word)\n my_word = \" \" + word + \" \"\n letters = list(my_word)\n positions = []\n for dipth in self.diphthongs:\n if dipth in my_word:\n dipth_matcher = re.compile(\"{}\".format(dipth))\n matches = dipth_matcher.finditer(my_word)\n for match in matches:\n (start, end) = match.span()\n positions.append(start)\n matches = self.kw_matcher.finditer(my_word)\n for match in matches:\n (start, end) = match.span()\n positions.append(start)\n letters = string_utils.merge_next(letters, positions)\n letters = string_utils.remove_blanks(letters)\n positions.clear()\n if not self._contains_vowels(\"\".join(letters)):\n return [\n \"\".join(letters).strip()\n ] # occurs when only 'qu' appears by ellision\n positions = self._starting_consonants_only(letters)\n while len(positions) > 0:\n letters = string_utils.move_consonant_right(letters, positions)\n letters = string_utils.remove_blanks(letters)\n positions = self._starting_consonants_only(letters)\n positions = self._ending_consonants_only(letters)\n while len(positions) > 0:\n letters = string_utils.move_consonant_left(letters, positions)\n letters = string_utils.remove_blanks(letters)\n positions = self._ending_consonants_only(letters)\n positions = self._find_solo_consonant(letters)\n while len(positions) > 0:\n letters = self._move_consonant(letters, positions)\n letters = string_utils.remove_blanks(letters)\n positions = self._find_solo_consonant(letters)\n positions = self._find_consonant_cluster(letters)\n while len(positions) > 0:\n letters = self._move_consonant(letters, positions)\n letters = string_utils.remove_blanks(letters)\n positions = self._find_consonant_cluster(letters)\n return letters", "def fun4vowels(value:str)->set:\r\n vowels = set('aeiou')\r\n return vowels.intersection(set(value))" ]
[ "0.74471885", "0.7248238", "0.71690327", "0.68447524", "0.6721554", "0.670467", "0.6704543", "0.64083433", "0.6355867", "0.6346289", "0.6267494", "0.62225914", "0.6183491", "0.61348367", "0.60796964", "0.604393", "0.60383445", "0.603059", "0.59944683", "0.5991076", "0.5983007", "0.59746933", "0.5971418", "0.5955748", "0.5945857", "0.59315956", "0.5917575", "0.5914967", "0.59126544", "0.5890969", "0.5883844", "0.58820546", "0.5863154", "0.58493847", "0.5814235", "0.58020264", "0.579921", "0.5781883", "0.5777419", "0.57649887", "0.5746775", "0.5745656", "0.5729128", "0.5727515", "0.5726614", "0.57235444", "0.570914", "0.5693651", "0.5664359", "0.56639755", "0.5614951", "0.5607894", "0.55961204", "0.5594828", "0.5587138", "0.5582752", "0.55791837", "0.5569781", "0.5546237", "0.5546237", "0.5530814", "0.5524702", "0.5518182", "0.55123246", "0.5506708", "0.550327", "0.5498983", "0.54936653", "0.54918367", "0.548504", "0.54781896", "0.54780483", "0.54553604", "0.5450381", "0.54429495", "0.5441067", "0.54331976", "0.54236174", "0.5423246", "0.5422364", "0.54086107", "0.5407726", "0.54056746", "0.5405594", "0.5389495", "0.53761625", "0.5373883", "0.53412354", "0.53375244", "0.532456", "0.53092676", "0.5308572", "0.53076404", "0.52976036", "0.52951694", "0.5291071", "0.5276347", "0.5275294", "0.52634907", "0.5258589" ]
0.6320444
10
Constructor. Create the settings objects
def __init__(self):
        self.s = QSettings()
        self.p = QgsProject.instance()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__( settings={} ):", "def __init__(self, settings):\n\n # store settings\n self.settings = settings", "def __init__(self, settings):\n \n # storing otmbs settings\n self.settings = settings", "def __init__(self, settings):\n self._settings = settings", "def __init__(self, settings):\n self._read_config(settings)", "def __init__(self, settings):\n self._settings = settings\n self._stats = None", "def __init__(self):\n\n self.logger = utils.get_logger()\n\n # set constants\n constants = models.get_asset_dicts('preferences')\n for key, value in constants.items():\n setattr(self, key, value)", "def create_settings():\n\n settings = {}\n\n settings['induction'] = {'type': 'DT'}\n\n settings['selection'] = {'type': 'Base',\n 'its': 1,\n 'param': 1}\n\n settings['prediction'] = {'type': 'MI',\n 'its': 0.1,\n 'param': 0.95}\n\n settings['queries'] = {}\n\n settings['metadata'] = {}\n\n settings['model_data'] = {}\n\n return settings", "def __init__(self):\n super(sppasPathSettings, self).__init__()\n\n sppas_dir = os.path.dirname(os.path.dirname(\n os.path.dirname(os.path.abspath(__file__))))\n\n self.__dict__ = dict(\n sppas=sppas_dir,\n cli=os.path.join(sppas_dir, \"bin\"),\n etc=os.path.join(sppas_dir, \"etc\"),\n po=os.path.join(sppas_dir, \"po\"),\n src=os.path.join(sppas_dir, \"src\"),\n plugins=os.path.join(os.path.dirname(sppas_dir), \"plugins\"),\n resources=os.path.join(os.path.dirname(sppas_dir), \"resources\"),\n samples=os.path.join(os.path.dirname(sppas_dir), \"samples\"),\n logs=os.path.join(os.path.dirname(sppas_dir), \".logs\"),\n wkps=os.path.join(os.path.dirname(sppas_dir), \"workspaces\"),\n trash=os.path.join(os.path.dirname(sppas_dir), \".trash\"),\n )", "def __init__(self, settings_xml):\n # The list of setting ids.\n #\n # XXX This is redundant. We could just get the ids from\n # getting the values of any of our dicts.\n #\n self.ids = []\n self.values = { }\n self.types = { }\n self.defaults = { }\n self.labels = { }\n\n if settings_xml:\n dom = parseString(settings_xml)\n s = dom.firstChild\n\n setting = first_child(s, \"setting\")\n while setting:\n setting_id = setting.getAttribute(\"id\")\n\n # I know the 'sep' setting has no id. I am not sure what it is\n # used for so I am just going to skip it.\n #\n if setting_id != \"\":\n self.ids.append(setting_id)\n self.labels[setting_id] = setting.getAttribute(\"label\")\n self.types[setting_id] = setting.getAttribute(\"type\")\n\n # For bool's actually set the default value to True or\n # False. 
otherwise it is all strings to us.\n #\n default = setting.getAttribute(\"default\")\n if self.types[setting_id] == \"bool\":\n self.defaults[setting_id] = (default.lower() == 'true')\n else:\n self.defaults[setting_id] = default\n\n # Settings start out with their default value.\n #\n self.values[setting_id] = self.defaults[setting_id]\n setting = next_sibling(setting, \"setting\")\n\n dom.unlink()\n dom = None\n\n # There is always an 'override' setting - \"override\", which is\n # set based on the Language Override setting in the scraper.\n #\n if 'override' not in self.ids:\n self.ids.append(\"override\")\n self.values[\"override\"] = False\n self.types[\"override\"] = \"bool\"\n self.defaults[\"override\"] = False\n self.labels[\"override\"] = \"Language Override\"\n\n # The default language for now is english!\n #\n if 'language' not in self.ids:\n self.ids.append(\"language\")\n self.values[\"language\"] = \"en\"\n self.types[\"language\"] = \"string\"\n self.defaults[\"language\"] = \"en\"\n self.labels[\"language\"] = \"Language\"\n\n return", "def __init__(self) -> None:\n self._settings = {}\n\n # Load values from global_settings (only uppercase)\n self.filter_and_set(global_settings)\n\n settings_env_value: str = os.environ.get(SETTINGS_ENV)\n if settings_env_value:\n # Load values from custom settings\n try:\n module = importlib.import_module(settings_env_value)\n except ModuleNotFoundError:\n msg = \"Can't import custom settings. Is it under PYTHONPATH?\"\n raise ModuleError(msg)\n self.filter_and_set(module)", "def __init__(self, settings, valid, defaults=None):\n\n try:\n with open(settings, 'r') as settings_file:\n self._settings = json.load(settings_file)\n except TypeError:\n self._settings = dict(settings)\n self._settings = Settings._inject_defaults(self._settings, defaults)\n Settings._validity_check(self._settings, valid)", "def __init__(self, values: dict):\n self.instantaneous = InstantaneousSettings\n self.infinite_duration = InfiniteDurationSettings\n self.fixed_duration = FixedDurationSettings", "def __init__(self, settings_folder):\n\t\tself.SettingsFolder = settings_folder\n\t\tself.__load_settings()", "def load_settings(self):\n\n self.std = settings.settings", "def __init__(self):\n self.settings = {}\n self.settings[\"user_agent\"] = u\"Cerabot/0.1 (Python/{0}; {1}; {2}; {3}; {4})\"\n self.settings[\"user_agent\"] = self.settings[\"user_agent\"].format(pyv(), dist()[0],\n version(), dist()[1], machine())\n self.settings[\"wiki\"] = {}\n self.settings[\"irc\"] = {}\n self.settings[\"sql\"] = {}\n self.settings[\"watcher\"] = {}\n\n #Wiki settings\n self.settings[\"wiki\"][\"user\"] = u\"Cerabot\"\n self.settings[\"wiki\"][\"site\"] = [(u\"wikipedia\", u\"en\")]\n self.settings[\"wiki\"][\"passwd\"] = u\"\"\n self.settings[\"wiki\"][\"passwd_file\"] = u\".passwd\"\n self.settings[\"wiki\"][\"run_base\"] = u\"User:Cerabot/Run/Task {task}\"\n self.settings[\"wiki\"][\"summary\"] = u\"Task {task}: {comment}. 
\"\n self.settings[\"wiki\"][\"summary\"] += \"([[User:Cerabot/Run/Task {task}|bot]])\"\n \n #IRC settings\n self.settings[\"irc\"][\"nick\"] = u\"Cerabot\"\n self.settings[\"irc\"][\"passwd\"] = u\"\"\n self.settings[\"irc\"][\"passwd_file\"] = u\".passwd\"\n self.settings[\"irc\"][\"server\"] = u\"irc.freenode.net\", 6667\n self.settings[\"irc\"][\"realname\"] = u\"IRC extension to Pyhton robot Cerabot.\"\n self.settings[\"irc\"][\"ident\"] = u\"cerabot\"\n self.settings[\"irc\"][\"channels\"] = [\"##cerabot\", \"##ceradon\"]\n\n #Watcher settings\n self.settings[\"watcher\"][\"nick\"] = u\"Cerabot\"\n self.settings[\"watcher\"][\"server\"] = u\"irc.wikimedia.org\", 6667\n self.settings[\"watcher\"][\"realname\"] = u\"IRC extension to Python robot Cerabot.\"\n self.settings[\"watcher\"][\"ident\"] = u\"cerabot\"\n self.settings[\"watcher\"][\"channels\"] = [\"#en.wikipedia\"]\n self.settings[\"watcher\"][\"report_chans\"] = [\"##ceradon-recent\"]\n\n #Database settings\n self.settings[\"sql\"][\"host\"] = u\"bots-bsql01\"\n self.settings[\"sql\"][\"port\"] = 1433\n self.settings[\"sql\"][\"user\"] = u\"ceradon\"\n self.settings[\"sql\"][\"password\"] = u\"\"", "def _define_settings(self):\n\n self.settings = {}\n\n ##### ORIGINALLY IN THE DOMAIN FILE #######\n\n # Maximum input in the C-Space : no constituent can be more than 100% present\n self.settings['maxInp'] = 1\n\n #### ORIGINALLY IN THE SETTINGS FILE #####\n self.settings[\"epochs\"] = 3 # Training epochs\n self.settings[\"tgtStd\"] = 12e-6\n self.settings['TInit'] = 1e-6\n self.settings[\"TMin\"] = 0\n self.settings[\"TDecayRate\"] = 0.05\n self.settings[\"lambdaInit\"] = 0.011387\n self.settings['lambdaMin'] = 0.0001\n self.settings[\"lambdaDecayRate\"] = 0.60\n self.settings[\"maxSteps\"] = 300000\n self.settings[\"emaSpeedTol\"] = 0.009\n self.settings[\"emaFactor\"] = .005\n self.settings[\"printInterval\"] = 3000\n self.settings[\"summary_file\"] = \"data/summary.txt\"\n mean = torch.ones(self.grammar.bind.nF,\n self.grammar.bind.nR)/self.grammar.bind.nF\n self.settings[\"initStateMean\"] = mean\n self.settings[\"initStateStdev\"] = .025\n self.settings['clamp'] = False\n\n if self.custom_settings is not None:\n for key, value in self.custom_settings.items():\n if key in self.settings:\n self.settings[key] = value", "def __init__(self, settings):\n\n self.r = redis.Redis(\n host=settings['hostname'],\n port=settings['port']\n )\n\n # set the redis list name for storing jobs\n self.joblist = settings['joblistname']", "def __init__(self, settings):\n\n \tself.redis = Redis(host=settings.redis.bind_address, port=settings.redis.port)\n \tself.store = Store(settings.content.path)\n self.get_all_pages() # page modification times\n self.get_all_aliases() # page aliases", "def __init__(self):\n for name, default in self.defaults.items():\n value = getattr(django.conf.settings, name, default)\n setattr(self, name, value)", "def settings() -> Settings:\n return Settings()", "def __init__(self, settings):\n\n #self.e_tree = settings.e_tree # Don't do this!!!! 
change all calls to e_tree to self.settings.e_tree!\n #self.k_tree = settings.k_tree\n self.settings = settings\n self.made_parents = {}\n self.added_files = {}\n self.edit_files = {}", "def __init__(self, configParserObject):\n section = 'settings'\n self.debug = configParserObject.getboolean(section, 'debug')\n self.application_path = configParserObject.get(section, 'application_path')\n self.log_path = configParserObject.get(section, 'log_path')\n self.chat_enabled = configParserObject.getboolean(section, 'chat_enabled')\n self.token_validation_enabled = configParserObject.getboolean(section, 'token_validation_enabled')\n if not os.path.isdir(self.application_path):\n raise ErrorLoadingData('Path %s does not exists' % self.application_path)\n if not os.path.isdir(self.log_path):\n raise ErrorLoadingData('Path %s does not exists' % self.log_path)", "def initialize(cls, settings: Settings) -> Settings:\n\n settings_obj = SettingsService.load_game_conf()\n\n for entry in SettingsService.GAME_SETTINGS:\n value = settings_obj.get(SettingsService.GAME_SETTINGS_ROOT, {}).get(\n entry, None\n )\n if value is None:\n raise RuntimeError(f\"Entry {entry} is missing in settings.\")\n\n setattr(settings, entry, value)\n\n for entry in SettingsService.INITIALS:\n value = settings_obj.get(SettingsService.INITIALS_ROOT, {}).get(entry, None)\n if value is None:\n raise RuntimeError(f\"Entry {entry} is missing in settings.\")\n\n settings.initials[entry] = value\n\n return settings", "def __init__(self, SETTINGS_FILE):\n try:\n self.__setup(SETTINGS_FILE)\n Loger.write(self)\n if self.data.user.enable:\n if self.data.user.timeEnabled:\n now = datetime.datetime.now().time().replace(second=0)\n if not (self.data.user.timeFrom < now < self.data.user.timeTo):\n raise UserDisabledTime(self.data.user)\n self.data.user.lastLogin = datetime.datetime.now()\n self.data.user.save()\n else:\n raise UserDisabled(self.data.user)\n self.blackHoleBrowser = gui.BlackHoleBrowser(self)\n BlackHole.instance = self\n except Exception as e:\n raise e", "def build_settings(self, settings):\n \n settings.add_json_panel(\"Network\", self.config, data=network_json)\n settings.add_json_panel(\"Camera\", self.config, data=camera_json)\n settings.add_json_panel(\"CV\", self.config, data=cv_json)\n settings.add_json_panel(\"Admin\", self.config, data=admin_json)", "def from_settings(settings):", "def __init__(self):\n\n # Initialize cache.\n self._cache = {}\n\n # Initialize default settings.\n defaults = {\n 'api_url': 'http://127.0.0.1:8000/api',\n 'client_id': '',\n 'client_secret': '',\n 'refresh_token': '',\n 'verbose': 'false',\n }\n self._defaults = Parser(defaults=defaults)\n self._defaults.add_section('general')\n\n # Initialize a parser for the user settings file.\n self._user = Parser()\n self._user.add_section('general')\n\n # If the user settings file exists, read it into the parser object.\n user_filename = os.path.expanduser('~/.mezzanine.cfg')\n self._user.read(user_filename)", "def __init__(self, _confFixture, _settings):\n self._conf = _confFixture\n self._settings = _settings", "def __init__(self, domain='com.markfickett.gors'):\n\t\tsettingsDir = os.path.expanduser(self.__SETTINGS_DIR)\n\t\tif not os.path.isdir(settingsDir):\n\t\t\tos.makedirs(settingsDir)\n\t\tself.__settingsFileName = os.path.join(settingsDir,\n\t\t\tdomain + '.plist')\n\t\tif os.path.isfile(self.__settingsFileName):\n\t\t\tself.__settings = 
plistlib.readPlist(\n\t\t\t\tself.__settingsFileName)\n\t\telse:\n\t\t\tself.clear()\n\t\tself.__currentGroupNames = []", "def __setup(self, SETTINGS_FILE):\n config = ConfigParser()\n try:\n config.read(SETTINGS_FILE)\n self.settings = Settings(config)\n self.data = Data()\n except IOError:\n raise FileMissing(SETTINGS_FILE)\n except Exception as e:\n raise e", "def __init__(self):\n\t\t\n\t\tsettings = configparser.SafeConfigParser(allow_no_value=True)\n\t\tlist=settings.read('data/settings.cfg')\n\t\tif not 'data/settings.cfg' in list:\n\t\t\tprint('no configuration file present.. making one')\n\t\t\tself.makeConfigFile(settings)\n\t\t\tshare = ['']\n\t\t\tself.nodes = []\n\t\telse:\n\t\t\tshare, nodes = self.openConfig(settings)\n\t\t\tself.nodes = nodes\n\t\t\n\t\t\n\t\tself.files = self.loadFiles(share)\t\t\n\t\tself.share = share\n\t\tself.kill= False\n\t\tself.downloads = {}\n\t\tself.currentVersion = (0,2,1)\n\t\tself.totalDownloads = 0\n\t\tself.current = 0\n\t\tself.config = settings", "def test_constructor(self):\n # Build the Settings objects\n self.assertEqual(self.extension, self.settings.extension)\n\n # Ensure that the registration settings dict gets\n # added to this Settings\n self.assertEqual(self.test_dict['test_key1'],\n self.settings['test_key1'])", "def __init__(self, settings):\n super().__init__(settings, self.player_info_url, Player)", "def __init__(self):\n self.config = {}", "def initialize(self):\n my_setting = self.settings.get('my_setting')", "def __init__(self, bot, name, default_settings=None):\n if default_settings is None:\n default_settings = {}\n self.bot = bot\n self.name = name\n self.default_settings = default_settings\n\n # set up storage for settings and load from persistent file\n self.settings_path = pathlib.Path(\".settings\", f\"{self.name}.yml\")\n self.id_dict = load_persistent_settings(self.settings_path)", "def build_settings(self, settings):\n settings.add_json_panel('Makesmith Settings', self.config, data=self.json)", "def __init__(self, settings):\n # Requirement ID: 7.0.0\n\n self.settings = settings\n self.ResetScore()\n self.game_active = False\n self.high_score = 0\n self.number_of_mega_bullets = settings.number_of_mega_bullets\n self.shot_bullets = 0\n self.remaining_bullets = settings.number_of_mega_bullets", "def settings(self):\r\n return settings.Settings(self)", "def settings(self):\r\n return SettingResource(self)", "def settings(self):\n return {}", "def settings(self):\n from hubspot3.settings import SettingsClient\n\n return SettingsClient(**self.auth, **self.options)", "def _init_key_settings(self):\n self.minKeySize = 1023\n self.maxKeySize = 8193\n self.rsaSigHashes = list(RSA_SIGNATURE_HASHES)\n self.rsaSchemes = list(RSA_SCHEMES)\n self.dsaSigHashes = list(DSA_SIGNATURE_HASHES)\n self.virtual_hosts = []\n # DH key settings\n self.eccCurves = list(CURVE_NAMES)\n self.dhParams = None\n self.dhGroups = list(ALL_DH_GROUP_NAMES)\n self.defaultCurve = \"secp256r1\"\n self.keyShares = [\"secp256r1\", \"x25519\"]\n self.padding_cb = None\n self.use_heartbeat_extension = True\n self.heartbeat_response_callback = None", "def __init__(\n self,\n settings: Optional[Union[Dict[str, Any], MpConfigFile, str]] = None,\n conf_filepath: str = None,\n ):\n self._lbl_loading = widgets.Label(value=\"Loading. 
Please wait.\")\n display(self._lbl_loading)\n if isinstance(settings, MpConfigFile):\n self.mp_conf_file = MpConfigFile(\n settings=settings.settings, file=conf_filepath\n )\n elif isinstance(settings, dict):\n self.mp_conf_file = MpConfigFile(settings=settings, file=conf_filepath)\n elif isinstance(settings, str):\n self.mp_conf_file = MpConfigFile(file=settings)\n else:\n # This is the default if neither settings nor conf_filepath are passed.\n self.mp_conf_file = MpConfigFile(file=conf_filepath)\n self.mp_conf_file.load_default()\n self.tool_buttons: Dict[str, widgets.Widget] = {}\n self._inc_loading_label()\n\n # Get the settings definitions and Config controls object\n mp_def_dict = get_mpconfig_definitions()\n self.mp_controls = MpConfigControls(mp_def_dict, self.mp_conf_file.settings)\n self._inc_loading_label()\n\n # Set up the tabs\n self.tab_ctrl = CompEditTabs(self._get_tab_definitions())\n self._inc_loading_label()\n\n self.txt_current_file = widgets.Text(\n description=\"Conf File\",\n value=self.current_config_file,\n layout=widgets.Layout(width=\"75%\"),\n )\n self.btn_save = widgets.Button(\n description=\"Save Settings\",\n tooltip=\"Save current settings to your config file.\",\n )\n self.btn_save.on_click(self._save_file)\n self.btn_validate = widgets.Button(\n description=\"Validate Settings\",\n tooltip=\"Run basic sanity checks on current settings.\",\n )\n self.btn_validate.on_click(self._validate_config)\n self.cb_backup = widgets.Checkbox(description=\"Create backup\", value=False)\n self.cb_refresh = widgets.Checkbox(description=\"Refresh on save\", value=True)\n vbox = widgets.VBox(\n [\n self.txt_current_file,\n widgets.HBox(\n [self.btn_save, self.cb_refresh, self.cb_backup, self.btn_validate]\n ),\n self.mp_conf_file.viewer,\n ]\n )\n self.layout = widgets.VBox([self.tab_ctrl.layout, vbox])\n self._lbl_loading.layout.visibility = \"hidden\"", "def __init__(self):\n self._create_options()\n self._create_sections()", "def settings(self) -> Dict[str, Any]:\n return {}", "def __init__(self, settings_file_name):\n with open(settings_file_name, 'r') as f:\n # load config file\n self.settings = yaml.load(f)\n\n # get key values\n sit_names = self.settings[HNF.Consts.SIT_NAMES]\n row_action_names = self.settings[HNF.Consts.ROW_ACT_NAMES]\n column_action_names = self.settings[HNF.Consts.COL_ACT_NAMES]\n name = self.settings[HNF.Consts.NAME]\n\n # init HNG object\n self.HNFOut = HNF.HNFInstance(sit_names, row_action_names, column_action_names, name)\n\n # set the values found in the settings\n self.__initFromFile()\n\n # calc the summary and expected utility\n self.HNFOut.initSummaryBelief()\n self.HNFOut.initExpectedUtility()\n self.HNFOut.calcHypergameExpectedUtility()\n self.HNFOut.calcModelingOpponentUtility()", "def settings_init(self):\n config_console = configparser.ConfigParser()\n config_console.read(CONFIG_FILE_NAME)\n self.logmode = config_console[\"LOG\"][\"log_mode\"]", "def __init__(self, settings: ActorSettings) -> None:\n super().__init__()\n self.settings = settings\n\n self._critic = None", "def __init__(self, *args, **kwargs):\r\n super().__init__()\r\n self._cfg = ConfigDict() # current configuration\r\n self._default_config = ConfigDict() # default configuration\r\n self._temp_config = OrderedDict() # temporary configuration\r\n self._path = Path() # current configuration path\r\n self._default_path = Path() # default configuration path\r\n self._conversion_dict = None\r\n self._auto_cast = None\r\n self._write_flags = None\r\n self._force_load = 
None\r\n self._load_empty = None\r\n self._ask_path = None\r\n self._search_in_default_config = None\r\n self._init_count = 0\r\n self._policies = defaultdict(bool) # by default every modification is forbidden # WIP\r\n if args or kwargs:\r\n self.init(*args, **kwargs)\r\n logger.debug(\"Config object created.\")", "def __init__(self):\n self.__load_settings()\n self.__load_history()\n self.__clear_context()\n\n if self.DELETE_ALL_ON_STARTUP:\n sublime.set_timeout_async(lambda: self.delete_all_history(), 0)\n elif self.CLEANUP_ON_STARTUP:\n sublime.set_timeout_async(lambda: self.clean_history(False), 0)", "def __init__(self, configs):\n\n self.__configs = configs", "def __init__(self, os_creds, keypair_settings):\n super(self.__class__, self).__init__(os_creds)\n\n self.keypair_settings = keypair_settings\n self.__delete_keys_on_clean = True\n\n # Attributes instantiated on create()\n self.__keypair = None", "def test_settings_instantiation(self):\n ## no settings passed on instantiation\n bd = BorrowDirect() # no settings info\n self.assertEqual(\n True, isinstance(bd, BorrowDirect) )\n ## dict settings\n settings_dict = {} ## empty dct\n bd = BorrowDirect( settings_dict )\n self.assertEqual(\n None, bd.UNIVERSITY_CODE )\n settings_dict = { 'UNIVERSITY_CODE': '123' } ## populated dct\n bd = BorrowDirect( settings_dict )\n self.assertEqual(\n '123', bd.UNIVERSITY_CODE )\n ## module settings\n s = imp.new_module( 'settings' ) ## empty module\n bd = BorrowDirect( s )\n self.assertEqual(\n None, bd.UNIVERSITY_CODE )\n s = imp.new_module( 'settings' ) ## populated module\n s.UNIVERSITY_CODE = '234'\n bd = BorrowDirect( s )\n self.assertEqual(\n '234', bd.UNIVERSITY_CODE )", "def initialize(cls, settings):\n\n settings_obj = SettingsService.load_game_conf()\n\n for entry in SettingsService.GAME_SETTINGS:\n value = settings_obj.get(SettingsService.GAME_SETTINGS_ROOT, {}).get(entry, None)\n if value is None:\n raise RuntimeError(f\"Entry {entry} is missing in settings.\")\n\n settings[entry] = float(value)\n\n return settings", "def config_init(self):\n\n game_opts = [\n\n # Execution Options\n ('debug',False), # Toggle Debug Messaging\n ('log_path',False), # Turn on logging (w/path)\n ('log_lvl',logging.DEBUG), # Set log level\n\n # World Generation Options\n ('flex_limit',3) # Sets the maximum variance\n\n ]\n\n # Attempts to pull each value from the configuration\n # if not in config, the default value defined above\n # is set instead\n for opt in game_opts:\n try:\n setattr(self,opt[0],self.conf.conf_dict[opt[0]])\n except:\n setattr(self,opt[0],opt[1])\n continue", "def __init__(self, *args):\n this = _libsbml.new_L3ParserSettings(*args)\n try: self.this.append(this)\n except: self.this = this", "def __init__(self, *args, **kwds):\n if args or kwds:\n super(KltSettings, self).__init__(*args, **kwds)\n #message fields cannot be None, assign default values for those that are\n if self.max_features is None:\n self.max_features = 0\n if self.window_size is None:\n self.window_size = 0\n if self.quality is None:\n self.quality = 0.\n if self.min_distance is None:\n self.min_distance = 0.\n if self.harris is None:\n self.harris = 0.\n if self.size_block is None:\n self.size_block = 0\n if self.pyramid_lvl is None:\n self.pyramid_lvl = 0\n if self.mask_border is None:\n self.mask_border = 0\n else:\n self.max_features = 0\n self.window_size = 0\n self.quality = 0.\n self.min_distance = 0.\n self.harris = 0.\n self.size_block = 0\n self.pyramid_lvl = 0\n self.mask_border = 0", "def __init__(self):\n 
# Read configuration into dictionary\n self.directories = general.config_directories()\n self.config = general.read_yaml_files(self.directories)", "def __init__(self, text=None, settings=None, style='General', language='en'):\n\n self._text = None\n self._settings = None\n self._style = None\n self._language = None\n\n self.text = text\n self.settings = settings\n self.style = style\n self.language = language", "def __init__(self):\n self.__parameters: ConfigParams = ConfigParams()", "def settings(self, settings):\n\n self._settings = settings", "def __init__(\n self,\n *,\n file_name: str,\n klass: Type[TSettingsType],\n ) -> None:\n self._file_name = file_name\n self._klass = klass", "def __init__(self):\n # Screen settings\n self.screen_width = 1860\n self.screen_height = 1020\n self.screen_size = self.screen_width, self.screen_height\n self.bg_color = 230, 230, 230\n\n # Ship static settings\n self.ship_limit = 3\n\n # Bullet static settings\n self.bullet_limit = 3\n self.bullet_width = 3\n self.bullet_height = 15\n self.bullet_color = 60, 60, 60\n\n # Alien static settings\n self.fleet_drop_speed = 10\n\n self.speed_up_scale = 1.1\n self.initialize_dynamic_settings()", "def __init__(self):\n\n # open json config file that reads in information\n config_path = open(\"config.json\", \"r\")\n config_json = config_path.read()\n config_dict = json.loads(config_json)\n\n # assign object variables\n self.project_id = config_dict[\"project-id\"]\n self.bucket_name = config_dict[\"bucket-name\"]\n self.location_id = config_dict[\"key-location\"]\n self.key_ring_id = config_dict[\"key-ring-id\"]\n self.crypto_key_id = config_dict[\"crypto-key-id\"]\n self.service_account_email = config_dict[\"service-account-email\"]\n\n # close the file\n config_path.close()", "def __init__(self, window, settings_dir, n_trials=0):\r\n self.stimuli = OrderedDict()\r\n self.triggers = {}\r\n\r\n self.settings_dir = settings_dir\r\n self.num_trials = n_trials\r\n self.win = window", "def __init__(self, settings, **kwargs):\n super().__init__(settings=settings, **kwargs)\n self.vr_status = 'off' #off, ready, waiting or recording\n for v_attr in self.saveable_defaults.keys():\n setattr(self, v_attr, self.resolve_attr(v_attr, settings, self.saveable_defaults[v_attr]))\n self.vr_splitter_port = None\n self.vr_recordcount = 0\n self.vr_lastrecording = 0\n self.vr_activefile = '-'\n self.vr_protect = threading.Lock()\n self.vr_monthread = None\n self.procthread = None\n self.vr_web_trigger = None\n self.vr_trig_queue = queue.Queue()\n # and this to support web browser front end\n self.camhand.add_url_rule('/flip-trigger', view_func=self.flip_record_trigger, methods=('REQUEST',))\n self.camhand.add_url_rule('/flip-record', view_func=self.record_now, methods=('REQUEST',))", "def __init__(self, values = None):\n TCInit(self)\n if values is not None:\n self.set_opts(values)", "def __init__(self):\n default_config = Config()\n query = Query(default_config)\n database = Database(default_config)\n common_util = CommonUtil(default_config, database)\n self.config = default_config\n self.query = query\n self.database = database\n self.common_util = common_util", "def __init__(self, argv: list, company: str, appname: str, Liststr=None):\n if company is None:\n company = self.__class__.__name__\n QSettings.setPath(QSettings.IniFormat, QSettings.UserScope, str(FOLDER.parent / \"settings\"))\n self.settings = QSettings(QSettings.IniFormat, QSettings.UserScope, company, appname)\n super().__init__(argv)", "def __init__(self, 
generateConf=False):\n super(Settings, self).__init__()\n if generateConf:\n self.writeSettingsFile(DEFAULT_CONF, path=DEFAULT_SETTINGS_PATH)\n else:\n try:\n self.settingsFilePath = DEFAULT_SETTINGS_PATH\n except IOError:\n self.writeSettingsFile(\n DEFAULT_CONF,\n path=DEFAULT_SETTINGS_PATH\n )\n util.write(\n \"No configuration file found, a default one has been generated at %s.\" % (DEFAULT_SETTINGS_PATH,),\n priority=9\n )\n sys.exit(0)", "def initialize_from_config(self):", "def __init__(self, settings={}, prefix=\"\"):\n self.settings = settings\n self.mapper = routes.Mapper()\n self.setup_handlers(self.mapper)", "def init_from_settings(self, settings):\n\n # Initializes screen.\n self.screen = pygame.display.set_mode(settings.resolution)\n self.screen.set_clip(settings.game_info)\n self.screen.fill(settings.colors[5])\n \n self.screen.blit(settings.help_text1, (165, 250))\n self.screen.blit(settings.help_text2, (165, 270))\n self.screen.blit(settings.help_text3, (165, 290))\n \n pygame.draw.rect(self.screen, settings.colors[6], start_rect, 0)\n pygame.draw.rect(self.screen, settings.colors[6], reset_rect, 0)\n pygame.draw.rect(self.screen, settings.colors[6], settings.box_display, 2)\n \n self.screen.blit(settings.start_text, (171, 188))\n self.screen.blit(settings.reset_text, (251, 188))\n \n self.screen.set_clip(settings.game_area)\n \n pygame.display.set_caption(settings.title)\n pygame.mouse.set_visible(settings.mouse_enabled)\n\n # Initializes background.\n background = pygame.Surface(self.screen.get_size())\n self.background = background.convert()\n self.background.fill(settings.background)\n\n pygame.display.update()", "def _generate_settings(self):\n settings = {}\n settings[\"api_client_id\"] = input(\"(OPTIONAL) Please enter your Twitch API Client ID: \") #Get API Client ID first so I can use API to get user ID\n #Save JSON\n fileIO.save_json(\"settings.json\", settings)\n name = False\n while not name: #While name not set\n name = input(\"Please enter the username of your Twitch account: \").lower()\n userID = self._get_user_id(name)\n if not userID:\n name = False\n settings[\"userid\"] = userID\n settings[\"oauth\"] = input(\"Please enter the oauth token for your Twitch account: \")\n if settings[\"oauth\"].startswith(\"oauth:\"): #If the oauth token starts with oauth:, remove it\n settings[\"oauth\"] = settings[\"oauth\"][6:]\n settings[\"error_webhook\"] = input(\"Please enter the Discord WebHook URL you would like errors to be sent to: \")\n #Save JSON\n fileIO.save_json(\"settings.json\", settings)", "def build_mail_settings():\n mail_settings = MailSettings()\n mail_settings.bcc_settings = BCCSettings(True, Email(\"[email protected]\"))\n mail_settings.bypass_list_management = BypassListManagement(True)\n mail_settings.footer_settings = FooterSettings(True, \"Footer Text\",\n (\"<html><body>Footer \"\n \"Text</body></html>\"))\n mail_settings.sandbox_mode = SandBoxMode(True)\n mail_settings.spam_check = SpamCheck(True, 1,\n \"https://spamcatcher.sendgrid.com\")\n return mail_settings", "def load_settings(self):\n LogConfiguration.initialize(self._db)\n self.analytics = Analytics(self._db)\n self.auth = Authenticator(self._db, self.analytics)\n\n self.setup_external_search()\n\n # Track the Lane configuration for each library by mapping its\n # short name to the top-level lane.\n new_top_level_lanes = {}\n # Create a CirculationAPI for each library.\n new_circulation_apis = {}\n\n # Potentially load a CustomIndexView for each library\n new_custom_index_views = 
{}\n\n # Make sure there's a site-wide public/private key pair.\n self.sitewide_key_pair\n\n for library in self._db.query(Library):\n lanes = load_lanes(self._db, library)\n\n new_top_level_lanes[library.id] = lanes\n\n new_custom_index_views[library.id] = CustomIndexView.for_library(\n library\n )\n\n new_circulation_apis[library.id] = self.setup_circulation(\n library, self.analytics\n )\n self.top_level_lanes = new_top_level_lanes\n self.circulation_apis = new_circulation_apis\n self.custom_index_views = new_custom_index_views\n self.shared_collection_api = self.setup_shared_collection()\n\n # Assemble the list of patron web client domains from individual\n # library registration settings as well as a sitewide setting.\n patron_web_domains = set()\n admin_web_domains = set()\n\n def get_domain(url):\n url = url.strip()\n if url == \"*\":\n return url\n scheme, netloc, path, parameters, query, fragment = urllib.parse.urlparse(\n url)\n if scheme and netloc:\n return scheme + \"://\" + netloc\n else:\n return None\n\n sitewide_patron_web_client_urls = ConfigurationSetting.sitewide(\n self._db, Configuration.PATRON_WEB_HOSTNAMES).value\n if sitewide_patron_web_client_urls:\n for url in sitewide_patron_web_client_urls.split('|'):\n domain = get_domain(url)\n if domain:\n patron_web_domains.add(domain)\n\n sitewide_admin_web_client_urls = ConfigurationSetting.sitewide(\n self._db, Configuration.ADMIN_WEB_HOSTNAMES).value\n if sitewide_admin_web_client_urls:\n for url in sitewide_admin_web_client_urls.split('|'):\n domain = get_domain(url)\n if domain:\n admin_web_domains.add(domain)\n\n from .registry import Registration\n for setting in self._db.query(\n ConfigurationSetting).filter(\n ConfigurationSetting.key == Registration.LIBRARY_REGISTRATION_WEB_CLIENT):\n if setting.value:\n patron_web_domains.add(get_domain(setting.value))\n\n self.patron_web_domains = patron_web_domains\n self.admin_web_domains = admin_web_domains\n self.setup_configuration_dependent_controllers()\n authentication_document_cache_time = int(\n ConfigurationSetting.sitewide(\n self._db, Configuration.AUTHENTICATION_DOCUMENT_CACHE_TIME\n ).value_or_default(0)\n )\n self.authentication_for_opds_documents = ExpiringDict(\n max_len=1000, max_age_seconds=authentication_document_cache_time\n )\n self.wsgi_debug = ConfigurationSetting.sitewide(\n self._db, Configuration.WSGI_DEBUG_KEY\n ).bool_value or False", "def __init__(\n self,\n settings=None,\n scenario=None,\n living_expenses_strategy=None,\n saving_strategy=None,\n withdrawal_strategy=None,\n tax_treatment=None\n ):\n # Set up instance:\n super().__init__()\n self.default_values = copy(DEFAULTVALUES)\n self.default_types = copy(DEFAULTTYPES)\n self.default_builders = copy(DEFAULTBUILDERS)\n # Store args as attributes:\n # For `settings` specifically, use the default values provided\n # by the class if none are provided explicitly.\n if settings is None:\n self.settings = Settings()\n else:\n self.settings = settings\n # For the rest, None is allowed:\n self.scenario = scenario\n self.living_expenses_strategy = living_expenses_strategy\n self.saving_strategy = saving_strategy\n self.withdrawal_strategy = withdrawal_strategy\n self.tax_treatment = tax_treatment\n # Some params aren't used to build Forecast and so are not\n # received as input to __init__. 
Create attrs for them here:\n self.allocation_strategy = None", "def __init__(self, *kwargs):\n self.session = requests.Session()\n self.config_path = os.path.join(\n os.path.dirname(__file__), 'config.json')\n self.load_config()\n if self.application_token == '':\n self.set_application_token()\n self.token = self.get_token()\n self.get_settings()", "def __init__(self):\n self.collectorName = COLLECTOR_NAME\n self.configCycleInterval = 20 # minutes\n self.cycleInterval = 5 * 60 # seconds\n\n # The configurationService attribute is the fully qualified class-name\n # of our configuration service that runs within ZenHub\n self.configurationService = 'NullConfig'\n\n # Will be filled in based on buildOptions\n self.options = None\n\n self.configCycleInterval = 20*60", "def _post_initialisations(self):\n # Init the settings module.\n self.dummy_for_settings = SectionConfig(self.app.id, self.__class__.__name__)\n global settings\n settings = self.dummy_for_settings\n\n self.dummy_for_options = OptionConfig(self.app.id)\n global options\n options = self.dummy_for_options\n\n # Bind message boxes.\n self.MessageBox = MessageBox(self)\n self.msg = self.MessageBox.Message\n self.are_you_sure = self.MessageBox.are_you_sure\n\n # Set previous size and state.\n width = settings.get('width', 350)\n height = settings.get('height', 350)\n self.set_title(self.app.localizedname)\n self.resize(width, height)\n if settings.get_bool('maximized', False):\n self.maximize()\n # Load any other settings here.\n self.load_xinput_devices()", "def __init__(self, config):\n\n self.mode9 = config[sC.PROJECT_DETAILS][sC.MODE] == '9'\n self.admins = eval(handler.config[sC.PROJECT_DETAILS][sC.ADMIN_IDS])\n self.approvers = eval(handler.config[sC.COUNTER_STRIKE_ADMINS][sC.APPROVERS])", "def __init__(self, configfile='settings.cfg'):\n \n self.configfile = configfile\n \n # Load parameters from config file\n config = ConfigParser.RawConfigParser()\n config.read(self.configfile)\n \n # Set parameters to default if not in config file \n self.title=config.get('Settings','title') if config.has_option(\n 'Settings','title') else 'REDPy Catalog'\n self.filename=config.get('Settings','filename') if config.has_option(\n 'Settings','filename') else 'redpytable.h5'\n self.groupName=config.get('Settings','groupName') if config.has_option(\n 'Settings','groupName') else 'default'\n self.groupDesc=config.get('Settings','groupDesc') if config.has_option(\n 'Settings','groupDesc') else 'Default Test Run'\n self.nsta=config.getint('Settings','nsta') if config.has_option(\n 'Settings','nsta') else 8 \n self.station=config.get('Settings','station') if config.has_option(\n 'Settings','station') else 'SEP,YEL,HSR,SHW,EDM,STD,JUN,SOS'\n self.channel=config.get('Settings','channel') if config.has_option(\n 'Settings','channel') else 'EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ,EHZ'\n self.network=config.get('Settings','network') if config.has_option(\n 'Settings','network') else 'UW,UW,UW,UW,UW,UW,UW,UW'\n self.location=config.get('Settings','location') if config.has_option(\n 'Settings','location') else '--,--,--,--,--,--,--,--'\n self.samprate=config.getfloat('Settings','samprate') if config.has_option(\n 'Settings','samprate') else 100.\n self.nstaC=config.getint('Settings','nstaC') if config.has_option(\n 'Settings','nstaC') else 5\n self.printsta=config.getint('Settings','printsta') if config.has_option(\n 'Settings','printsta') else 2\n self.server=config.get('Settings','server') if config.has_option(\n 'Settings','server') else 'IRIS'\n 
self.port=config.getint('Settings','port') if config.has_option(\n 'Settings','port') else 16017\n self.nsec=config.getint('Settings','nsec') if config.has_option(\n 'Settings','nsec') else 3600\n self.lwin=config.getfloat('Settings','lwin') if config.has_option(\n 'Settings','lwin') else 7.\n self.swin=config.getfloat('Settings','swin') if config.has_option(\n 'Settings','swin') else 0.8\n self.trigon=config.getfloat('Settings','trigon') if config.has_option(\n 'Settings','trigon') else 3.\n self.trigoff=config.getfloat('Settings','trigoff') if config.has_option(\n 'Settings','trigoff') else 2.\n self.kurtmax=config.getfloat('Settings','kurtmax') if config.has_option(\n 'Settings','kurtmax') else 80.\n self.kurtfmax=config.getfloat('Settings','kurtfmax') if config.has_option(\n 'Settings','kurtfmax') else 150.\n self.oratiomax=config.getfloat('Settings','oratiomax') if config.has_option(\n 'Settings','oratiomax') else 0.06\n self.kurtwin=config.getfloat('Settings','kurtwin') if config.has_option(\n 'Settings','kurtwin') else 5.\n self.winlen=config.getint('Settings','winlen') if config.has_option(\n 'Settings','winlen') else 1024\n self.fmin=config.getfloat('Settings','fmin') if config.has_option(\n 'Settings','fmin') else 1.\n self.fmax=config.getfloat('Settings','fmax') if config.has_option(\n 'Settings','fmax') else 10.\n self.filomin=config.getfloat('Settings','filomin') if config.has_option(\n 'Settings','filomin') else 1.\n self.filomax=config.getfloat('Settings','filomax') if config.has_option(\n 'Settings','filomax') else 2.5\n self.fiupmin=config.getfloat('Settings','fiupmin') if config.has_option(\n 'Settings','fiupmin') else 5.\n self.fiupmax=config.getfloat('Settings','fiupmax') if config.has_option(\n 'Settings','fiupmax') else 10.\n self.telefi=config.getfloat('Settings','telefi') if config.has_option(\n 'Settings','telefi') else -1.\n self.teleok=config.getint('Settings','teleok') if config.has_option(\n 'Settings','teleok') else 1 \n self.cmin=config.getfloat('Settings','cmin') if config.has_option(\n 'Settings','cmin') else 0.7\n self.ncor=config.getint('Settings','ncor') if config.has_option(\n 'Settings','ncor') else 4\n self.minorph=config.getfloat('Settings','minorph') if config.has_option(\n 'Settings','minorph') else 0.05\n self.maxorph=config.getfloat('Settings','maxorph') if config.has_option(\n 'Settings','maxorph') else 7.\n self.minplot=config.getint('Settings','minplot') if config.has_option(\n 'Settings','minplot') else 3\n self.dybin=config.getfloat('Settings','dybin') if config.has_option(\n 'Settings','dybin') else 1.\n self.hrbin=config.getfloat('Settings','hrbin') if config.has_option(\n 'Settings','hrbin') else 1.\n self.recplot=config.getfloat('Settings','recplot') if config.has_option(\n 'Settings','recplot') else 14.\n \n # Derived Settings\n self.ptrig=1.5*self.winlen/self.samprate\n self.atrig=3*self.winlen/self.samprate\n self.mintrig=self.winlen/self.samprate\n self.wshape = int((self.ptrig + self.atrig)*self.samprate) + 1", "def init_game_setting(self):\r\n pass", "def config(settings):\n\n #T = current.T\n\n # PrePopulate data\n settings.base.prepopulate += (\"SHARE/LK\",)\n settings.base.prepopulate_demo += (\"SHARE/Demo\",)\n\n # Finance settings\n settings.fin.currencies = {\n #\"EUR\" : \"Euros\",\n #\"GBP\" : \"Great British Pounds\",\n \"LKR\" : \"Sri Lanka Rupees\",\n \"USD\" : \"United States Dollars\",\n }\n settings.fin.currency_default = \"USD\"", "def init(self):\n\n if self.has_settings:\n print(\n TERM.bold_red('Error:'),\n 
'Settings file already exists. Doing nothing.'\n )\n return\n\n new_settings = {\n 'strategy': self.ns.strategy,\n 'branch': self.ns.branch,\n 'scoring': self.ns.scoring,\n }\n\n with open(self.settings, 'w') as f:\n f.write(yaml.dump(new_settings, default_flow_style=False))\n\n print(\n TERM.bold_green('Yay!'),\n 'Wrote settings file {0}'.format(self.settings)\n )", "def __init__(self, config_file=None):\n\t\tself.options = {}\n\n\t\tif config_file:\n\t\t\tself.set_file(config_file)", "def get_settings():\n return SettingCollection.build()", "def default_settings(self, settings):\n return {}", "def __init__(self, setting):\n self.setting.update(setting)\n self.base = (self.setting.get('base') + '/').lower()\n self.client = Box(self.setting.get('access_token'))\n self.client.users_get_current_account()", "def __init__(self):\n self._init_key_settings()\n self._init_misc_extensions()\n self.minVersion = (3, 1)\n self.maxVersion = (3, 4)\n self.versions = [(3, 4), (3, 3), (3, 2), (3, 1)]\n self.cipherNames = list(CIPHER_NAMES)\n self.macNames = list(MAC_NAMES)\n self.keyExchangeNames = list(KEY_EXCHANGE_NAMES)\n self.cipherImplementations = list(CIPHER_IMPLEMENTATIONS)", "def _initialise(self, **settings):\n if settings:\n self.settings = self.clean_settings(settings)\n else:\n # same as self._fill_from_preset('commoner')\n self._initialise(name=\"Commoner\", alignment=\"neutral\",\n ac=10, hp=4,\n attack_parameters=[['club', 2, 0, 4]])\n print(\"EMPTY CREATURE GIVEN. SETTING TO COMMONER\")\n return 0\n\n # Mod of preexisting\n if 'base' in self.settings:\n # Sanify first and make victim\n if type(self.settings['base']) is str:\n # generate a preset and get its attributes. Seems a bit wasteful.\n victim = self.__class__(self.settings['base'])\n elif isinstance(self.settings['base'], self.__class__):\n victim = self.settings['base']\n else:\n raise TypeError\n # copy all\n # victim.ability_bonuses #in case the user provided with ability scores,\n # which are overridden by adbility bonues\n base = {x: getattr(victim, x) for x in dir(victim) if\n getattr(victim, x) and x.find(\"__\") == -1 and x.find(\"_\") != 0 and x != 'beastiary'}\n base['ability_bonuses'] = {}\n # base.update(**self.settings)\n for (k, v) in self.settings.items():\n if type(v) is dict:\n base[k].update(v)\n else:\n base[k] = v\n self.settings = base\n\n # Name etc.\n # subscript assigns it or self.settings if it has a value\n self['name'] = 'nameless'\n self['level'] = 0\n self['xp'] = 0\n # proficiency. Will be overridden if hp is provided?\n self['proficiency'] = 1 + round(self.level / 4) # TODO check maths on PH\n\n # set abilities\n self.set_ability_dice()\n\n # Get HD\n self.hit_die = None\n if 'hd' in self.settings.keys():\n if type(self.settings['hd']) is Dice:\n self.hit_die = self.settings['hd'] # we're dealing with a copy of a beastiary obj.\n else:\n self.hit_die = Dice(num_faces=int(self.settings['hd']),\n bonus=self.con.bonus,\n avg=True,\n role=\"hd\")\n elif 'size' in self.settings.keys():\n size_cat = {\"small\": 6, \"medium\": 8, \"large\": 10, \"huge\": 12}\n if self.settings['size'] in size_cat.keys():\n self.hit_die = Dice(bonus=self.con.bonus,\n num_faces=size_cat[self.settings['size']],\n avg=True,\n role=\"hd\")\n elif 'hp' in self.settings and 'level' in self.settings:\n # Guess based on hp and level. It is not that dodgy really as the manual does not use odd dice.\n # hp =approx. 
0.5 HD * (level-1) + HD + con * level\n # HD * (0.5* (level-1)+1) = hp - con*level\n # HD = (hp - con*level)/(level+1)\n bestchoice = (int(self.settings['hp']) - self.con.bonus * int(self.settings['level'])) / (\n (int(self.settings['level']) + 1))\n print(int(self.settings['hp']), int(self.ability_bonuses['con']), int(self.settings['level']))\n print(\"choice HD...\", bestchoice)\n # print(\"diagnosis...\",self.ability_bonuses)\n self.log.warning('Unfinished case to guess HD. so Defaulting hit dice to d8 instead') # TODO finish\n self.hit_die = Dice(bonus=self.con.bonus, num_faces=8, avg=True, role=\"hd\")\n else:\n # defaulting to d8\n self.log.warning('Insufficient info: defaulting hit dice to d8')\n self.hit_die = Dice(bonus=self.con.bonus, num_faces=8, avg=True, role=\"hd\")\n\n # Get HP\n if 'hp' in self.settings.keys():\n self.hp = int(self.settings['hp'])\n self.starting_hp = self.hp\n elif self.settings['level']:\n self.set_level()\n else:\n raise Exception('Cannot make character without hp or hd + level provided')\n\n # AC\n if not 'ac' in self.settings.keys():\n self.settings['ac'] = 10 + self.dex.bonus\n self.ac = int(self.settings['ac'])\n\n # init\n if not 'initiative_bonus' in self.settings:\n self.settings['initiative_bonus'] = self.dex.bonus\n self.initiative = Dice(bonus=int(self.settings['initiative_bonus']),\n num_faces=20,\n role=\"initiative\")\n\n ##spell casting ability_bonuses\n if 'sc_ability' in self.settings:\n self.spellcasting_ability_name = self.settings['sc_ability'].lower()\n elif 'healing_spells' in self.settings or 'buff_spells' in self.settings:\n self.spellcasting_ability_name = max('wis', 'int', 'cha',\n key=lambda ab: self[ab].bonus) # Going for highest. seriously?!\n print(\n \"Please specify spellcasting ability of \" +\n self.name +\n \" next time, this time \" +\n self.spellcasting_ability_name +\n \" was used as it was biggest.\")\n else:\n self.spellcasting_ability_name = 'con' # TODO fix this botch up.\n if not 'healing_bonus' in self.settings:\n self.settings['healing_bonus'] = self[self.spellcasting_ability_name].bonus\n if 'healing_spells' in self.settings:\n self.starting_healing_spells = int(self.settings['healing_spells'])\n self.healing_spells = self.starting_healing_spells\n if not 'healing_dice' in self.settings:\n self.settings['healing_dice'] = 4 # healing word.\n self.healing = Dice(bonus=int(self.settings['healing_bonus']),\n num_faces=int(self.settings['healing_dice']),\n role=\"healing\") ##Healing dice can't crit or have adv.\n else:\n self.starting_healing_spells = 0\n self.healing_spells = 0\n # not a healer\n\n # attacks\n self.attacks = []\n self.hurtful = 0\n if not 'attack_parameters' in self.settings:\n # Benefit of doubt. 
Given 'em a dagger .\n self.settings['attack_parameters'] = 'dagger'\n if type(self.settings['attack_parameters']) is str:\n try:\n x = json.loads(self.settings['attack_parameters'].replace(\"*\", \"\\\"\"))\n self._attack_parse(x)\n self.attack_parameters = x\n except:\n # These have to be readable by _attack_parse\n weapons = {'club': 4, 'greatclub': 8,\n 'dagger': 4, 'shortsword': 6, 'longsword': 8, 'bastardsword': 10, 'greatsword': 12,\n 'rapier': 8, 'scimitar': 6, 'sickle': 4,\n 'handaxe': 6, 'battleaxe': 8, 'waraxe': 10, 'greataxe': 12,\n 'javelin': 6, 'spear': 6, 'flail': 8, 'glaive': 10, 'halberd': 10, 'lance': 12, 'pike': 10,\n 'trident': 6,\n 'war pick': 8,\n 'lighthammer': 4, 'mace': 6, 'warhammer': 8,\n 'quaterstaff': 6, 'morningstar': 8, 'punch': 1,\n 'whip': 4} # parsing of strings for dice not implemented yet, so punch is d1 for now.\n # TODO weapons removed as they gave trouble:\n # 'maul':[6,6],\n # 'brütal war pick': [8, 8], # okay, I could not resist it.\n\n # bastard sword and war axe are no more due to the versatile rule,\n # however they were kept here to keep it simple\n # ranged weapons are missing for now...\n for w in weapons.keys():\n if self.settings['attack_parameters'].lower().find(w) > -1:\n # TODO fix the fact that a it gives the finesse option to all.\n if self.dex.bonus > self.str.bonus:\n chosen_ab = 'dex'\n else:\n chosen_ab = 'str'\n self.attack_parameters = [[w, self.proficiency + chosen_ab, chosen_ab, weapons[w]]]\n self._attack_parse(self.attack_parameters)\n self.log += \"Weapon matched by str to {w}\\n\"\n break\n else:\n raise Exception(\"Cannot figure out what is: \" + self.settings['attack_parameters'] + str(\n type(self.settings['attack_parameters'])))\n elif type(self.settings['attack_parameters']) is list:\n self.attack_parameters = self.settings['attack_parameters']\n self._attack_parse(self.attack_parameters)\n else:\n raise Exception('Could not determine weapon')\n ##Weird bit needing upgrade.\n if 'alt_attack' in self.settings and type(self.settings['alt_attack']) is list:\n self.alt_attack = {'name': self.settings['alt_attack'][0],\n 'attack': Dice(bonus=self.settings['alt_attack'][1],\n num_faces=20)} # CURRENTLY ONLY NETTING IS OPTION!\n else:\n self.alt_attack = {'name': None, 'attack': None}\n # last but not least\n if 'alignment' not in self.settings:\n self.settings['alignment'] = \"unassigned mercenaries\" # hahaha!\n self.alignment = self.settings['alignment']\n # internal stuff\n self.tally = {'damage': 0, 'hits': 0, 'dead': 0, 'misses': 0, 'battles': 0, 'rounds': 0, 'hp': 0,\n 'healing_spells': 0}\n self.copy_index = 1\n self.condition = 'normal'\n\n self.dodge = 0\n self.concentrating = 0\n self.temp = 0\n\n self.buff_spells = None\n if 'buff_spells' in self.settings:\n self.buff_spells = int(self.settings['buff_spells'])\n self.conc_fx = getattr(self, self.settings['buff'])\n else:\n self.buff_spells = 0\n\n if 'cr' in self.settings:\n self.cr = self.settings['cr']\n elif 'level' in self.settings:\n # TODO check maths on MM.\n if int(self.settings['level']) > 1:\n self.cr = int(self.settings['level']) - 1\n else:\n self.cr = 0.5\n else:\n self.cr = None # vermin\n\n ##backdoor and overider\n self['custom'] = []\n for other in self.custom:\n if other == \"conc_fx\":\n getattr(self, self.settings['conc_fx'])\n else:\n self[other] = self.settings[other] # force it to be set.\n\n self.arena = None\n self.settings = {}", "def __init__(self):\n\n if Config._instance:\n raise Exception('Config singleton is already instantiated. 
User Config.get_instance() obtain it.')\n\n parser = configparser.ConfigParser()\n parser.read('C:\\\\Users\\\\Akatosh\\\\PythonProjects\\\\note-it\\\\config\\\\config.ini')\n\n self.sections = {}\n\n for section in parser:\n self.sections[section] = _Section(parser[section])\n\n Config._instance = self", "def setup_settings():\n # pylint: disable=import-outside-toplevel\n from django.conf import settings\n import tiny_erp.settings as defaults\n\n for name in dir(defaults):\n if name.isupper() and not hasattr(settings, name):\n setattr(settings, name, getattr(defaults, name))", "def load_settings(self):\n # Set the default settings. In case in a later version of this script the settings change, new default variables will be added automatically\n self.settings = {\n # Connection settings to OBS Studio websockets plugin\n \"host\": \"localhost\",\n \"port\": 4444,\n \"password\": \"\",\n \"update_frequency\": 1, # seconds, how often the script loads the SC2 UI location\n }\n if os.path.isfile(self.settings_path):\n with open(self.settings_path) as f:\n self.settings.update(json.load(f))", "def __init__(self):\n # Try to get the Bulma settings. The user may not have created this dict.\n try:\n self.bulma_settings = settings.BULMA_SETTINGS\n except AttributeError:\n self.bulma_settings = {}\n\n self.bulma_submodule_path = simple_bulma_path / \"bulma\" / \"sass\"\n self.custom_scss = self.bulma_settings.get(\"custom_scss\", [])\n self.variables = self.bulma_settings.get(\"variables\", {})\n self.output_style = self.bulma_settings.get(\"output_style\", \"nested\")\n self.storage = FileSystemStorage(simple_bulma_path)\n\n # Make a list of all the finders except this one.\n # We use this in the custom SCSS handler.\n other_finders = settings.STATICFILES_FINDERS.copy()\n other_finders.remove(\"django_simple_bulma.finders.SimpleBulmaFinder\")\n self.other_finders = [get_finder(finder) for finder in other_finders]", "def initialize_bot_settings(settings: Config) -> Config:\n\n settings.define_section(\"ctxreminders\", ContextualRemindersSection)\n\n settings.ctxreminders.configure_setting(\n \"persistence_dir\",\n \"In which folder do you want to store the reminders file?\",\n default=settings.core.homedir)\n\n settings.ctxreminders.configure_setting(\n \"context_capture_min_duration\",\n \"What is the minimum duration (in seconds) for reminders to save contextual chat logs? \" \\\n \"Default 30 days (2592000)\",\n default=2592000)\n\n settings.ctxreminders.configure_setting(\n \"context_capture_max_duration\",\n \"What is the minimum duration (in seconds) for reminders to save contextual chat logs? \" \\\n \"Default no limit (inf)\",\n default=math.inf)\n\n settings.ctxreminders.configure_setting(\n \"context_capture_chat_lines\",\n \"How many lines of chat to save with reminders that have context? Default 20\",\n default=20)\n\n settings.ctxreminders.configure_setting(\n \"pastebin_url\",\n \"What is the pastebin url (PrivateBin)? Default ''\",\n default=20)\n\n settings.ctxreminders.configure_setting(\n \"pastebin_expiration\",\n \"Expiration of pastebin pastes? 
Default 5min\",\n default=\"5min\")\n\n return settings", "def __init__(self, config):\n self._config = config\n self.logging = logging.getLogger(\"Settings\")\n self.logging.propagate = False\n level = logging.INFO\n if \"DEBUG\" in os.environ and (\n os.environ[\"DEBUG\"]\n or os.environ[\"DEBUG\"].lower() in (\"true\", \"t\", \"yes\", \"y\")\n ):\n level = logging.DEBUG\n self.logging.setLevel(level)\n handler = logging.StreamHandler()\n handler.setLevel(level)\n handler.setFormatter(logging.Formatter(\"%(asctime)s [Settings] %(message)s\"))\n self.logging.addHandler(handler)\n self.logging.debug(\"Running in debug mode.\")", "def __init__(self, filename=None, use_argv=True):\n self._init_filename = filename\n if use_argv:\n self.options, self.args = [self.get_parser().parse_args()] * 2\n else:\n self.options = self.args = None\n self._wrapped = self.load(file=self.settings_file)\n # build a special dynamic section for things the user wants,\n # ie, things that have been passed into the option\n # parser but are not useful in the .ini\n if not self.get_section('user'):\n self['user'] = {}\n if self.options is not None:\n self['user']['shell'] = self.options.shell and 'true' or ''\n else:\n self['user']['shell'] = ''" ]
[ "0.81398326", "0.8127764", "0.81223667", "0.8024526", "0.75850004", "0.7424904", "0.7297364", "0.727927", "0.7227605", "0.7210535", "0.7177579", "0.7173619", "0.71518964", "0.7086663", "0.695417", "0.6928098", "0.69154674", "0.6878284", "0.6834534", "0.6787074", "0.67606455", "0.6758912", "0.6748137", "0.6731232", "0.6717663", "0.6697846", "0.66729206", "0.66554177", "0.6643146", "0.6621014", "0.6619936", "0.659823", "0.6592204", "0.65862024", "0.65728766", "0.6546619", "0.6544933", "0.6533024", "0.6487919", "0.6480176", "0.6473921", "0.64459866", "0.64372915", "0.6423492", "0.6413758", "0.6411492", "0.63983417", "0.6369448", "0.63606113", "0.63606083", "0.63580656", "0.63486755", "0.63437945", "0.63424826", "0.63396674", "0.63156", "0.62853354", "0.62812006", "0.62799835", "0.6275392", "0.62692064", "0.6266638", "0.6265068", "0.62566024", "0.624254", "0.62355435", "0.6233606", "0.62323874", "0.62239194", "0.62191945", "0.62123764", "0.6195576", "0.61916375", "0.6187056", "0.6178486", "0.61685586", "0.6168261", "0.61567235", "0.61534995", "0.6152272", "0.6152243", "0.61508256", "0.6149036", "0.61482686", "0.6145459", "0.6136797", "0.61335075", "0.6128961", "0.61260414", "0.6113223", "0.6113038", "0.61114275", "0.6086795", "0.6084148", "0.60823786", "0.60778606", "0.6073026", "0.60699093", "0.605438", "0.6049942" ]
0.6513789
38
Transform x elementwise through an affine function y = exp(s)x + t, where s = st[...,0] and t = st[...,1], with s.shape == x.shape == t.shape. The Jacobian for this transformation is the coordinatewise product of the scaling factors J = prod(es[...,i],i).
def element_wise_affine(x, st, compute_jacobian=True):
    es = torch.exp(st[..., 0])
    t = st[..., 1]
    logj = None
    if compute_jacobian:
        logj = torch.sum(torch.log(es), dim=-1)

    return es * x + t, logj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def inverse_element_wise_affine(x, st, compute_jacobian=True):\n es = torch.exp(-st[..., 0])\n t = st[..., 1]\n logj = None\n if compute_jacobian:\n logj = torch.sum(torch.log(es), dim=-1)\n\n return es * (x - t), logj", "def affine(params, x):\n return np.dot(params['w'], x) + params['b']", "def transform(fn):\n def _(vec, dt):\n return np.einsum(\n 'ji,i,ki,k...->j...',\n evecs, fn(evals, dt), evecs, vec, optimize=True)\n\n return _", "def affineTransform(x,output_dim):\n w=tf.get_variable(\"w\", [x.get_shape()[1], output_dim])\n b=tf.get_variable(\"b\", [output_dim], initializer=tf.constant_initializer(0.0))\n return tf.matmul(x,w)+b", "def affine_transform(trans_mat, p0):\r\n n_data, n_dim = np.shape(p0)\r\n p0 = np.hstack((p0, np.ones((n_data, 1))))\r\n #return np.transpose(np.dot(np.transpose(trans_mat), np.transpose(p0)))\r\n return np.dot(p0, trans_mat)", "def affine_transform(x, output_dim, name=None):\n\n w = tf.get_variable(name + \"_w\", [x.get_shape()[1], output_dim], initializer=tf.truncated_normal_initializer(stddev=0.02))\n b = tf.get_variable(name + \"_b\", [output_dim], initializer=tf.constant_initializer(0.0))\n\n return tf.matmul(x, w) + b", "def affine_transform(geom, matrix):\n if geom.is_empty:\n return geom\n if len(matrix) == 6:\n ndim = 2\n a, b, d, e, xoff, yoff = matrix\n if geom.has_z:\n ndim = 3\n i = 1.0\n c = f = g = h = zoff = 0.0\n matrix = a, b, c, d, e, f, g, h, i, xoff, yoff, zoff\n elif len(matrix) == 12:\n ndim = 3\n a, b, c, d, e, f, g, h, i, xoff, yoff, zoff = matrix\n if not geom.has_z:\n ndim = 2\n matrix = a, b, d, e, xoff, yoff\n else:\n raise ValueError(\"'matrix' expects either 6 or 12 coefficients\")\n\n def affine_pts(pts):\n \"\"\"Internal function to yield affine transform of coordinate tuples\"\"\"\n if ndim == 2:\n for x, y in pts:\n xp = a * x + b * y + xoff\n yp = d * x + e * y + yoff\n yield (xp, yp)\n elif ndim == 3:\n for x, y, z in pts:\n xp = a * x + b * y + c * z + xoff\n yp = d * x + e * y + f * z + yoff\n zp = g * x + h * y + i * z + zoff\n yield (xp, yp, zp)\n\n # Process coordinates from each supported geometry type\n if geom.type in ('Point', 'LineString', 'LinearRing'):\n return type(geom)(list(affine_pts(geom.coords)))\n elif geom.type == 'Polygon':\n ring = geom.exterior\n shell = type(ring)(list(affine_pts(ring.coords)))\n holes = list(geom.interiors)\n for pos, ring in enumerate(holes):\n holes[pos] = type(ring)(list(affine_pts(ring.coords)))\n return type(geom)(shell, holes)\n elif geom.type.startswith('Multi') or geom.type == 'GeometryCollection':\n # Recursive call\n # TODO: fix GeometryCollection constructor\n return type(geom)([affine_transform(part, matrix)\n for part in geom.geoms])\n else:\n raise ValueError('Type %r not recognized' % geom.type)", "def affine_mult(affine, coordinates):\n return np.dot(coordinates, affine[:3, :3].T) + affine[:3, -1]", "def apply_affine_transform(x, M):\n is1d = len(x.shape) == 1\n if is1d:\n x = np.expand_dims(x, axis=0)\n\n x_hom = np.concatenate(\n [x, np.ones((x.shape[0], 1), dtype=x.dtype)], axis=-1\n )\n x_out = x_hom @ M.T\n if is1d:\n x_out = np.squeeze(x_out, axis=0)\n return x_out", "def transformAffine(self, coords):\n coordsshape = coords.shape\n dims = coordsshape[0] + 1\n coords = coords.reshape((len(coords), -1))\n coords = np.concatenate((coords, np.ones((1, len(coords[0])))), 0)\n affine = np.eye(dims)\n # now transform first to center:\n meanvec = np.mean(coords, 1)\n center = np.eye(dims)\n center[:-1, -1] = -meanvec[:-1]\n affine = np.matmul(center, 
affine)\n\n if np.sum(self.shift):\n affine[:-1, -1] += (self.deformrandomstate.rand(dims - 1) - 0.5) * np.float32(self.shift)\n if np.max(self.scaling) > 1:\n scales = np.ones(dims)\n # scales[:-1] = (self.deformrandomstate.rand(dims-1)-0.5)*(self.scaling-1.0/self.scaling)+(self.scaling+1/self.scaling)/2\n scales[:-1] = self.scaling ** (self.deformrandomstate.rand(dims - 1) * 2 - 1)\n scales = np.diag(scales)\n # print(scales)\n affine = np.matmul(scales, affine)\n if np.sum(self.rotation):\n affine = self._rotate(affine)\n # move back to location:\n center[:-1, -1] = -center[:-1, -1]\n affine = np.matmul(center, affine)\n # now appyl to coords:\n coords = np.matmul(affine, coords)\n coords = coords[:-1]\n coords = coords.reshape(coordsshape)\n return coords", "def transAffine2D( iScale=(1, 1), iTrans=(0, 0), iRot=0, iShear=(0, 0) ): \n iRot = iRot * np.pi / 180\n oMatScale = np.matrix( ((iScale[0],0,0),(0,iScale[1],0),(0,0,1)) )\n oMatTrans = np.matrix( ((1,0,iTrans[0]),(0,1,iTrans[1]),(0,0,1)) )\n oMatRot = np.matrix( ((np.cos(iRot),-np.sin(iRot),0),\\\n (np.sin(iRot),np.cos(iRot),0),(0,0,1)) )\n oMatShear = np.matrix( ((1,iShear[0],0),(iShear[1],1,0),(0,0,1)) )\n # ustvari izhodno matriko\n oMat2D = oMatTrans * oMatShear * oMatRot * oMatScale\n return oMat2D", "def AffineTransform( from_pts, to_pts ):\n \n # check that there are match points\n if len(from_pts) != len(to_pts) or len(to_pts)<1:\n print \"from_pts and to_pts must be of same size.\"\n return False\n\n # check the dimensions\n dim = len(from_pts[0]) # num of dimensions\n if len(from_pts) < dim:\n print \"Too few points => under-determined system.\"\n return False\n elif len(from_pts) > dim + 1:\n print \"Too many points => over-determined system.\"\n return False\n\n \n #segregate the x and y coordinages\n from_pts_x, from_pts_y = zip(*from_pts)\n to_pts_x, to_pts_y = zip(*to_pts)\n \n #create the Matricies for processing\n I = np.matrix([from_pts_x, from_pts_y, [1,1,1]])\n P = np.matrix([to_pts_x, to_pts_y])\n \n #Calculate the 2D affine transform matrix (A)\n A = P * linalg.pinv(I) \n\n # Make a result object\n class Transformation:\n \"\"\"Result object that represents the transformation\n from affine fitter.\"\"\"\n\n def To_Str(self):\n res = \"\"\n for j in range(dim):\n str1 = \"x%d' = \" % j\n for i in range(dim):\n str1 +=\"x%d * %f + \" % (i, A[i][j+dim+1])\n str1 += \"%f\" % A[dim][j+dim+1]\n res += str1 + \"\\n\"\n return res\n\n def Transform(self, pt_x, pt_y):\n pt_vector = np.matrix([[pt_x], [pt_y], [1]])\n transformed_pt = A * pt_vector\n return map(itemgetter(0), transformed_pt.tolist())\n return Transformation()", "def estimate_stage_affine(t0, t1):\n src = np.array([t.tforms[0].translation for t in t0])\n dst = np.array([t.tforms[1].translation for t in t1])\n aff = renderapi.transform.AffineModel()\n aff.estimate(src, dst)\n return aff", "def affine_forward(x, W, b):\r\n x2d = np.reshape(x, (x.shape[0], -1)) # convert 4D input matrix to 2D \r\n out = np.dot(x2d, W) + b # linear transformation\r\n cache = (x, W, b) # keep for backward step (stay with us)\r\n return out, cache", "def calc_affine(df):\n\tx0 = df.columns[0]\n\ty0 = df.index[0]\n\tdx = df.columns[1] - df.columns[0]\n\tdy = df.index[1] - df.index[0]\n\t\n\tt = affine.Affine(dx, 0, x0 , 0, dy ,y0 - dy) \n\t# y0 - dy because anker point is in the south!\n\treturn t", "def temporal_affine_forward(x, w, b):\n N, T, D = x.shape\n M = b.shape[0]\n out = x.reshape(N * T, D).dot(w).reshape(N, T, M) + b\n cache = x, w, b, out\n return out, cache", "def 
affine_forward(x, w, b):\n out = None\n ###########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. You #\n # will need to reshape the input into rows. #\n ###########################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n t = np.reshape(x,(x.shape[0],np.prod(np.shape(x)[1:])))\n \n\n out = np.dot(t,w) + b\n \n #print(np.shape(out))\n\n pass\n\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n cache = (x, w, b)\n return out, cache", "def estimate_affine_matrix_3d_to_2d(X, x):\n assert x.shape[0] == X.shape[0]\n assert x.shape[0] >= 4\n X = X.T # (3, n)\n x = x.T # (2, n)\n n = x.shape[1]\n\n ###---- 1. normalization\n ## 2d points\n mean = np.mean(x, 1) # (2, )\n x = x - np.tile(mean[:, np.newaxis], [1, n]) # (2, n)\n average_norm = np.mean(np.sqrt(np.sum(x ** 2, 0)))\n scale = np.sqrt(2) / average_norm\n x = scale * x\n\n # T = [[scale, 0, -mean * scale], \n # [ 0, scale, -mean * scale], \n # [ 0, 0, 1 ]]\n T = np.zeros((3, 3), dtype=np.float32)\n T[0, 0] = T[1, 1] = scale\n T[:2, 2] = -mean * scale\n T[2, 2] = 1\n\n ## 3d points\n X_homo = np.vstack((X, np.ones((1, n)))) # (4, n)\n mean = np.mean(X, 1) # (3, )\n X = X - np.tile(mean[:, np.newaxis], [1, n]) # (3, n)\n m = X_homo[: 3, :] - X\n average_norm = np.mean(np.sqrt(np.sum(X ** 2, 0)))\n scale = np.sqrt(3) / average_norm\n X = scale * X\n\n U = np.zeros((4, 4), dtype=np.float32)\n U[0, 0] = U[1, 1] = U[2, 2] = scale\n U[: 3, 3] = -mean * scale\n U[3, 3] = 1\n\n ###---- 2. equations\n A = np.zeros((n * 2, 8), dtype=np.float32)\n X_homo = np.vstack((X, np.ones((1, n)))).T\n A[: n, : 4] = X_homo\n A[n: , 4: ] = X_homo\n b = np.reshape(x, [-1, 1]) # (2n, 1)\n\n ###---- 3.solution\n p_8 = np.linalg.pinv(A).dot(b) # (8, 2n) x (2n, 1) -> (8, 1)\n p = np.zeros((3, 4), dtype=np.float32)\n p[0, :] = p_8[:4, 0]\n p[1, :] = p_8[4:, 0]\n p[-1, -1] = 1\n\n ###---- 4. 
denormalization\n P_Affine = np.linalg.inv(T).dot(p.dot(U))\n return P_Affine", "def get_affine_matrix2d(\n translations: torch.Tensor,\n center: torch.Tensor,\n scale: torch.Tensor,\n angle: torch.Tensor,\n sx: Optional[torch.Tensor] = None,\n sy: Optional[torch.Tensor] = None,\n) -> torch.Tensor:\n transform: torch.Tensor = get_rotation_matrix2d(center, -angle, scale)\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n if any(s is not None for s in [sx, sy]):\n shear_mat = get_shear_matrix2d(center, sx, sy)\n transform_h = transform_h @ shear_mat\n\n return transform_h", "def get_affine_matrix2d(\n translations: Tensor,\n center: Tensor,\n scale: Tensor,\n angle: Tensor,\n sx: Tensor | None = None,\n sy: Tensor | None = None,\n) -> Tensor:\n transform: Tensor = get_rotation_matrix2d(center, -angle, scale)\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n if any(s is not None for s in [sx, sy]):\n shear_mat = get_shear_matrix2d(center, sx, sy)\n transform_h = transform_h @ shear_mat\n\n return transform_h", "def apply_affine_transform(x, theta=0, tx=0, ty=0, shear=0, zx=1, zy=1,\n row_axis=0, col_axis=1, channel_axis=2,\n fill_mode='nearest', cval=0., order=1):\n if scipy is None:\n raise ImportError('Image transformations require SciPy. '\n 'Install SciPy.')\n transform_matrix = None\n if theta != 0:\n theta = np.deg2rad(theta)\n rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],\n [np.sin(theta), np.cos(theta), 0],\n [0, 0, 1]])\n transform_matrix = rotation_matrix\n\n if tx != 0 or ty != 0:\n shift_matrix = np.array([[1, 0, tx],\n [0, 1, ty],\n [0, 0, 1]])\n if transform_matrix is None:\n transform_matrix = shift_matrix\n else:\n transform_matrix = np.dot(transform_matrix, shift_matrix)\n\n if shear != 0:\n shear = np.deg2rad(shear)\n shear_matrix = np.array([[1, -np.sin(shear), 0],\n [0, np.cos(shear), 0],\n [0, 0, 1]])\n if transform_matrix is None:\n transform_matrix = shear_matrix\n else:\n transform_matrix = np.dot(transform_matrix, shear_matrix)\n\n if zx != 1 or zy != 1:\n zoom_matrix = np.array([[zx, 0, 0],\n [0, zy, 0],\n [0, 0, 1]])\n if transform_matrix is None:\n transform_matrix = zoom_matrix\n else:\n transform_matrix = np.dot(transform_matrix, zoom_matrix)\n\n if transform_matrix is not None:\n h, w = x.shape[row_axis], x.shape[col_axis]\n transform_matrix = transform_matrix_offset_center(\n transform_matrix, h, w)\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n\n channel_images = [ndimage.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=order,\n mode=fill_mode,\n cval=cval) for x_channel in x]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def __affine_geo_transformation(x, y, gtr):\n\n # https://gdal.org/user/raster_data_model.html#affine-geotransform\n # Affine transformation rewritten for rasterio:\n gtr_x = gtr[2] + (x + 0.5) * gtr[0] + (y + 0.5) * gtr[1]\n gtr_y = gtr[5] + (x + 0.5) * gtr[3] + (y + 0.5) * gtr[4]\n\n return gtr_x, gtr_y", "def fit_transform(self, x: Array2D) -> Array2D:", "def get_affine(x, m, c):\n x = m*x + c\n return x", "def affine_forward(x,w,b):\n out=None\n N=x.shape[0]\n x_row=x.reshape(N,-1)\n out=np.dot(x_row,w)+b\n cache=(x,w,b)\n return out,cache", "def affine_forward(X, W, 
b):\n return np.dot(X, W) + b", "def apply_affine(A: Affine, x: np.ndarray, y: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:\n\n shape = x.shape\n\n A = np.asarray(A).reshape(3, 3) # type: ignore[assignment]\n t = A[:2, -1].reshape((2, 1)) # type: ignore[index]\n A = A[:2, :2] # type: ignore[index]\n\n x, y = A @ np.vstack([x.ravel(), y.ravel()]) + t\n x, y = (a.reshape(shape) for a in (x, y))\n return (x, y)", "def affine_forward(x, w, b):\n #raise NotImplementedError\n #######################################################################\n # #\n # #\n # TODO: YOUR CODE HERE #\n # #\n # #\n #######################################################################\n out=np.dot(x,w)+b\n cache=(x,w,b)\n return(out, cache)", "def affine_forward(x, w, b):\n ############################################################################\n # TODO: Implement the affine forward pass. Store the result in 'out'. You #\n # will need to reshape the input into rows. #\n ############################################################################\n ############################################################################\n # START OF YOUR CODE #\n ############################################################################\n N = len(x)\n D,M = w.shape\n # reshape get a new x\n new_x = x.reshape(N,D)\n # get the output\n out = np.dot(new_x,w) + np.expand_dims(b,axis=0)\n \n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n return out", "def get_transform(ds):\n\n if 'transform' in ds.attrs:\n ds_trans = ds.attrs['transform']\n if isinstance(ds_trans, Affine):\n return ds_trans\n else:\n return Affine(*ds_trans)\n\n elif 'crs' in ds.data_vars and 'i2m' in ds.data_vars['crs'].attrs:\n transf_str = ds.data_vars['crs'].attrs['i2m']\n a = list(map(float, transf_str.split(',')))\n return Affine(a[0], a[2], a[4], a[1], a[3], a[5])\n\n else:\n resx, resy = get_resolution(ds)\n xoff = ds['x'].values.min()\n yoff = ds['y'].values.max()\n return Affine(resx, 0, xoff, 0, resy, yoff)", "def _apply_transform(self, x, transform_parameters):\n # x is a single image, so it doesn't have image number at index 0\n img_row_axis = self.row_axis - 1\n img_col_axis = self.col_axis - 1\n img_channel_axis = self.channel_axis - 1\n\n x = apply_affine_transform(x, transform_parameters.get('theta', 0),\n transform_parameters.get('tx', 0),\n transform_parameters.get('ty', 0),\n transform_parameters.get('shear', 0),\n transform_parameters.get('zx', 1),\n transform_parameters.get('zy', 1),\n row_axis=img_row_axis,\n col_axis=img_col_axis,\n channel_axis=img_channel_axis,\n fill_mode=self.fill_mode,\n cval=self.cval)\n\n if transform_parameters.get('channel_shift_intensity') is not None:\n x = apply_channel_shift(x,\n transform_parameters['channel_shift_intensity'],\n img_channel_axis)\n\n if transform_parameters.get('flip_horizontal', False):\n x = self._flip_axis(x, img_col_axis)\n\n if transform_parameters.get('flip_vertical', False):\n x = self._flip_axis(x, img_row_axis)\n\n if transform_parameters.get('brightness') is not None:\n x = apply_brightness_shift(x, transform_parameters['brightness'])\n\n return x", "def affine_forward(x, w, b):\n out = None\n ###########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. You #\n # will need to reshape the input into rows. 
#\n ###########################################################################\n reshaped_inp = np.reshape(x,(int(x.shape[0]),int(np.prod(x.shape) / x.shape[0])))\n out = reshaped_inp.dot(w) + b\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n cache = (x, w, b)\n return out, cache", "def fit_transform(self, x):\n self.fit(x)\n\n if self.method == \"svd\":\n return self._u * self._s\n else:\n return self._transform_eig(x)", "def affine_transform(x, transform_matrix, channel_index=2, fill_mode='nearest', cval=0., order=1):\n # transform_matrix = transform_matrix_offset_center()\n # asdihasid\n # asd\n\n x = np.rollaxis(x, channel_index, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [\n ndi.interpolation.affine_transform(\n x_channel, final_affine_matrix, final_offset, order=order, mode=fill_mode, cval=cval\n ) for x_channel in x\n ]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_index + 1)\n return x", "def affine_forward(x, w, b):\n out = None\n ###########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. You #\n # will need to reshape the input into rows. #\n ###########################################################################\n dim_size = x[0].shape\n X = x.reshape(x.shape[0], np.prod(dim_size))\n out = X.dot(w) + b\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n cache = (x, w, b)\n return out, cache", "def affine_forward(x, w, b):\n out = None\n ########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. #\n # You will need to reshape the input into rows. 
#\n ########################################################################\n\n x_reshaped = x.reshape(x.shape[:1] + (-1,))\n out = x_reshaped.dot(w) + b\n\n ########################################################################\n # END OF YOUR CODE #\n ########################################################################\n cache = (x, w, b)\n return out, cache", "def affine_forward(x, w, b):\n N = x.shape[0]\n\n # reshape input into rows\n output = x.reshape([N, -1]).dot(w) + b\n cache = (x, w, b)\n\n return output, cache", "def transforms_multiply(t0s, t1s):\r\n \r\n return ut.matrix_multiply(t0s, t1s)", "def affine_forward(x, w, b):\n out = None\n x_shape = x.shape\n x_reshaped = x.reshape(x_shape[0], np.prod(x_shape[1:]))\n out = np.dot(x_reshaped, w) + b\n cache = (x, w, b)\n return out, cache", "def affine_forward(x, w, b):\n out = None\n \n # reshape the input into (N, d_1 *...* d_k)\n input_shape = x.shape\n prod = 1\n for i in range(1,len(input_shape)):\n prod *= input_shape[i]\n\n a = x.reshape(x.shape[0],prod)\n out = np.dot(a,w) + b\n \n cache = (x, w, b)\n return out, cache", "def transform(self, x):\n return self._transform_eig(x)", "def elastic_transform(image, alpha, sigma, alpha_affine, random_state=None):\n if random_state is None:\n random_state = np.random.RandomState(None)\n\n shape = image.shape\n shape_size = shape[:2]\n \n # Random affine\n center_square = np.float32(shape_size) // 2\n square_size = min(shape_size) // 3\n pts1 = np.float32([center_square + square_size, [center_square[0]+square_size, center_square[1]-square_size], center_square - square_size])\n pts2 = pts1 + random_state.uniform(-alpha_affine, alpha_affine, size=pts1.shape).astype(np.float32)\n M = cv2.getAffineTransform(pts1, pts2)\n image = cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101)\n\n dx = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha\n dy = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha\n dz = np.zeros_like(dx)\n\n x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2]))\n indices = np.reshape(y+dy, (-1, 1)), np.reshape(x+dx, (-1, 1)), np.reshape(z, (-1, 1))\n\n return map_coordinates(image, indices, order=1, mode='reflect').reshape(shape)", "def affine_trans(self):\n h, w, _ = self.img.shape\n\n \"\"\"\n pts1 = np.float32(\n [\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n ]\n )\n pts2 = np.float32(\n [\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n ]\n )\n \"\"\"\n\n pts1 = np.float32([[50, 50], [200, 50], [50, 200]])\n pts2 = np.float32([[10, 100], [200, 50], [100, 250]])\n\n M = cv2.getAffineTransform(pts1, pts2)\n\n self.img = cv2.warpAffine(self.img, M, (w, h))\n\n self.edits.append(\"affine\")\n return self", "def affine(img, angle, translate, scale, shear):\n if not _is_numpy(img):\n raise TypeError('img should be Numpy Image. 
Got {}'.format(type(img)))\n\n assert isinstance(translate, (tuple, list)) and len(translate) == 2, \\\n \"Argument translate should be a list or tuple of length 2\"\n\n assert scale > 0.0, \"Argument scale should be positive\"\n\n aug = iaa.Affine(scale=scale, rotate=angle, translate_px=translate, shear=shear)\n return aug.augment_image(img)", "def fit_transform(self, Xs, y=None):\n return self.fit(Xs, y).transform(Xs)", "def setrans(Bi, t):\n\n x,v=mat2set(Bi)\n Bo = set2mat((x+t,v))\n Bo = Bo.astype(Bi.dtype)\n return Bo", "def get_affine_transform(gps_coords, pdr_coords):\n # Compute similarity Xp = s A X + b\n X = np.array(pdr_coords)\n Xp = np.array(gps_coords)\n T = tf.superimposition_matrix(X.T, Xp.T, scale=True)\n\n A, b = T[:3, :3], T[:3, 3]\n s = np.linalg.det(A)**(1. / 3)\n A /= s\n return s, A, b", "def translateEuler(self,trans):\n return np.array([[1,0,0,trans[0]],[0,1,0,trans[1]],[0,0,1,trans[2]],[0,0,0,1]])", "def model_1exp(a, t, s, t0=0):\n a, t, s = physicond(a, t, s)\n\n # auxilary function taking as argument a time array.\n def aux(array_t, details=False):\n\n # applying time offset\n t_var = array_t - t0\n\n # model expression coming from symbolic calculation\n pulse = np.heaviside(t_var, 1.)\n ind = np.where(pulse != 0.)\n pulse[ind] = a * (np.exp(-t_var[ind]/t)-np.exp(-t_var[ind]/s))\n\n # same behavior as model_2exp and model_3exp\n if details == True:\n return pulse, pulse\n elif details == False:\n return pulse\n\n return aux", "def compute_e(f_mat, m_mat):\r\n return m_mat.T @ f_mat @ m_mat", "def convert_affine(ref, t, out):\n args = [\n transform_exe,\n '-d', '3',\n '-r', ref,\n '-t', '[{},0]'.format(t),\n '-o', '[{},1]'.format(out),\n '--float'\n ]\n subprocess.check_call(args)", "def elastic_transform(X, min_alpha=36, max_alpha=38, min_sigma=5, max_sigma=6, random_state=None, n_jobs=1):\n if random_state is None:\n rng = np.random\n else:\n rng = np.random.RandomState(random_state)\n alphas = rng.uniform(min_alpha, max_alpha, size=X.shape[0])\n sigmas = rng.uniform(min_sigma, max_sigma, size=X.shape[0])\n X_elas = Parallel(n_jobs=n_jobs)(delayed(elastic_transform_one)(X[i], alphas[i], sigmas[i]) for i in range(X.shape[0]))\n return np.array(X_elas, dtype='float32')", "def elastic_transform(image, alpha=1000, sigma=30, spline_order=1, mode='nearest', random_state=np.random):\n assert image.ndim == 2\n shape = image.shape[:2]\n\n dx = gaussian_filter((random_state.rand(*shape) * 2 - 1),\n sigma, mode=\"constant\", cval=0) * alpha\n dy = gaussian_filter((random_state.rand(*shape) * 2 - 1),\n sigma, mode=\"constant\", cval=0) * alpha\n\n x, y = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')\n indices = [np.reshape(x + dx, (-1, 1)), np.reshape(y + dy, (-1, 1))]\n result = map_coordinates(\n image, indices, order=spline_order, mode=mode).reshape(shape)\n return result", "def eval(self, x):\n y = list(x)\n if not self.biased:\n y.insert(0, 1.0)\n y = np.array(y).reshape((self.Ws[0].shape[1], 1))\n for W, g in zip(self.Ws, self.gs):\n y = g(np.dot(W, y))\n return y.flatten()", "def map(self,Affine,i):\n map_x = np.zeros([self.num,self.d])\n for k in range(self.num):\n map_x[k,:] = Affine.apply(i,self.pick(k))\n Mapped = Model_Points(map_x)\n return Mapped", "def affine_transform(initialize=False, fixed=None, moments=True) :\n \n transform = itk.AffineTransform[itk.D, fixed.ndim].New()\n \n if initialize :\n fixed_itk = medipy.itk.medipy_image_to_itk_image(fixed, False)\n #moving_itk = medipy.itk.medipy_image_to_itk_image(moving, False)\n\n 
fixedIndex = fixed_itk.GetLargestPossibleRegion().GetIndex()\n fixedSize = fixed_itk.GetLargestPossibleRegion().GetSize()\n\n centerIndex = (int(fixedIndex[0] + fixedSize[0] / 2.0), \\\n int(fixedIndex[1] + fixedSize[1] / 2.0), \\\n int(fixedIndex[2] + fixedSize[2] / 2.0))\n\n rotationCenter = fixed_itk.TransformIndexToPhysicalPoint(centerIndex)\n\n transform.SetIdentity()\n transform.SetCenter(rotationCenter)\n\n #initial_transform = itk.VersorRigid3DTransform[itk.D].New()\n #initializer = itk.CenteredTransformInitializer[\n # initial_transform, fixed_itk, moving_itk].New(\n # Transform=initial_transform, FixedImage=fixed_itk, MovingImage=moving_itk) \n #if moments :\n # initializer.MomentsOn()\n #else :\n # initializer.GeometryOn()\n #initializer.InitializeTransform()\n #transform.SetCenter(initial_transform.GetCenter())\n #transform.SetOffset(initial_transform.GetOffset())\n \n return transform", "def apply_transform(x,\n transform_matrix,\n channel_axis=0,\n fill_mode='nearest',\n cval=0.,\n interp_order=0):\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:3, :3]\n final_offset = transform_matrix[:3, -1]\n channel_volumes = [ndi.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=interp_order, # NOTE: The order of the spline interpolation\n mode=fill_mode,\n cval=cval) for x_channel in x]\n x = np.stack(channel_volumes, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def affine_2Dtransform(img, t_mat, height, width, h_offset=0, w_offset=0, nh_flag=False, nw_flag=False):\n # transform matrix must be validated\n if(np.shape(t_mat) != (2, 2)):\n return img\n\n # implementing matrix multiplication to a default map of source data in order to apply transform\n # and to achieve coordination/location of transformed matrix according to source data(data map)\n coord_map = transform_calcualtion(\n height, width, t_mat, h_offset, w_offset, nh_flag, nw_flag)\n\n # transformed image data construction\n t_img = np.full((height+h_offset, width+w_offset, 3), 255, dtype='uint8')\n\n # applying new map to image inorder to complete the transform\n try:\n for i in range(height):\n for j in range(width):\n [i_new_coord, j_new_coord] = coord_map[i, j, :]\n # unhandled bound-jumpout\n t_img[i_new_coord, j_new_coord, :] = img[i, j, :]\n except:\n print(\"not enough offset/negative coordination pushed\")\n return img\n return t_img", "def translateEuler(trans):\n return np.array([[1,0,0,trans[0]],[0,1,0,trans[1]],[0,0,1,trans[2]],[0,0,0,1]])", "def affine_sigmoid(xin, m= 10, c= 3):\n if type(xin) != np.ndarray:\n x = np.array([xin])\n else:\n x = xin\n\n x = get_affine(x, m, c)\n output = get_sigmoid(x)\n\n if type(xin) != np.ndarray:\n return output[0]\n else:\n return output", "def transform(self, x):", "def _transform(\n self, x: \"torch.Tensor\", y: Optional[\"torch.Tensor\"], **kwargs\n ) -> Tuple[\"torch.Tensor\", Optional[\"torch.Tensor\"]]:\n import torch\n import torchvision.transforms.functional as F\n\n img_size = x.shape[:2]\n\n angle = float(\n torch.empty(1)\n .uniform_(float(self.degree_range[0]), float(self.degree_range[1]))\n .item()\n )\n\n max_dx = float(self.translate[0] * img_size[1])\n max_dy = float(self.translate[1] * img_size[0])\n tx = int(round(torch.empty(1).uniform_(-max_dx, max_dx).item()))\n ty = int(round(torch.empty(1).uniform_(-max_dy, max_dy).item()))\n translations = (tx, ty)\n\n scale = float(torch.empty(1).uniform_(self.scale[0], self.scale[1]).item())\n\n # x needs to have channel 
first\n x = x.permute(2, 0, 1)\n x = F.affine(\n img=x, angle=angle, translate=translations, scale=scale, shear=(0.0, 0.0)\n )\n x = x.permute(1, 2, 0)\n\n return torch.clamp(x, min=self.clip_values[0], max=self.clip_values[1]), y", "def jacobian(self, dt):\n if dt not in self._F_cache:\n d = self._dimension\n with torch.no_grad():\n F = eye_like(self.sa2, d)\n F[: d // 2, d // 2 :] = dt * eye_like(self.sa2, d // 2)\n self._F_cache[dt] = F\n\n return self._F_cache[dt]", "def __compose_transformation(self):\n s = self.scale\n rotR = self.rotation\n t = self.translation\n T = np.eye(4)\n T[0:3, 3] = t\n R = np.eye(4)\n R[0:3, 0:3] = rotR\n M = T.dot(R)\n if s == 1:\n M = T.dot(R)\n else:\n S = np.eye(4)\n S[0:3, 0:3] = np.diag([s, s, s])\n M = T.dot(R).dot(S)\n return M", "def seToSE( x ):\n x = asarray(x,dtype=float)\n if x.shape != (6,):\n raise ValueError(\"shape must be (6,); got %s\" % str(x.shape))\n #\n return expM(screw(x))", "def J_direct_transform(om, consts, taus):\n ndecay=len(consts) ; noms=len(om)\n Jmat = np.zeros( (ndecay, noms ) )\n for i in range(ndecay):\n Jmat[i] = consts[i]*taus[i] /(1 + (taus[i]*om)**2.)\n return Jmat.sum(axis=0)", "def exp(tensor):\n return _elementary_op(tensor, np.exp, np.exp)", "def affineTransform(img, pts, newPts):\n\ttmp = img.copy()\n\tif len(img.shape) is 3:\n\t\trows, cols, ch = img.shape\n\telse:\n\t\trows, cols = img.shape\n\tpts1 = np.float32(pts)\n\tpts2 = np.float32(newPts)\n\tM = cv2.getAffineTransform(pts1, pts2)\n\tdst = cv2.warpAffine(tmp, M, (cols, rows))\n\treturn dst", "def sfunc(self,x,y):\n return np.exp(-(x-self.x_0)**2.0-(y-self.y_0)**2.0)", "def apply_T(T, points):\n flipped = False\n if points.shape[0] != 3:\n assert points.shape[1] == 3, \"Points must be 3xN or Nx3\"\n points = points.T\n flipped = True\n points_h = np.vstack((points, np.ones_like(points[0, :])))\n points_transformed_h = np.dot(T, points_h)\n points_transformed = points_transformed_h[:-1]\n if flipped:\n return points_transformed.T\n return points_transformed", "def transform_and_compute_jacobian(self, xj):\n x = xj[:, :self.d].detach()\n log_j = xj[:, -1]\n\n x.requires_grad = True\n y = self.flow_(x)\n\n n_batch = xj.shape[0]\n\n jx = torch.zeros(n_batch, self.d, self.d).to(log_j.device)\n directions = torch.eye(self.d).to(log_j).unsqueeze(0).repeat(n_batch, 1, 1)\n\n for i in range(self.d):\n jx[:, i, :] = torch.autograd.grad(y, x, directions[:, i, :],\n allow_unused=True, create_graph=True, retain_graph=True)[0]\n x.requires_grad = False\n x.grad = None\n\n log_det_j = torch.log(torch.abs(torch.det(jx)))\n return torch.cat([y.detach(), (log_j + log_det_j).unsqueeze(1)], 1)", "def minimize_transform(params, points_dest, points_src, constraints):\n # initialize dof\n dof = [0, 0, 0, 0, 0, 0, 1, 1, 1]\n # initialize dictionary to relate constraints index to dof\n dict_dof = {'Tx': 0, 'Ty': 1, 'Tz': 2, 'Rx': 3, 'Ry': 4, 'Rz': 5, 'Sx': 6, 'Sy': 7, 'Sz': 8}\n # extract constraints\n list_constraints = constraints.split('_')\n # loop across constraints and update dof\n for i in range(len(list_constraints)):\n dof[dict_dof[list_constraints[i]]] = params[i]\n # convert dof to more intuitive variables\n tx, ty, tz, alpha, beta, gamma, scx, scy, scz = dof[0], dof[1], dof[2], dof[3], dof[4], dof[5], dof[6], dof[7], dof[8]\n # build rotation matrix\n rotation_matrix = matrix([[cos(alpha)*cos(beta), cos(alpha)*sin(beta)*sin(gamma)-sin(alpha)*cos(gamma), cos(alpha)*sin(beta)*cos(gamma)+sin(alpha)*sin(gamma)],\n [sin(alpha)*cos(beta), 
sin(alpha)*sin(beta)*sin(gamma)+cos(alpha)*cos(gamma), sin(alpha)*sin(beta)*cos(gamma)-cos(alpha)*sin(gamma)],\n [-sin(beta), cos(beta)*sin(gamma), cos(beta)*cos(gamma)]])\n # build scaling matrix\n scaling_matrix = matrix([[scx, 0.0, 0.0], [0.0, scy, 0.0], [0.0, 0.0, scz]])\n # compute rotation+scaling matrix\n rotsc_matrix = scaling_matrix * rotation_matrix\n # compute center of mass from moving points (src)\n points_src_barycenter = mean(points_src, axis=0)\n # apply transformation to moving points (src)\n points_src_reg = ((rotsc_matrix * (matrix(points_src) - points_src_barycenter).T).T + points_src_barycenter) + matrix([tx, ty, tz])\n # record SSE for later display\n sse_results.append(SSE(matrix(points_dest), points_src_reg))\n # return SSE\n return SSE(matrix(points_dest), points_src_reg)", "def affineSchur(self):\n return AffineSchurFunctions(self)", "def transformAffine(self, path=None, src=None, dst=None):\n if path is not None:\n landmarks = pd.read_csv(path, skiprows=1,engine=\"c\", na_filter=False, header=None, delim_whitespace=True, dtype=np.float32).as_matrix()\n dst = landmarks[:,3:5]\n src = landmarks[:,1:3]\n affine = transform.estimate_transform(\"affine\",src,dst)\n data = self.stormData[0][:,0:2]\n data = affine(data)\n self.stormData[0][:,0:2] = data", "def attrTransform(self, matrix, transform):\n for ttype, targs in self.reTransformFind.findall(transform):\n targs = list(map(lambda x: float(x), self.reNumberFind.findall(targs)))\n if ttype == 'matrix':\n newmatrix = [ targs[0], targs[1],\n targs[2], targs[3],\n targs[4], targs[5] ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'translate':\n tx = targs[0]\n ty = targs[1] if len(targs) > 1 else 0\n newmatrix = [ 1, 0, 0, 1, tx, ty ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'scale':\n sx = targs[0]\n sy = targs[1] if len(targs) > 1 else sx\n newmatrix = [ sx, 0, 0, sy, 0, 0 ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'rotate':\n if len(targs) == 1:\n alpha = targs[0]\n newmatrix = [ math.cos(alpha), math.sin(alpha),\n -math.sin(alpha), math.cos(alpha),\n 0, 0]\n self.matrixMul(matrix, newmatrix)\n else:\n alpha = targs[0]\n newmatrix = [ 1, 0, 0, 1, targs[1], targs[2] ]\n self.matrixMul(matrix, newmatrix)\n newmatrix = [ math.cos(alpha), math.sin(alpha),\n -math.sin(alpha), math.cos(alpha),\n 0, 0]\n self.matrixMul(matrix, newmatrix)\n newmatrix = [ 1, 0, 0, 1, -targs[1], -targs[2] ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'skewX' or ttype == 'skewY':\n self.alert(\"skewX and skewY transformations are not supported\", elem)\n else:\n print('unknown transform type: ', ttype)\n return matrix", "def _apply(self, x, **kwargs):\n return reduce(lambda x_i, tr: tr._apply(x_i), self.transforms, x)", "def translate(self, x=0, y=0, z=0):\n\t\ttranslation = np.identity(4)\n\t\ttranslation[0, 3] += x\n\t\ttranslation[1, 3] += y\n\t\ttranslation[2, 3] += z\n\t\t\n\t\tself.matrix = np.matmul(self.matrix, translation)", "def affine_matrix(self) -> np.ndarray:\n return self._tf_matrix", "def _apply_sx(self, state, axes, inverse=False):\n if inverse:\n return 0.5 * ((1 - 1j) * state + (1 + 1j) * self._apply_x(state, axes))\n\n return 0.5 * ((1 + 1j) * state + (1 - 1j) * self._apply_x(state, axes))", "def elastic_transform_approx(\n img: np.ndarray,\n alpha: float,\n sigma: float,\n alpha_affine: float,\n interpolation: int = cv2.INTER_LINEAR,\n border_mode: int = cv2.BORDER_REFLECT_101,\n value: Optional[ImageColorType] = None,\n random_state: Optional[np.random.RandomState] = None,\n) -> 
np.ndarray:\n height, width = img.shape[:2]\n\n # Random affine\n center_square = np.array((height, width), dtype=np.float32) // 2\n square_size = min((height, width)) // 3\n alpha = float(alpha)\n sigma = float(sigma)\n alpha_affine = float(alpha_affine)\n\n pts1 = np.array(\n [\n center_square + square_size,\n [center_square[0] + square_size, center_square[1] - square_size],\n center_square - square_size,\n ],\n dtype=np.float32,\n )\n pts2 = pts1 + random_utils.uniform(-alpha_affine, alpha_affine, size=pts1.shape, random_state=random_state).astype(\n np.float32\n )\n matrix = cv2.getAffineTransform(pts1, pts2)\n\n warp_fn = _maybe_process_in_chunks(\n cv2.warpAffine,\n M=matrix,\n dsize=(width, height),\n flags=interpolation,\n borderMode=border_mode,\n borderValue=value,\n )\n img = warp_fn(img)\n\n dx = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dx, (17, 17), sigma, dst=dx)\n dx *= alpha\n\n dy = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dy, (17, 17), sigma, dst=dy)\n dy *= alpha\n\n x, y = np.meshgrid(np.arange(width), np.arange(height))\n\n map_x = np.float32(x + dx)\n map_y = np.float32(y + dy)\n\n remap_fn = _maybe_process_in_chunks(\n cv2.remap,\n map1=map_x,\n map2=map_y,\n interpolation=interpolation,\n borderMode=border_mode,\n borderValue=value,\n )\n return remap_fn(img)", "def transform(self, x: Array2D) -> Array2D:", "def elastic_transform(self, image, random_state=None):\n if random_state is None:\n random_state = np.random.RandomState(None)\n\n image = self.affine(image, random_state)\n #from ipdb import set_trace; set_trace()\n indices = self.stretch_indices(image, random_state)\n\n return map_coordinates(image, indices, order=1, mode='reflect').reshape(image.shape)", "def affine(self, image, random_state):\n shape = image.shape\n shape_size = shape[:2]\n transform_std = self.alpha_affine*image.shape[1]\n\n center_square = np.float32(shape[:2]) // 2\n square_size = min(shape[:2]) // 3\n\n source = np.float32([center_square + square_size, [center_square[0]+square_size, center_square[1]-square_size], center_square - square_size])\n destination = source + random_state.uniform(-transform_std, transform_std, size=source.shape).astype(np.float32)\n M = cv2.getAffineTransform(source, destination)\n\n return cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REPLICATE)", "def EvaluateJacobian(x):\n j = np.zeros((NOBSERVATIONS, 3))\n\n for i in range(NOBSERVATIONS):\n base = np.exp(-x[0] * t[i]) / (x[1] + x[2] * t[i])\n\n j[i][0] = t[i] * base\n j[i][1] = base / (x[1] + x[2] * t[i])\n j[i][2] = base * t[i] / (x[1] + x[2] * t[i])\n\n return j", "def f(self,t,svect):\n s1, vectors = spinlib.get_s_and_vectors(svect, self.N)\n heff = -spinlib.cycleLeft3D(s1,self.N) - spinlib.cycleRight3D(s1,self.N)\n \n # some transpose magic - basically want to matrix multiply the right things\n if len(vectors) == 0:\n retvect = vectors\n else:\n retvect = np.dot(self.jac(s1, heff), vectors).T.ravel()\n return self.J * np.concatenate((-spinlib.cross(s1, heff), retvect), axis = 0)", "def quantize_affine_given_quant_params(\n input: torch.Tensor,\n quantize_params: QuantizeAffineParams2,\n) -> torch.Tensor:\n return QuantizeAffineFunction.apply(input, quantize_params)", "def temperature_scaling(x,t):\n n,d = x.shape\n res = np.copy(x)\n\n res *= (1./t)\n for i in range(n):\n res[i] = softmax(res[i])\n return(res)", "def analytic(x, t, D, x0, xend, logx=False, 
c_s=1, use_log2=False):\n import scipy.special\n if t.ndim == 1:\n t = t.reshape((t.size, 1))\n expb = (lambda arg: 2**arg) if use_log2 else np.exp\n x = expb(x) if logx else x\n return c_s * scipy.special.erfc(x/(2*(D*t)**0.5))", "def transformation_matrix(self, s1, s2, s3, t1, t2, t3):\n\n s1 = np.array(s1)\n s2 = np.array(s2)\n s3 = np.array(s3)\n t1 = np.array(t1)\n t2 = np.array(t2)\n t3 = np.array(t3)\n\n Q = np.array(\n [\n [t2[0] - t1[0], t2[1] - t1[1], t2[2] - t1[2]],\n [t3[0] - t1[0], t3[1] - t1[1], t3[2] - t1[2]],\n ]\n )\n\n P = np.array([[s2[0] - s1[0], s2[1] - s1[1]], [s3[0] - s1[0], s3[1] - s1[1]]])\n\n try:\n # Invert the P matrix\n Pinv = inv(P)\n\n # Build the dot product\n T = np.dot(Pinv, Q)\n\n # Offset\n V0 = np.subtract(t2, np.transpose(s2[0:2]).dot(T))\n except Exception as e:\n self.log.error(\"An error occured during the transformation.\", exc_info=True)\n return -1, -1\n\n return T, V0", "def transform(self, x:generic_array, dense=True) -> generic_array:\n if type(x) == np.ndarray and not dense:\n warnings.warn(\"For Numpy transform it is best to use dense=True\")\n \n K_nq = self._pairwise_kernels(x, self.components_, dense=dense)\n x_new = K_nq @ self.normalization\n return x_new", "def nonlinear_eom_to_ss(aircraft, x_ss, u_ss, x_0, u_0, m, j, dx=0.1, du=0.1):\n \"\"\"return jacobians a, b wrt to x_ss and output matrices c, and d wrt u_ss.\"\"\"\n x = x_0\n u = u_0\n a = zeros((len(x_0), len(x_0)))\n b = zeros((len(x_0), len(u_0)))\n for ii in range(0, len(x_0)):\n x[ii] = x[ii] + dx\n c = c_f_m(aircraft, x, u_0)\n dxdt_1 = nonlinear_eom(x, m, j, c)\n\n x[ii] = x[ii] - dx\n c = c_f_m(aircraft, x, u_0)\n dxdt_2 = nonlinear_eom(x, m, j, c)\n ddx_dx = (dxdt_1 - dxdt_2)/(2*dx)\n a[:, ii] = transpose(ddx_dx)\n x = x_0\n\n for ii in range(0, len(u_0)):\n u[ii] = u[ii] + du\n c = c_f_m(aircraft, x_0, u)\n dxdt_1 = nonlinear_eom(x, m, j, c)\n\n u[ii] = u[ii] - du\n c = c_f_m(aircraft, x_0, u)\n dxdt_2 = nonlinear_eom(x, m, j, c)\n ddx_dx = (dxdt_1 - dxdt_2)/(2*du)\n b[:, ii] = transpose(ddx_dx)\n u = u_0\n\n a_out = a[x_ss, :]\n a_out = a_out[:, x_ss]\n\n b_out = b[x_ss, :]\n b_out = b_out[:, u_ss]\n\n c_out = identity(len(x_ss))\n d_out = zeros((len(x_ss), len(u_ss)))\n return a_out, b_out, c_out, d_out", "def elastic_transform(\n img: np.ndarray,\n alpha: float,\n sigma: float,\n alpha_affine: float,\n interpolation: int = cv2.INTER_LINEAR,\n border_mode: int = cv2.BORDER_REFLECT_101,\n value: Optional[ImageColorType] = None,\n random_state: Optional[np.random.RandomState] = None,\n approximate: bool = False,\n same_dxdy: bool = False,\n):\n height, width = img.shape[:2]\n\n # Random affine\n center_square = np.array((height, width), dtype=np.float32) // 2\n square_size = min((height, width)) // 3\n alpha = float(alpha)\n sigma = float(sigma)\n alpha_affine = float(alpha_affine)\n\n pts1 = np.array(\n [\n center_square + square_size,\n [center_square[0] + square_size, center_square[1] - square_size],\n center_square - square_size,\n ],\n dtype=np.float32,\n )\n pts2 = pts1 + random_utils.uniform(-alpha_affine, alpha_affine, size=pts1.shape, random_state=random_state).astype(\n np.float32\n )\n matrix = cv2.getAffineTransform(pts1, pts2)\n\n warp_fn = _maybe_process_in_chunks(\n cv2.warpAffine, M=matrix, dsize=(width, height), flags=interpolation, borderMode=border_mode, borderValue=value\n )\n img = warp_fn(img)\n\n if approximate:\n # Approximate computation smooth displacement map with a large enough kernel.\n # On large images (512+) this is approximately 2X times 
faster\n dx = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dx, (17, 17), sigma, dst=dx)\n dx *= alpha\n if same_dxdy:\n # Speed up even more\n dy = dx\n else:\n dy = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dy, (17, 17), sigma, dst=dy)\n dy *= alpha\n else:\n dx = np.float32(\n gaussian_filter((random_utils.rand(height, width, random_state=random_state) * 2 - 1), sigma) * alpha\n )\n if same_dxdy:\n # Speed up\n dy = dx\n else:\n dy = np.float32(\n gaussian_filter((random_utils.rand(height, width, random_state=random_state) * 2 - 1), sigma) * alpha\n )\n\n x, y = np.meshgrid(np.arange(width), np.arange(height))\n\n map_x = np.float32(x + dx)\n map_y = np.float32(y + dy)\n\n remap_fn = _maybe_process_in_chunks(\n cv2.remap, map1=map_x, map2=map_y, interpolation=interpolation, borderMode=border_mode, borderValue=value\n )\n return remap_fn(img)", "def affine_transform_2d(v, mapping, alpha = 1):\r\n p_wgt = vec2(0, 0)\r\n q_wgt = vec2(0, 0)\r\n w = len(mapping)*[None]\r\n w_sum = 0\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n x = mp[0].x - v.x\r\n y = mp[0].y - v.y\r\n if (x == 0 and y == 0): return mp[1]\r\n w[i] = 1/((x*x + y*y) ** alpha)\r\n p_wgt += mp[0]*w[i]\r\n q_wgt += mp[1]*w[i]\r\n w_sum += w[i]\r\n p_wgt /= w_sum\r\n q_wgt /= w_sum\r\n M1 = mat2(0)\r\n M2 = mat2(0)\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n p_adj = mp[0] - p_wgt\r\n q_adj = mp[1] - q_wgt\r\n M1 += p_adj.transpose_multiply(p_adj)*w[i]\r\n M2 += p_adj.transpose_multiply(q_adj)*w[i]\r\n M1 = M1.inverse()\r\n M = M1*M2\r\n M = M.transpose()\r\n v_out = M*(v - p_wgt) + q_wgt\r\n return v_out", "def apply_transform_to_image(self,img, transform, center=None):\n \n if center is None:\n center = (np.array(img.shape)[::-1]-1)/2.0\n \n displacement = np.dot(transform, center)\n shift = center - displacement\n \n img_tf = ndimage.interpolation.affine_transform(img, transform, offset=shift, mode=\"constant\", order=3, cval=0.0)\n return img_tf", "def apply_transformation(self, points):\n assert (points.shape[0] == 3)\n n = points.shape[1]\n points_ = np.vstack((points, np.ones((1, n))))\n points_trans_ = np.matmul(self.pose_mat, points_)\n points_transformed = np.true_divide(points_trans_[:3, :], points_trans_[[-1], :])\n return points_transformed", "def transform(self) -> Affine:\n transform = (\n Affine.translation(*self.origin)\n * Affine.rotation(self.rotation)\n * Affine.scale(*self.res)\n )\n return transform", "def augmentAffine(img_in, seg_in, strength=0.05):\n B,C,D,H,W = img_in.size()\n affine_matrix = (torch.eye(3,4).unsqueeze(0) + torch.randn(B, 3, 4) * strength).to(img_in.device)\n\n meshgrid = F.affine_grid(affine_matrix,torch.Size((B,1,D,H,W)))\n\n img_out = F.grid_sample(img_in, meshgrid,padding_mode='border')\n seg_out = F.grid_sample(seg_in.float().unsqueeze(1), meshgrid, mode='nearest').long().squeeze(1)\n\n return img_out, seg_out", "def scaling(sx,sy,Mat):\r\n # SM is the Scaling Matrix ( 3 X 3 )\r\n SM = [[sx,0,0],[0,sy,0],[0,0,1]]\r\n Scaled = Multiply(SM,Mat)\r\n # Scaled[0][0] is the updated x coordinate\r\n # Scaled[1][0] is the updated y coordinate\r\n return Scaled[0][0],Scaled[1][0],Scaled[2][0]", "def get_affine_transform(center, scale, rot, output_size, shift=(0.0, 0.0), inv=False):\n assert len(center) == 2\n assert len(scale) == 2\n assert len(output_size) == 2\n assert len(shift) == 2\n scale_tmp = scale * 200.0\n shift = np.array(shift)\n 
src_w = scale_tmp[0]\n dst_w = output_size[0]\n dst_h = output_size[1]\n rot_rad = np.pi * rot / 180\n src_dir = rotate_point([0.0, src_w * -0.5], rot_rad)\n dst_dir = np.array([0.0, dst_w * -0.5])\n src = np.zeros((3, 2), dtype=np.float32)\n src[0, :] = center + scale_tmp * shift\n src[1, :] = center + src_dir + scale_tmp * shift\n src[2, :] = _get_3rd_point(src[0, :], src[1, :])\n dst = np.zeros((3, 2), dtype=np.float32)\n dst[0, :] = [dst_w * 0.5, dst_h * 0.5]\n dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5]) + dst_dir\n dst[2, :] = _get_3rd_point(dst[0, :], dst[1, :])\n if inv:\n trans = cv2.getAffineTransform(np.float32(dst), np.float32(src))\n else:\n trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))\n return trans", "def transformCoordinates(self,x,incoordsys=None,outcoordsys=None):\n if incoordsys is None:\n incoordsys = self.incoordsys\n if outcoordsys is None:\n outcoordsys = self._fcoordsys\n if incoordsys == outcoordsys:\n return x\n else:\n return self._inputtransforms[incoordsys][outcoordsys](*x)" ]
[ "0.7174713", "0.6328652", "0.6287944", "0.6152823", "0.6144677", "0.5968129", "0.5957975", "0.5921042", "0.5909272", "0.5798729", "0.5789589", "0.5777053", "0.56403214", "0.5585435", "0.55693203", "0.5553606", "0.5544687", "0.5482424", "0.5461267", "0.54540503", "0.54315287", "0.5431089", "0.54214513", "0.5414109", "0.541142", "0.5406962", "0.53979105", "0.53966075", "0.5387316", "0.53590506", "0.53474116", "0.531475", "0.53132176", "0.5309061", "0.53039515", "0.5298219", "0.52731067", "0.52626914", "0.5262444", "0.5262215", "0.52503246", "0.5246775", "0.5241218", "0.5227229", "0.5211313", "0.51990587", "0.519675", "0.5184051", "0.51559013", "0.5151995", "0.514902", "0.5138682", "0.5134257", "0.51302505", "0.51272625", "0.5122011", "0.5118679", "0.5098131", "0.5095631", "0.5091359", "0.50878257", "0.5085916", "0.50805473", "0.50737447", "0.5071047", "0.5056921", "0.50530255", "0.5050616", "0.50383174", "0.5037148", "0.5034865", "0.5009804", "0.50043106", "0.50017273", "0.49965462", "0.49963453", "0.49912074", "0.49882534", "0.49836805", "0.49808493", "0.49803478", "0.49787912", "0.49754673", "0.4973556", "0.49674076", "0.4966571", "0.49656722", "0.4965338", "0.4964089", "0.4963674", "0.49623898", "0.49605146", "0.49542803", "0.495026", "0.49478838", "0.49416018", "0.49410886", "0.4940748", "0.4935474", "0.49321038" ]
0.8046413
0
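A minimal smoke-test sketch for the two coupling-cell helpers in this section, assuming PyTorch is installed and that element_wise_affine (the record above) and inverse_element_wise_affine (the record below) are in scope; the tensor shapes are arbitrary illustrative choices.

import torch

x = torch.randn(4, 3)       # batch of 4 points in 3 dimensions
st = torch.randn(4, 3, 2)   # st[..., 0] holds the log-scales s, st[..., 1] the offsets t

y, logj = element_wise_affine(x, st)

# Forward map is y = exp(s) * x + t, applied coordinatewise.
assert torch.allclose(y, torch.exp(st[..., 0]) * x + st[..., 1])
# Its log-Jacobian is the sum of the log-scales over the last dimension.
assert torch.allclose(logj, st[..., 0].sum(dim=-1), atol=1e-6)

# The inverse map with the same parameters st recovers x, and the two log-Jacobians cancel.
x_back, logj_inv = inverse_element_wise_affine(y, st)
assert torch.allclose(x_back, x, atol=1e-5)
assert torch.allclose(logj + logj_inv, torch.zeros_like(logj), atol=1e-5)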
Transform x elementwise through an affine function y = exp(-s)(x - t), where s = st[...,0] and t = st[...,1], with s.shape == x.shape == t.shape. This is the inverse of `element_wise_affine` above for the same set of parameters st. The Jacobian for this transformation is the coordinatewise product of the scaling factors J = prod(es[...,i],i).
def inverse_element_wise_affine(x, st, compute_jacobian=True):
    es = torch.exp(-st[..., 0])
    t = st[..., 1]
    logj = None
    if compute_jacobian:
        logj = torch.sum(torch.log(es), dim=-1)

    return es * (x - t), logj
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def element_wise_affine(x, st, compute_jacobian=True):\n es = torch.exp(st[..., 0])\n t = st[..., 1]\n logj = None\n if compute_jacobian:\n logj = torch.sum(torch.log(es), dim=-1)\n\n return es * x + t, logj", "def transform(fn):\n def _(vec, dt):\n return np.einsum(\n 'ji,i,ki,k...->j...',\n evecs, fn(evals, dt), evecs, vec, optimize=True)\n\n return _", "def affine(params, x):\n return np.dot(params['w'], x) + params['b']", "def affine_mult(affine, coordinates):\n return np.dot(coordinates, affine[:3, :3].T) + affine[:3, -1]", "def affineTransform(x,output_dim):\n w=tf.get_variable(\"w\", [x.get_shape()[1], output_dim])\n b=tf.get_variable(\"b\", [output_dim], initializer=tf.constant_initializer(0.0))\n return tf.matmul(x,w)+b", "def affine_transform(x, output_dim, name=None):\n\n w = tf.get_variable(name + \"_w\", [x.get_shape()[1], output_dim], initializer=tf.truncated_normal_initializer(stddev=0.02))\n b = tf.get_variable(name + \"_b\", [output_dim], initializer=tf.constant_initializer(0.0))\n\n return tf.matmul(x, w) + b", "def apply_affine_transform(x, M):\n is1d = len(x.shape) == 1\n if is1d:\n x = np.expand_dims(x, axis=0)\n\n x_hom = np.concatenate(\n [x, np.ones((x.shape[0], 1), dtype=x.dtype)], axis=-1\n )\n x_out = x_hom @ M.T\n if is1d:\n x_out = np.squeeze(x_out, axis=0)\n return x_out", "def affine_transform(geom, matrix):\n if geom.is_empty:\n return geom\n if len(matrix) == 6:\n ndim = 2\n a, b, d, e, xoff, yoff = matrix\n if geom.has_z:\n ndim = 3\n i = 1.0\n c = f = g = h = zoff = 0.0\n matrix = a, b, c, d, e, f, g, h, i, xoff, yoff, zoff\n elif len(matrix) == 12:\n ndim = 3\n a, b, c, d, e, f, g, h, i, xoff, yoff, zoff = matrix\n if not geom.has_z:\n ndim = 2\n matrix = a, b, d, e, xoff, yoff\n else:\n raise ValueError(\"'matrix' expects either 6 or 12 coefficients\")\n\n def affine_pts(pts):\n \"\"\"Internal function to yield affine transform of coordinate tuples\"\"\"\n if ndim == 2:\n for x, y in pts:\n xp = a * x + b * y + xoff\n yp = d * x + e * y + yoff\n yield (xp, yp)\n elif ndim == 3:\n for x, y, z in pts:\n xp = a * x + b * y + c * z + xoff\n yp = d * x + e * y + f * z + yoff\n zp = g * x + h * y + i * z + zoff\n yield (xp, yp, zp)\n\n # Process coordinates from each supported geometry type\n if geom.type in ('Point', 'LineString', 'LinearRing'):\n return type(geom)(list(affine_pts(geom.coords)))\n elif geom.type == 'Polygon':\n ring = geom.exterior\n shell = type(ring)(list(affine_pts(ring.coords)))\n holes = list(geom.interiors)\n for pos, ring in enumerate(holes):\n holes[pos] = type(ring)(list(affine_pts(ring.coords)))\n return type(geom)(shell, holes)\n elif geom.type.startswith('Multi') or geom.type == 'GeometryCollection':\n # Recursive call\n # TODO: fix GeometryCollection constructor\n return type(geom)([affine_transform(part, matrix)\n for part in geom.geoms])\n else:\n raise ValueError('Type %r not recognized' % geom.type)", "def affine_transform(trans_mat, p0):\r\n n_data, n_dim = np.shape(p0)\r\n p0 = np.hstack((p0, np.ones((n_data, 1))))\r\n #return np.transpose(np.dot(np.transpose(trans_mat), np.transpose(p0)))\r\n return np.dot(p0, trans_mat)", "def apply_affine(A: Affine, x: np.ndarray, y: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:\n\n shape = x.shape\n\n A = np.asarray(A).reshape(3, 3) # type: ignore[assignment]\n t = A[:2, -1].reshape((2, 1)) # type: ignore[index]\n A = A[:2, :2] # type: ignore[index]\n\n x, y = A @ np.vstack([x.ravel(), y.ravel()]) + t\n x, y = (a.reshape(shape) for a in (x, y))\n return (x, y)", "def 
affine_forward(x, W, b):\r\n x2d = np.reshape(x, (x.shape[0], -1)) # convert 4D input matrix to 2D \r\n out = np.dot(x2d, W) + b # linear transformation\r\n cache = (x, W, b) # keep for backward step (stay with us)\r\n return out, cache", "def affine_forward(x, w, b):\n out = None\n ###########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. You #\n # will need to reshape the input into rows. #\n ###########################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n t = np.reshape(x,(x.shape[0],np.prod(np.shape(x)[1:])))\n \n\n out = np.dot(t,w) + b\n \n #print(np.shape(out))\n\n pass\n\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n cache = (x, w, b)\n return out, cache", "def exp(tensor):\n return _elementary_op(tensor, np.exp, np.exp)", "def affine_forward(x,w,b):\n out=None\n N=x.shape[0]\n x_row=x.reshape(N,-1)\n out=np.dot(x_row,w)+b\n cache=(x,w,b)\n return out,cache", "def temporal_affine_forward(x, w, b):\n N, T, D = x.shape\n M = b.shape[0]\n out = x.reshape(N * T, D).dot(w).reshape(N, T, M) + b\n cache = x, w, b, out\n return out, cache", "def affine_forward(X, W, b):\n return np.dot(X, W) + b", "def affine_forward(x, w, b):\n #raise NotImplementedError\n #######################################################################\n # #\n # #\n # TODO: YOUR CODE HERE #\n # #\n # #\n #######################################################################\n out=np.dot(x,w)+b\n cache=(x,w,b)\n return(out, cache)", "def affine_forward(x, w, b):\n out = None\n ###########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. You #\n # will need to reshape the input into rows. #\n ###########################################################################\n reshaped_inp = np.reshape(x,(int(x.shape[0]),int(np.prod(x.shape) / x.shape[0])))\n out = reshaped_inp.dot(w) + b\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n cache = (x, w, b)\n return out, cache", "def affine_forward(x, w, b):\n ############################################################################\n # TODO: Implement the affine forward pass. Store the result in 'out'. You #\n # will need to reshape the input into rows. 
#\n ############################################################################\n ############################################################################\n # START OF YOUR CODE #\n ############################################################################\n N = len(x)\n D,M = w.shape\n # reshape get a new x\n new_x = x.reshape(N,D)\n # get the output\n out = np.dot(new_x,w) + np.expand_dims(b,axis=0)\n \n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n return out", "def affine_forward(x, w, b):\n out = None\n \n # reshape the input into (N, d_1 *...* d_k)\n input_shape = x.shape\n prod = 1\n for i in range(1,len(input_shape)):\n prod *= input_shape[i]\n\n a = x.reshape(x.shape[0],prod)\n out = np.dot(a,w) + b\n \n cache = (x, w, b)\n return out, cache", "def affine_forward(x, w, b):\n out = None\n ###########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. You #\n # will need to reshape the input into rows. #\n ###########################################################################\n dim_size = x[0].shape\n X = x.reshape(x.shape[0], np.prod(dim_size))\n out = X.dot(w) + b\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n cache = (x, w, b)\n return out, cache", "def transformAffine(self, coords):\n coordsshape = coords.shape\n dims = coordsshape[0] + 1\n coords = coords.reshape((len(coords), -1))\n coords = np.concatenate((coords, np.ones((1, len(coords[0])))), 0)\n affine = np.eye(dims)\n # now transform first to center:\n meanvec = np.mean(coords, 1)\n center = np.eye(dims)\n center[:-1, -1] = -meanvec[:-1]\n affine = np.matmul(center, affine)\n\n if np.sum(self.shift):\n affine[:-1, -1] += (self.deformrandomstate.rand(dims - 1) - 0.5) * np.float32(self.shift)\n if np.max(self.scaling) > 1:\n scales = np.ones(dims)\n # scales[:-1] = (self.deformrandomstate.rand(dims-1)-0.5)*(self.scaling-1.0/self.scaling)+(self.scaling+1/self.scaling)/2\n scales[:-1] = self.scaling ** (self.deformrandomstate.rand(dims - 1) * 2 - 1)\n scales = np.diag(scales)\n # print(scales)\n affine = np.matmul(scales, affine)\n if np.sum(self.rotation):\n affine = self._rotate(affine)\n # move back to location:\n center[:-1, -1] = -center[:-1, -1]\n affine = np.matmul(center, affine)\n # now appyl to coords:\n coords = np.matmul(affine, coords)\n coords = coords[:-1]\n coords = coords.reshape(coordsshape)\n return coords", "def affine_forward(x, w, b):\n out = None\n ########################################################################\n # TODO: Implement the affine forward pass. Store the result in out. #\n # You will need to reshape the input into rows. 
#\n ########################################################################\n\n x_reshaped = x.reshape(x.shape[:1] + (-1,))\n out = x_reshaped.dot(w) + b\n\n ########################################################################\n # END OF YOUR CODE #\n ########################################################################\n cache = (x, w, b)\n return out, cache", "def affine_forward(x, w, b):\n N = x.shape[0]\n\n # reshape input into rows\n output = x.reshape([N, -1]).dot(w) + b\n cache = (x, w, b)\n\n return output, cache", "def affine_forward(x, w, b):\n out = None\n x_shape = x.shape\n x_reshaped = x.reshape(x_shape[0], np.prod(x_shape[1:]))\n out = np.dot(x_reshaped, w) + b\n cache = (x, w, b)\n return out, cache", "def AffineTransform( from_pts, to_pts ):\n \n # check that there are match points\n if len(from_pts) != len(to_pts) or len(to_pts)<1:\n print \"from_pts and to_pts must be of same size.\"\n return False\n\n # check the dimensions\n dim = len(from_pts[0]) # num of dimensions\n if len(from_pts) < dim:\n print \"Too few points => under-determined system.\"\n return False\n elif len(from_pts) > dim + 1:\n print \"Too many points => over-determined system.\"\n return False\n\n \n #segregate the x and y coordinages\n from_pts_x, from_pts_y = zip(*from_pts)\n to_pts_x, to_pts_y = zip(*to_pts)\n \n #create the Matricies for processing\n I = np.matrix([from_pts_x, from_pts_y, [1,1,1]])\n P = np.matrix([to_pts_x, to_pts_y])\n \n #Calculate the 2D affine transform matrix (A)\n A = P * linalg.pinv(I) \n\n # Make a result object\n class Transformation:\n \"\"\"Result object that represents the transformation\n from affine fitter.\"\"\"\n\n def To_Str(self):\n res = \"\"\n for j in range(dim):\n str1 = \"x%d' = \" % j\n for i in range(dim):\n str1 +=\"x%d * %f + \" % (i, A[i][j+dim+1])\n str1 += \"%f\" % A[dim][j+dim+1]\n res += str1 + \"\\n\"\n return res\n\n def Transform(self, pt_x, pt_y):\n pt_vector = np.matrix([[pt_x], [pt_y], [1]])\n transformed_pt = A * pt_vector\n return map(itemgetter(0), transformed_pt.tolist())\n return Transformation()", "def affine_transform(x, transform_matrix, channel_index=2, fill_mode='nearest', cval=0., order=1):\n # transform_matrix = transform_matrix_offset_center()\n # asdihasid\n # asd\n\n x = np.rollaxis(x, channel_index, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [\n ndi.interpolation.affine_transform(\n x_channel, final_affine_matrix, final_offset, order=order, mode=fill_mode, cval=cval\n ) for x_channel in x\n ]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_index + 1)\n return x", "def estimate_stage_affine(t0, t1):\n src = np.array([t.tforms[0].translation for t in t0])\n dst = np.array([t.tforms[1].translation for t in t1])\n aff = renderapi.transform.AffineModel()\n aff.estimate(src, dst)\n return aff", "def get_affine(x, m, c):\n x = m*x + c\n return x", "def elastic_transform(image, alpha, sigma, alpha_affine, random_state=None):\n if random_state is None:\n random_state = np.random.RandomState(None)\n\n shape = image.shape\n shape_size = shape[:2]\n \n # Random affine\n center_square = np.float32(shape_size) // 2\n square_size = min(shape_size) // 3\n pts1 = np.float32([center_square + square_size, [center_square[0]+square_size, center_square[1]-square_size], center_square - square_size])\n pts2 = pts1 + random_state.uniform(-alpha_affine, alpha_affine, size=pts1.shape).astype(np.float32)\n M = cv2.getAffineTransform(pts1, pts2)\n image = 
cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101)\n\n dx = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha\n dy = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha\n dz = np.zeros_like(dx)\n\n x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2]))\n indices = np.reshape(y+dy, (-1, 1)), np.reshape(x+dx, (-1, 1)), np.reshape(z, (-1, 1))\n\n return map_coordinates(image, indices, order=1, mode='reflect').reshape(shape)", "def apply_affine_transform(x, theta=0, tx=0, ty=0, shear=0, zx=1, zy=1,\n row_axis=0, col_axis=1, channel_axis=2,\n fill_mode='nearest', cval=0., order=1):\n if scipy is None:\n raise ImportError('Image transformations require SciPy. '\n 'Install SciPy.')\n transform_matrix = None\n if theta != 0:\n theta = np.deg2rad(theta)\n rotation_matrix = np.array([[np.cos(theta), -np.sin(theta), 0],\n [np.sin(theta), np.cos(theta), 0],\n [0, 0, 1]])\n transform_matrix = rotation_matrix\n\n if tx != 0 or ty != 0:\n shift_matrix = np.array([[1, 0, tx],\n [0, 1, ty],\n [0, 0, 1]])\n if transform_matrix is None:\n transform_matrix = shift_matrix\n else:\n transform_matrix = np.dot(transform_matrix, shift_matrix)\n\n if shear != 0:\n shear = np.deg2rad(shear)\n shear_matrix = np.array([[1, -np.sin(shear), 0],\n [0, np.cos(shear), 0],\n [0, 0, 1]])\n if transform_matrix is None:\n transform_matrix = shear_matrix\n else:\n transform_matrix = np.dot(transform_matrix, shear_matrix)\n\n if zx != 1 or zy != 1:\n zoom_matrix = np.array([[zx, 0, 0],\n [0, zy, 0],\n [0, 0, 1]])\n if transform_matrix is None:\n transform_matrix = zoom_matrix\n else:\n transform_matrix = np.dot(transform_matrix, zoom_matrix)\n\n if transform_matrix is not None:\n h, w = x.shape[row_axis], x.shape[col_axis]\n transform_matrix = transform_matrix_offset_center(\n transform_matrix, h, w)\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n\n channel_images = [ndimage.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=order,\n mode=fill_mode,\n cval=cval) for x_channel in x]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def __affine_geo_transformation(x, y, gtr):\n\n # https://gdal.org/user/raster_data_model.html#affine-geotransform\n # Affine transformation rewritten for rasterio:\n gtr_x = gtr[2] + (x + 0.5) * gtr[0] + (y + 0.5) * gtr[1]\n gtr_y = gtr[5] + (x + 0.5) * gtr[3] + (y + 0.5) * gtr[4]\n\n return gtr_x, gtr_y", "def transAffine2D( iScale=(1, 1), iTrans=(0, 0), iRot=0, iShear=(0, 0) ): \n iRot = iRot * np.pi / 180\n oMatScale = np.matrix( ((iScale[0],0,0),(0,iScale[1],0),(0,0,1)) )\n oMatTrans = np.matrix( ((1,0,iTrans[0]),(0,1,iTrans[1]),(0,0,1)) )\n oMatRot = np.matrix( ((np.cos(iRot),-np.sin(iRot),0),\\\n (np.sin(iRot),np.cos(iRot),0),(0,0,1)) )\n oMatShear = np.matrix( ((1,iShear[0],0),(iShear[1],1,0),(0,0,1)) )\n # ustvari izhodno matriko\n oMat2D = oMatTrans * oMatShear * oMatRot * oMatScale\n return oMat2D", "def calc_affine(df):\n\tx0 = df.columns[0]\n\ty0 = df.index[0]\n\tdx = df.columns[1] - df.columns[0]\n\tdy = df.index[1] - df.index[0]\n\t\n\tt = affine.Affine(dx, 0, x0 , 0, dy ,y0 - dy) \n\t# y0 - dy because anker point is in the south!\n\treturn t", "def affine(img, angle, translate, scale, shear):\n if not _is_numpy(img):\n raise TypeError('img should be Numpy Image. 
Got {}'.format(type(img)))\n\n assert isinstance(translate, (tuple, list)) and len(translate) == 2, \\\n \"Argument translate should be a list or tuple of length 2\"\n\n assert scale > 0.0, \"Argument scale should be positive\"\n\n aug = iaa.Affine(scale=scale, rotate=angle, translate_px=translate, shear=shear)\n return aug.augment_image(img)", "def estimate_affine_matrix_3d_to_2d(X, x):\n assert x.shape[0] == X.shape[0]\n assert x.shape[0] >= 4\n X = X.T # (3, n)\n x = x.T # (2, n)\n n = x.shape[1]\n\n ###---- 1. normalization\n ## 2d points\n mean = np.mean(x, 1) # (2, )\n x = x - np.tile(mean[:, np.newaxis], [1, n]) # (2, n)\n average_norm = np.mean(np.sqrt(np.sum(x ** 2, 0)))\n scale = np.sqrt(2) / average_norm\n x = scale * x\n\n # T = [[scale, 0, -mean * scale], \n # [ 0, scale, -mean * scale], \n # [ 0, 0, 1 ]]\n T = np.zeros((3, 3), dtype=np.float32)\n T[0, 0] = T[1, 1] = scale\n T[:2, 2] = -mean * scale\n T[2, 2] = 1\n\n ## 3d points\n X_homo = np.vstack((X, np.ones((1, n)))) # (4, n)\n mean = np.mean(X, 1) # (3, )\n X = X - np.tile(mean[:, np.newaxis], [1, n]) # (3, n)\n m = X_homo[: 3, :] - X\n average_norm = np.mean(np.sqrt(np.sum(X ** 2, 0)))\n scale = np.sqrt(3) / average_norm\n X = scale * X\n\n U = np.zeros((4, 4), dtype=np.float32)\n U[0, 0] = U[1, 1] = U[2, 2] = scale\n U[: 3, 3] = -mean * scale\n U[3, 3] = 1\n\n ###---- 2. equations\n A = np.zeros((n * 2, 8), dtype=np.float32)\n X_homo = np.vstack((X, np.ones((1, n)))).T\n A[: n, : 4] = X_homo\n A[n: , 4: ] = X_homo\n b = np.reshape(x, [-1, 1]) # (2n, 1)\n\n ###---- 3.solution\n p_8 = np.linalg.pinv(A).dot(b) # (8, 2n) x (2n, 1) -> (8, 1)\n p = np.zeros((3, 4), dtype=np.float32)\n p[0, :] = p_8[:4, 0]\n p[1, :] = p_8[4:, 0]\n p[-1, -1] = 1\n\n ###---- 4. denormalization\n P_Affine = np.linalg.inv(T).dot(p.dot(U))\n return P_Affine", "def elastic_transform_approx(\n img: np.ndarray,\n alpha: float,\n sigma: float,\n alpha_affine: float,\n interpolation: int = cv2.INTER_LINEAR,\n border_mode: int = cv2.BORDER_REFLECT_101,\n value: Optional[ImageColorType] = None,\n random_state: Optional[np.random.RandomState] = None,\n) -> np.ndarray:\n height, width = img.shape[:2]\n\n # Random affine\n center_square = np.array((height, width), dtype=np.float32) // 2\n square_size = min((height, width)) // 3\n alpha = float(alpha)\n sigma = float(sigma)\n alpha_affine = float(alpha_affine)\n\n pts1 = np.array(\n [\n center_square + square_size,\n [center_square[0] + square_size, center_square[1] - square_size],\n center_square - square_size,\n ],\n dtype=np.float32,\n )\n pts2 = pts1 + random_utils.uniform(-alpha_affine, alpha_affine, size=pts1.shape, random_state=random_state).astype(\n np.float32\n )\n matrix = cv2.getAffineTransform(pts1, pts2)\n\n warp_fn = _maybe_process_in_chunks(\n cv2.warpAffine,\n M=matrix,\n dsize=(width, height),\n flags=interpolation,\n borderMode=border_mode,\n borderValue=value,\n )\n img = warp_fn(img)\n\n dx = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dx, (17, 17), sigma, dst=dx)\n dx *= alpha\n\n dy = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dy, (17, 17), sigma, dst=dy)\n dy *= alpha\n\n x, y = np.meshgrid(np.arange(width), np.arange(height))\n\n map_x = np.float32(x + dx)\n map_y = np.float32(y + dy)\n\n remap_fn = _maybe_process_in_chunks(\n cv2.remap,\n map1=map_x,\n map2=map_y,\n interpolation=interpolation,\n borderMode=border_mode,\n borderValue=value,\n )\n return 
remap_fn(img)", "def affine_transform(initialize=False, fixed=None, moments=True) :\n \n transform = itk.AffineTransform[itk.D, fixed.ndim].New()\n \n if initialize :\n fixed_itk = medipy.itk.medipy_image_to_itk_image(fixed, False)\n #moving_itk = medipy.itk.medipy_image_to_itk_image(moving, False)\n\n fixedIndex = fixed_itk.GetLargestPossibleRegion().GetIndex()\n fixedSize = fixed_itk.GetLargestPossibleRegion().GetSize()\n\n centerIndex = (int(fixedIndex[0] + fixedSize[0] / 2.0), \\\n int(fixedIndex[1] + fixedSize[1] / 2.0), \\\n int(fixedIndex[2] + fixedSize[2] / 2.0))\n\n rotationCenter = fixed_itk.TransformIndexToPhysicalPoint(centerIndex)\n\n transform.SetIdentity()\n transform.SetCenter(rotationCenter)\n\n #initial_transform = itk.VersorRigid3DTransform[itk.D].New()\n #initializer = itk.CenteredTransformInitializer[\n # initial_transform, fixed_itk, moving_itk].New(\n # Transform=initial_transform, FixedImage=fixed_itk, MovingImage=moving_itk) \n #if moments :\n # initializer.MomentsOn()\n #else :\n # initializer.GeometryOn()\n #initializer.InitializeTransform()\n #transform.SetCenter(initial_transform.GetCenter())\n #transform.SetOffset(initial_transform.GetOffset())\n \n return transform", "def elastic_transform(\n img: np.ndarray,\n alpha: float,\n sigma: float,\n alpha_affine: float,\n interpolation: int = cv2.INTER_LINEAR,\n border_mode: int = cv2.BORDER_REFLECT_101,\n value: Optional[ImageColorType] = None,\n random_state: Optional[np.random.RandomState] = None,\n approximate: bool = False,\n same_dxdy: bool = False,\n):\n height, width = img.shape[:2]\n\n # Random affine\n center_square = np.array((height, width), dtype=np.float32) // 2\n square_size = min((height, width)) // 3\n alpha = float(alpha)\n sigma = float(sigma)\n alpha_affine = float(alpha_affine)\n\n pts1 = np.array(\n [\n center_square + square_size,\n [center_square[0] + square_size, center_square[1] - square_size],\n center_square - square_size,\n ],\n dtype=np.float32,\n )\n pts2 = pts1 + random_utils.uniform(-alpha_affine, alpha_affine, size=pts1.shape, random_state=random_state).astype(\n np.float32\n )\n matrix = cv2.getAffineTransform(pts1, pts2)\n\n warp_fn = _maybe_process_in_chunks(\n cv2.warpAffine, M=matrix, dsize=(width, height), flags=interpolation, borderMode=border_mode, borderValue=value\n )\n img = warp_fn(img)\n\n if approximate:\n # Approximate computation smooth displacement map with a large enough kernel.\n # On large images (512+) this is approximately 2X times faster\n dx = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dx, (17, 17), sigma, dst=dx)\n dx *= alpha\n if same_dxdy:\n # Speed up even more\n dy = dx\n else:\n dy = random_utils.rand(height, width, random_state=random_state).astype(np.float32) * 2 - 1\n cv2.GaussianBlur(dy, (17, 17), sigma, dst=dy)\n dy *= alpha\n else:\n dx = np.float32(\n gaussian_filter((random_utils.rand(height, width, random_state=random_state) * 2 - 1), sigma) * alpha\n )\n if same_dxdy:\n # Speed up\n dy = dx\n else:\n dy = np.float32(\n gaussian_filter((random_utils.rand(height, width, random_state=random_state) * 2 - 1), sigma) * alpha\n )\n\n x, y = np.meshgrid(np.arange(width), np.arange(height))\n\n map_x = np.float32(x + dx)\n map_y = np.float32(y + dy)\n\n remap_fn = _maybe_process_in_chunks(\n cv2.remap, map1=map_x, map2=map_y, interpolation=interpolation, borderMode=border_mode, borderValue=value\n )\n return remap_fn(img)", "def convert_affine(ref, t, out):\n args = [\n transform_exe,\n 
'-d', '3',\n '-r', ref,\n '-t', '[{},0]'.format(t),\n '-o', '[{},1]'.format(out),\n '--float'\n ]\n subprocess.check_call(args)", "def affine_trans(self):\n h, w, _ = self.img.shape\n\n \"\"\"\n pts1 = np.float32(\n [\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n ]\n )\n pts2 = np.float32(\n [\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n [randint(0, rows), randint(0, cols)],\n ]\n )\n \"\"\"\n\n pts1 = np.float32([[50, 50], [200, 50], [50, 200]])\n pts2 = np.float32([[10, 100], [200, 50], [100, 250]])\n\n M = cv2.getAffineTransform(pts1, pts2)\n\n self.img = cv2.warpAffine(self.img, M, (w, h))\n\n self.edits.append(\"affine\")\n return self", "def affine_sigmoid(xin, m= 10, c= 3):\n if type(xin) != np.ndarray:\n x = np.array([xin])\n else:\n x = xin\n\n x = get_affine(x, m, c)\n output = get_sigmoid(x)\n\n if type(xin) != np.ndarray:\n return output[0]\n else:\n return output", "def model_1exp(a, t, s, t0=0):\n a, t, s = physicond(a, t, s)\n\n # auxilary function taking as argument a time array.\n def aux(array_t, details=False):\n\n # applying time offset\n t_var = array_t - t0\n\n # model expression coming from symbolic calculation\n pulse = np.heaviside(t_var, 1.)\n ind = np.where(pulse != 0.)\n pulse[ind] = a * (np.exp(-t_var[ind]/t)-np.exp(-t_var[ind]/s))\n\n # same behavior as model_2exp and model_3exp\n if details == True:\n return pulse, pulse\n elif details == False:\n return pulse\n\n return aux", "def _apply_transform(self, x, transform_parameters):\n # x is a single image, so it doesn't have image number at index 0\n img_row_axis = self.row_axis - 1\n img_col_axis = self.col_axis - 1\n img_channel_axis = self.channel_axis - 1\n\n x = apply_affine_transform(x, transform_parameters.get('theta', 0),\n transform_parameters.get('tx', 0),\n transform_parameters.get('ty', 0),\n transform_parameters.get('shear', 0),\n transform_parameters.get('zx', 1),\n transform_parameters.get('zy', 1),\n row_axis=img_row_axis,\n col_axis=img_col_axis,\n channel_axis=img_channel_axis,\n fill_mode=self.fill_mode,\n cval=self.cval)\n\n if transform_parameters.get('channel_shift_intensity') is not None:\n x = apply_channel_shift(x,\n transform_parameters['channel_shift_intensity'],\n img_channel_axis)\n\n if transform_parameters.get('flip_horizontal', False):\n x = self._flip_axis(x, img_col_axis)\n\n if transform_parameters.get('flip_vertical', False):\n x = self._flip_axis(x, img_row_axis)\n\n if transform_parameters.get('brightness') is not None:\n x = apply_brightness_shift(x, transform_parameters['brightness'])\n\n return x", "def elastic_transform(image, mask, alpha, sigma, alpha_affine,\n random_state=None):\n if random_state is None:\n random_state = np.random.RandomState(None)\n\n shape = image.shape\n shape_size = shape[:2]\n\n # Random affine\n center_square = np.float32(shape_size) // 2\n square_size = min(shape_size) // 3\n pts1 = np.float32([center_square + square_size,\n [center_square[0] + square_size,\n center_square[1] - square_size],\n center_square - square_size])\n pts2 = pts1 + random_state.uniform(-alpha_affine, alpha_affine,\n size=pts1.shape).astype(np.float32)\n M = cv2.getAffineTransform(pts1, pts2)\n\n dx = gaussian_filter((random_state.rand(*shape) * 2 - 1),\n sigma) * alpha\n dy = gaussian_filter((random_state.rand(*shape) * 2 - 1),\n sigma) * alpha\n\n x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]),\n np.arange(shape[2]))\n indices = np.reshape(y 
+ dy, (-1, 1)), np.reshape(\n x + dx, (-1, 1)), np.reshape(z, (-1, 1))\n\n image = cv2.warpAffine(image, M, shape_size[::-1],\n borderMode=cv2.BORDER_REFLECT_101)\n\n image = map_coordinates(image, indices, order=1,\n mode='reflect').reshape(shape)\n if mask is not None:\n mask = cv2.warpAffine(mask, M, shape_size[::-1],\n borderMode=cv2.BORDER_REFLECT_101)\n mask = map_coordinates(\n mask, indices, order=1, mode='reflect'\n ).reshape(shape)\n\n return image, mask", "def get_affine_matrix2d(\n translations: Tensor,\n center: Tensor,\n scale: Tensor,\n angle: Tensor,\n sx: Tensor | None = None,\n sy: Tensor | None = None,\n) -> Tensor:\n transform: Tensor = get_rotation_matrix2d(center, -angle, scale)\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n if any(s is not None for s in [sx, sy]):\n shear_mat = get_shear_matrix2d(center, sx, sy)\n transform_h = transform_h @ shear_mat\n\n return transform_h", "def get_affine_matrix2d(\n translations: torch.Tensor,\n center: torch.Tensor,\n scale: torch.Tensor,\n angle: torch.Tensor,\n sx: Optional[torch.Tensor] = None,\n sy: Optional[torch.Tensor] = None,\n) -> torch.Tensor:\n transform: torch.Tensor = get_rotation_matrix2d(center, -angle, scale)\n transform[..., 2] += translations # tx/ty\n\n # pad transform to get Bx3x3\n transform_h = convert_affinematrix_to_homography(transform)\n\n if any(s is not None for s in [sx, sy]):\n shear_mat = get_shear_matrix2d(center, sx, sy)\n transform_h = transform_h @ shear_mat\n\n return transform_h", "def elastic_transform(image, alpha=1000, sigma=30, spline_order=1, mode='nearest', random_state=np.random):\n assert image.ndim == 2\n shape = image.shape[:2]\n\n dx = gaussian_filter((random_state.rand(*shape) * 2 - 1),\n sigma, mode=\"constant\", cval=0) * alpha\n dy = gaussian_filter((random_state.rand(*shape) * 2 - 1),\n sigma, mode=\"constant\", cval=0) * alpha\n\n x, y = np.meshgrid(np.arange(shape[0]), np.arange(shape[1]), indexing='ij')\n indices = [np.reshape(x + dx, (-1, 1)), np.reshape(y + dy, (-1, 1))]\n result = map_coordinates(\n image, indices, order=spline_order, mode=mode).reshape(shape)\n return result", "def elastic_transform(image, alpha_param, sigma_param, alpha_affine_param, random_state=None):\n image = np.asarray(image)\n #im_np = im_np.transpose(1,0,2) # when we go from pil to numpy array the W and L dimensions are swaped\n\n if len(image.shape) < 3: # if there is less than 3 channels (black&white) we triplicate the image\n image = np.concatenate((image[:,:, np.newaxis], image[:,:, np.newaxis], image[:,:, np.newaxis]), axis = 2)\n\n alpha = image.shape[1] * alpha_param\n sigma = image.shape[1] * sigma_param\n alpha_affine = image.shape[1] * alpha_affine_param\n\n if random_state is None:\n random_state = np.random.RandomState(None)\n\n shape = image.shape\n shape_size = shape[:2]\n\n # Random affine\n center_square = np.float32(shape_size) // 2\n square_size = min(shape_size) // 3\n pts1 = np.float32([center_square + square_size, [center_square[0]+square_size, center_square[1]-square_size], center_square - square_size])\n pts2 = pts1 + random_state.uniform(-alpha_affine, alpha_affine, size=pts1.shape).astype(np.float32)\n M = cv2.getAffineTransform(pts1, pts2)\n image = cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REFLECT_101)\n\n dx = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * alpha\n dy = gaussian_filter((random_state.rand(*shape) * 2 - 1), sigma) * 
alpha\n dz = np.zeros_like(dx)\n\n x, y, z = np.meshgrid(np.arange(shape[1]), np.arange(shape[0]), np.arange(shape[2]))\n indices = np.reshape(y+dy, (-1, 1)), np.reshape(x+dx, (-1, 1)), np.reshape(z, (-1, 1))\n\n im_elastic = map_coordinates(image, indices, order=1, mode='reflect').reshape(shape)\n\n return Image.fromarray(np.uint8(im_elastic))", "def apply_transform(x,\n transform_matrix,\n channel_axis=0,\n fill_mode='nearest',\n cval=0.,\n interp_order=0):\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:3, :3]\n final_offset = transform_matrix[:3, -1]\n channel_volumes = [ndi.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=interp_order, # NOTE: The order of the spline interpolation\n mode=fill_mode,\n cval=cval) for x_channel in x]\n x = np.stack(channel_volumes, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def affine(self, image, random_state):\n shape = image.shape\n shape_size = shape[:2]\n transform_std = self.alpha_affine*image.shape[1]\n\n center_square = np.float32(shape[:2]) // 2\n square_size = min(shape[:2]) // 3\n\n source = np.float32([center_square + square_size, [center_square[0]+square_size, center_square[1]-square_size], center_square - square_size])\n destination = source + random_state.uniform(-transform_std, transform_std, size=source.shape).astype(np.float32)\n M = cv2.getAffineTransform(source, destination)\n\n return cv2.warpAffine(image, M, shape_size[::-1], borderMode=cv2.BORDER_REPLICATE)", "def compute_e(f_mat, m_mat):\r\n return m_mat.T @ f_mat @ m_mat", "def elasticTransform_legacy(image, mask, sigma, alpha_affine, random_seed=None):\n\t\n\trandom_state = np.random.RandomState(random_seed)\n\t\n\tif len(image.shape)<3:\n\t\timage = np.expand_dims(image,-1)\n\tif len(mask.shape)<3:\n\t\tmask = np.expand_dims(mask,-1)\n\t\n\tshape = image.shape\n\tshape_size = shape[:2]\n\n\t# Random affine\n\tcenter_square = np.float32(shape_size) // 2\n\tsquare_size = min(shape_size) // 3\n\tpts1 = np.float32([center_square + square_size, [center_square[0] + square_size,center_square[1] - square_size], center_square - square_size])\n\tpts2 = pts1 + random_state.uniform(-alpha_affine,alpha_affine, size=pts1.shape).astype(np.float32)\n\t\n\tM = cv2.getAffineTransform(pts1, pts2)\n\t\n\timage_w = np.zeros_like(image)\n\tfor i in range(image.shape[-1]):\n\t\timage_w[...,i] = cv2.warpAffine(image[...,i], M, shape_size[::-1], borderMode=cv2.BORDER_CONSTANT, borderValue=int(np.min(image[...,i])))\n\t\n\tmask_w = np.zeros_like(mask)\n\tfor i in range(mask.shape[-1]):\n\t\tmask_w[...,i] = cv2.warpAffine(mask[...,i] , M, shape_size[::-1], borderMode=cv2.BORDER_REPLICATE)\n\n\tblur_size = int(2*sigma) | 1\n\tdx = cv2.GaussianBlur((random_state.rand(*shape) * 2 - 1), ksize=(blur_size, blur_size), sigmaX=sigma)\n\tdy = cv2.GaussianBlur((random_state.rand(*shape) * 2 - 1), ksize=(blur_size, blur_size), sigmaX=sigma)\n\t\n\tif len(dx.shape) < 3:\n\t\tdx = np.expand_dims(dx,-1)\n\t\tdy = np.expand_dims(dy,-1)\n\n\tgx, gy = np.meshgrid(np.arange(shape_size[1]), np.arange(shape_size[0]))\n\tgx = np.expand_dims(gx,-1)\n\tgy = np.expand_dims(gy,-1)\n\n\tgx = np.repeat(gx,dx.shape[-1], -1)\n\tgy = np.repeat(gy,dy.shape[-1], -1)\n\n\tgx = (gx + dx).astype(np.float32)\n\tgy = (gy + dy).astype(np.float32)\n\n\timage_d = np.zeros_like(image_w)\n\tmask_d = np.zeros_like(mask_w)\n\n\tfor i in range(image.shape[-1]):\n\t\timage_d[...,i] = cv2.remap(image_w[...,i], gx[...,i], gy[...,i], 
interpolation=cv2.INTER_LINEAR)\n\t\n\tradix = gx.shape[-1]\n\tfor i in range(mask.shape[-1]):\n\t\tmask_d[...,i] = cv2.remap(mask_w[...,i], gx[...,i%radix], gy[...,i%radix], interpolation=cv2.INTER_LINEAR)\n\t\n\treturn image_d, mask_d", "def get_affine_transform(gps_coords, pdr_coords):\n # Compute similarity Xp = s A X + b\n X = np.array(pdr_coords)\n Xp = np.array(gps_coords)\n T = tf.superimposition_matrix(X.T, Xp.T, scale=True)\n\n A, b = T[:3, :3], T[:3, 3]\n s = np.linalg.det(A)**(1. / 3)\n A /= s\n return s, A, b", "def affine_2Dtransform(img, t_mat, height, width, h_offset=0, w_offset=0, nh_flag=False, nw_flag=False):\n # transform matrix must be validated\n if(np.shape(t_mat) != (2, 2)):\n return img\n\n # implementing matrix multiplication to a default map of source data in order to apply transform\n # and to achieve coordination/location of transformed matrix according to source data(data map)\n coord_map = transform_calcualtion(\n height, width, t_mat, h_offset, w_offset, nh_flag, nw_flag)\n\n # transformed image data construction\n t_img = np.full((height+h_offset, width+w_offset, 3), 255, dtype='uint8')\n\n # applying new map to image inorder to complete the transform\n try:\n for i in range(height):\n for j in range(width):\n [i_new_coord, j_new_coord] = coord_map[i, j, :]\n # unhandled bound-jumpout\n t_img[i_new_coord, j_new_coord, :] = img[i, j, :]\n except:\n print(\"not enough offset/negative coordination pushed\")\n return img\n return t_img", "def get_transform(ds):\n\n if 'transform' in ds.attrs:\n ds_trans = ds.attrs['transform']\n if isinstance(ds_trans, Affine):\n return ds_trans\n else:\n return Affine(*ds_trans)\n\n elif 'crs' in ds.data_vars and 'i2m' in ds.data_vars['crs'].attrs:\n transf_str = ds.data_vars['crs'].attrs['i2m']\n a = list(map(float, transf_str.split(',')))\n return Affine(a[0], a[2], a[4], a[1], a[3], a[5])\n\n else:\n resx, resy = get_resolution(ds)\n xoff = ds['x'].values.min()\n yoff = ds['y'].values.max()\n return Affine(resx, 0, xoff, 0, resy, yoff)", "def apply_transform(x,\n transform_matrix,\n channel_axis=0,\n fill_mode='constant',\n cval=0.):\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [\n interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=0,\n mode=fill_mode,\n cval=cval) for x_channel in x\n ]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def apply_transform(x,\n transform_matrix,\n channel_axis=0,\n fill_mode='constant',\n cval=0.):\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [\n interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=0,\n mode=fill_mode,\n cval=cval) for x_channel in x\n ]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def softmax(x):\n if type(x) == list:\n dim=len(x)\n norm = np.sum(np.exp(x))\n for idx in range(dim):\n x[idx] = np.exp(x[idx])/norm\n elif type(x) == np.ndarray:\n dim=x.shape\n for col in range(dim[1]):\n norm = np.sum(np.exp(x[:, col]))\n for idx in range(dim[0]):\n x[idx, col] = np.exp(x[idx, col])/norm\n else:\n raise Exception('incorrect input')\n return x", "def affineSchur(self):\n return AffineSchurFunctions(self)", "def seToSE( x ):\n x = asarray(x,dtype=float)\n if x.shape != (6,):\n raise ValueError(\"shape must 
be (6,); got %s\" % str(x.shape))\n #\n return expM(screw(x))", "def affine_sigmoid_forward(x, w, b):\n a, fc_cache = affine_forward(x, w, b)\n out, sigmoid_cache = sigmoid_forward(a)\n cache = (fc_cache, sigmoid_cache)\n return out, cache", "def augmentAffine(img_in, seg_in, strength=0.05):\n B,C,D,H,W = img_in.size()\n affine_matrix = (torch.eye(3,4).unsqueeze(0) + torch.randn(B, 3, 4) * strength).to(img_in.device)\n\n meshgrid = F.affine_grid(affine_matrix,torch.Size((B,1,D,H,W)))\n\n img_out = F.grid_sample(img_in, meshgrid,padding_mode='border')\n seg_out = F.grid_sample(seg_in.float().unsqueeze(1), meshgrid, mode='nearest').long().squeeze(1)\n\n return img_out, seg_out", "def func_exp(x, a, b, c):\n return a * np.exp(b * x) + c", "def _eigen_fns(mat, fns):\n evals, evecs = _eigh(mat)\n\n def transform(fn):\n \"\"\"Generates a transform given a function on the eigenvalues.\"\"\"\n def _(vec, dt):\n return np.einsum(\n 'ji,i,ki,k...->j...',\n evecs, fn(evals, dt), evecs, vec, optimize=True)\n\n return _\n\n return tuple(transform(fn) for fn in fns)", "def elastic_transform(self, image, random_state=None):\n if random_state is None:\n random_state = np.random.RandomState(None)\n\n image = self.affine(image, random_state)\n #from ipdb import set_trace; set_trace()\n indices = self.stretch_indices(image, random_state)\n\n return map_coordinates(image, indices, order=1, mode='reflect').reshape(image.shape)", "def fused_elemwise_activation(x,\n y,\n functor_list,\n axis=-1,\n scale=0.0,\n save_intermediate_out=True):\n if isinstance(functor_list, str):\n functor_list = functor_list.split(',')\n\n if not isinstance(functor_list, list) or len(functor_list) != 2:\n raise ValueError(\n 'functor_list should be a list of str, and the length should be 2.')\n\n helper = LayerHelper('fused_elemwise_activation', **locals())\n out = helper.create_variable_for_type_inference(dtype=x.dtype)\n intermediate_out = helper.create_variable_for_type_inference(dtype=x.dtype)\n helper.append_op(\n type='fused_elemwise_activation',\n inputs={'X': x,\n 'Y': y},\n outputs={'Out': out,\n 'IntermediateOut': intermediate_out},\n attrs={\n 'axis': axis,\n 'scale': scale,\n 'save_intermediate_out': save_intermediate_out,\n 'functor_list': functor_list\n })\n return out", "def _transform(\n self, x: \"torch.Tensor\", y: Optional[\"torch.Tensor\"], **kwargs\n ) -> Tuple[\"torch.Tensor\", Optional[\"torch.Tensor\"]]:\n import torch\n import torchvision.transforms.functional as F\n\n img_size = x.shape[:2]\n\n angle = float(\n torch.empty(1)\n .uniform_(float(self.degree_range[0]), float(self.degree_range[1]))\n .item()\n )\n\n max_dx = float(self.translate[0] * img_size[1])\n max_dy = float(self.translate[1] * img_size[0])\n tx = int(round(torch.empty(1).uniform_(-max_dx, max_dx).item()))\n ty = int(round(torch.empty(1).uniform_(-max_dy, max_dy).item()))\n translations = (tx, ty)\n\n scale = float(torch.empty(1).uniform_(self.scale[0], self.scale[1]).item())\n\n # x needs to have channel first\n x = x.permute(2, 0, 1)\n x = F.affine(\n img=x, angle=angle, translate=translations, scale=scale, shear=(0.0, 0.0)\n )\n x = x.permute(1, 2, 0)\n\n return torch.clamp(x, min=self.clip_values[0], max=self.clip_values[1]), y", "def gen_affine_map(Ab, img_sz, dim=3):\n Ab = Ab.view(Ab.shape[0], dim+1, dim)\n phi = gen_identity_map(img_sz).to(Ab.device)\n phi_cp = phi.view(dim, -1)\n affine_map = torch.matmul(Ab[:, :dim, :], phi_cp)\n affine_map = Ab[:, dim, :].contiguous().view(-1, dim, 1) + affine_map\n affine_map = 
affine_map.view([Ab.shape[0]] + list(phi.shape))\n return affine_map", "def transpose_as_einsum(x: JaxExpression, params: Params) -> Einsum:\n x_ndim = len(x.shape)\n x_dims = ''.join(it.islice(einsum.einsum_letters(), x_ndim))\n out_dims = ''.join([x_dims[dim] for dim in params['permutation']])\n return Einsum(f'{x_dims}->{out_dims}', (x,))", "def elastic_transform(X, min_alpha=36, max_alpha=38, min_sigma=5, max_sigma=6, random_state=None, n_jobs=1):\n if random_state is None:\n rng = np.random\n else:\n rng = np.random.RandomState(random_state)\n alphas = rng.uniform(min_alpha, max_alpha, size=X.shape[0])\n sigmas = rng.uniform(min_sigma, max_sigma, size=X.shape[0])\n X_elas = Parallel(n_jobs=n_jobs)(delayed(elastic_transform_one)(X[i], alphas[i], sigmas[i]) for i in range(X.shape[0]))\n return np.array(X_elas, dtype='float32')", "def get_affine_transform(center, scale, rot, output_size, shift=(0.0, 0.0), inv=False):\n assert len(center) == 2\n assert len(scale) == 2\n assert len(output_size) == 2\n assert len(shift) == 2\n scale_tmp = scale * 200.0\n shift = np.array(shift)\n src_w = scale_tmp[0]\n dst_w = output_size[0]\n dst_h = output_size[1]\n rot_rad = np.pi * rot / 180\n src_dir = rotate_point([0.0, src_w * -0.5], rot_rad)\n dst_dir = np.array([0.0, dst_w * -0.5])\n src = np.zeros((3, 2), dtype=np.float32)\n src[0, :] = center + scale_tmp * shift\n src[1, :] = center + src_dir + scale_tmp * shift\n src[2, :] = _get_3rd_point(src[0, :], src[1, :])\n dst = np.zeros((3, 2), dtype=np.float32)\n dst[0, :] = [dst_w * 0.5, dst_h * 0.5]\n dst[1, :] = np.array([dst_w * 0.5, dst_h * 0.5]) + dst_dir\n dst[2, :] = _get_3rd_point(dst[0, :], dst[1, :])\n if inv:\n trans = cv2.getAffineTransform(np.float32(dst), np.float32(src))\n else:\n trans = cv2.getAffineTransform(np.float32(src), np.float32(dst))\n return trans", "def quantize_affine_given_quant_params(\n input: torch.Tensor,\n quantize_params: QuantizeAffineParams2,\n) -> torch.Tensor:\n return QuantizeAffineFunction.apply(input, quantize_params)", "def apply_transform(x, transform_matrix, channel_index=0, fill_mode='nearest', cval=0.):\n x = np.rollaxis(x, channel_index, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [ndi.interpolation.affine_transform(x_channel,\n final_affine_matrix,\n final_offset, order=0, mode=fill_mode, cval=cval)\n for x_channel in x]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_index+1)\n return x", "def affineTransform(img, pts, newPts):\n\ttmp = img.copy()\n\tif len(img.shape) is 3:\n\t\trows, cols, ch = img.shape\n\telse:\n\t\trows, cols = img.shape\n\tpts1 = np.float32(pts)\n\tpts2 = np.float32(newPts)\n\tM = cv2.getAffineTransform(pts1, pts2)\n\tdst = cv2.warpAffine(tmp, M, (cols, rows))\n\treturn dst", "def transform(self, x):\n return self._transform_eig(x)", "def apply_transform(x,\n transform_matrix,\n channel_axis=0,\n fill_mode='nearest',\n cval=0.):\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [ndi.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=1,\n mode=fill_mode,\n cval=cval) for x_channel in x]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def singleexp(params, t):\n # 2011-05-18 20:58 IJMC: Created\n # 2011-06-03 11:49 IJMC: Normalized to unity.\n\n if len(params)==2:\n return 1. 
- params[0] * exp(-t/params[1]) \n else:\n return params[2] * (1. - params[0] * exp(-t/params[1]) )", "def apply_transform(x,\n transform_matrix,\n channel_axis=0,\n fill_mode='nearest',\n cval=0.):\n x = np.rollaxis(x, channel_axis, 0)\n final_affine_matrix = transform_matrix[:2, :2]\n final_offset = transform_matrix[:2, 2]\n channel_images = [ndi.interpolation.affine_transform(\n x_channel,\n final_affine_matrix,\n final_offset,\n order=0,\n mode=fill_mode,\n cval=cval) for x_channel in x]\n x = np.stack(channel_images, axis=0)\n x = np.rollaxis(x, 0, channel_axis + 1)\n return x", "def affine(img, matrix, interpolation=\"nearest\", fill=None, data_format='CHW'):\n ndim = len(img.shape)\n if ndim == 3:\n img = img.unsqueeze(0)\n\n img = img if data_format.lower() == 'chw' else img.transpose((0, 3, 1, 2))\n\n matrix = paddle.to_tensor(matrix, place=img.place)\n matrix = matrix.reshape((1, 2, 3))\n shape = img.shape\n\n grid = _affine_grid(\n matrix, w=shape[-1], h=shape[-2], ow=shape[-1], oh=shape[-2]\n )\n\n if isinstance(fill, int):\n fill = tuple([fill] * 3)\n\n out = _grid_transform(img, grid, mode=interpolation, fill=fill)\n\n out = out if data_format.lower() == 'chw' else out.transpose((0, 2, 3, 1))\n out = out.squeeze(0) if ndim == 3 else out\n\n return out", "def exp2eval(self, p, x, y=None, C=None, sumsq=False, weights=None):\n yd = p[0] + (p[1] * (1.0 - numpy.exp(-x / p[2])) ** 2.0) + \\\n (p[3] * (1.0 - numpy.exp(-x / p[4])))\n if y is None:\n return yd\n else:\n if sumsq is True:\n ss = numpy.sqrt(numpy.sum((y - yd) ** 2.0))\n # if p[4] < 3.0*p[2]:\n # ss = ss*1e6 # penalize them being too close\n return ss\n else:\n return y - yd\n\n # @autojit", "def map(self,Affine,i):\n map_x = np.zeros([self.num,self.d])\n for k in range(self.num):\n map_x[k,:] = Affine.apply(i,self.pick(k))\n Mapped = Model_Points(map_x)\n return Mapped", "def apply_transform(img,\n transform_matrix):\n rows,cols = img.shape[:2]\n dst = cv2.warpAffine(img,transform_matrix,(cols,rows))\n\n\n return dst", "def affine_transform_2d(v, mapping, alpha = 1):\r\n p_wgt = vec2(0, 0)\r\n q_wgt = vec2(0, 0)\r\n w = len(mapping)*[None]\r\n w_sum = 0\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n x = mp[0].x - v.x\r\n y = mp[0].y - v.y\r\n if (x == 0 and y == 0): return mp[1]\r\n w[i] = 1/((x*x + y*y) ** alpha)\r\n p_wgt += mp[0]*w[i]\r\n q_wgt += mp[1]*w[i]\r\n w_sum += w[i]\r\n p_wgt /= w_sum\r\n q_wgt /= w_sum\r\n M1 = mat2(0)\r\n M2 = mat2(0)\r\n for i in range(len(mapping)):\r\n mp = mapping[i]\r\n p_adj = mp[0] - p_wgt\r\n q_adj = mp[1] - q_wgt\r\n M1 += p_adj.transpose_multiply(p_adj)*w[i]\r\n M2 += p_adj.transpose_multiply(q_adj)*w[i]\r\n M1 = M1.inverse()\r\n M = M1*M2\r\n M = M.transpose()\r\n v_out = M*(v - p_wgt) + q_wgt\r\n return v_out", "def transforms_multiply(t0s, t1s):\r\n \r\n return ut.matrix_multiply(t0s, t1s)", "def expms(A, eig=np.linalg.eigh):\r\n # TODO: check that this works reliably for low rank matrices\r\n # first: symmetrize A\r\n D, B = eig(A)\r\n return np.dot(B, (np.exp(D) * B).T)", "def expm1_inplace(a):", "def softmax(x, axis=1):\n sf = np.exp(x)\n sf = sf/np.sum(sf, axis=axis)[:,np.newaxis]\n return sf", "def fit_transform(self, Xs, y=None):\n return self.fit(Xs, y).transform(Xs)", "def attrTransform(self, matrix, transform):\n for ttype, targs in self.reTransformFind.findall(transform):\n targs = list(map(lambda x: float(x), self.reNumberFind.findall(targs)))\n if ttype == 'matrix':\n newmatrix = [ targs[0], targs[1],\n targs[2], targs[3],\n targs[4], targs[5] ]\n 
self.matrixMul(matrix, newmatrix)\n elif ttype == 'translate':\n tx = targs[0]\n ty = targs[1] if len(targs) > 1 else 0\n newmatrix = [ 1, 0, 0, 1, tx, ty ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'scale':\n sx = targs[0]\n sy = targs[1] if len(targs) > 1 else sx\n newmatrix = [ sx, 0, 0, sy, 0, 0 ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'rotate':\n if len(targs) == 1:\n alpha = targs[0]\n newmatrix = [ math.cos(alpha), math.sin(alpha),\n -math.sin(alpha), math.cos(alpha),\n 0, 0]\n self.matrixMul(matrix, newmatrix)\n else:\n alpha = targs[0]\n newmatrix = [ 1, 0, 0, 1, targs[1], targs[2] ]\n self.matrixMul(matrix, newmatrix)\n newmatrix = [ math.cos(alpha), math.sin(alpha),\n -math.sin(alpha), math.cos(alpha),\n 0, 0]\n self.matrixMul(matrix, newmatrix)\n newmatrix = [ 1, 0, 0, 1, -targs[1], -targs[2] ]\n self.matrixMul(matrix, newmatrix)\n elif ttype == 'skewX' or ttype == 'skewY':\n self.alert(\"skewX and skewY transformations are not supported\", elem)\n else:\n print('unknown transform type: ', ttype)\n return matrix", "def setrans(Bi, t):\n\n x,v=mat2set(Bi)\n Bo = set2mat((x+t,v))\n Bo = Bo.astype(Bi.dtype)\n return Bo", "def affine(img, angle=0, translate=(0, 0), scale=1, shear=0, resample='BILINEAR', fillcolor=(0,0,0)):\n if not _is_numpy_image(img):\n raise TypeError('img should be CV Image. Got {}'.format(type(img)))\n\n assert isinstance(translate, (tuple, list)) and len(translate) == 2, \\\n \"Argument translate should be a list or tuple of length 2\"\n\n assert scale > 0.0, \"Argument scale should be positive\"\n\n rows, cols, _ = img.shape\n center = (cols * 0.5, rows * 0.5)\n angle = math.radians(angle)\n shear = math.radians(shear)\n M00 = math.cos(angle)*scale\n M01 = -math.sin(angle+shear)*scale\n M10 = math.sin(angle)*scale\n M11 = math.cos(angle+shear)*scale\n M02 = center[0] - center[0]*M00 - center[1]*M01 + translate[0]\n M12 = center[1] - center[0]*M10 - center[1]*M11 + translate[1]\n affine_matrix = np.array([[M00, M01, M02], [M10, M11, M12]], dtype=np.float32)\n dst_img = cv2.warpAffine(img, affine_matrix, (cols, rows), flags=INTER_MODE[resample],\n borderMode=cv2.BORDER_CONSTANT, borderValue=fillcolor)\n return dst_img", "def expIP(self):\n np.exp(self.t, out=self.t)\n return self", "def translateEuler(trans):\n return np.array([[1,0,0,trans[0]],[0,1,0,trans[1]],[0,0,1,trans[2]],[0,0,0,1]])", "def transform(self, x:generic_array, dense=True) -> generic_array:\n if type(x) == np.ndarray and not dense:\n warnings.warn(\"For Numpy transform it is best to use dense=True\")\n \n K_nq = self._pairwise_kernels(x, self.components_, dense=dense)\n x_new = K_nq @ self.normalization\n return x_new", "def analytic(x, t, D, x0, xend, logx=False, c_s=1, use_log2=False):\n import scipy.special\n if t.ndim == 1:\n t = t.reshape((t.size, 1))\n expb = (lambda arg: 2**arg) if use_log2 else np.exp\n x = expb(x) if logx else x\n return c_s * scipy.special.erfc(x/(2*(D*t)**0.5))", "def testTranslateAffine(self):\n affineClass = xyTransformRegistry[\"affine\"]\n affineConfig = affineClass.ConfigClass()\n affineConfig.translation = (1.2, -3.4)\n with lsst.utils.tests.getTempFilePath(\".py\") as filePath:\n self.checkConfig(affineClass, affineConfig, filePath)\n affine = affineClass(affineConfig)\n for fromPoint in self.fromIter():\n toPoint = affine.forwardTransform(fromPoint)\n predToPoint = fromPoint + Extent2D(*affineConfig.translation)\n for i in range(2):\n self.assertAlmostEqual(toPoint[i], predToPoint[i])", "def translateEuler(self,trans):\n return 
np.array([[1,0,0,trans[0]],[0,1,0,trans[1]],[0,0,1,trans[2]],[0,0,0,1]])", "def softmax(x):\n sf = np.exp(x)\n sf = sf / np.sum(sf, axis=0)\n return sf", "def softmax(x):\n sf = np.exp(x)\n sf = sf / np.sum(sf, axis=0)\n return sf" ]
[ "0.83168423", "0.6524651", "0.6448697", "0.6208682", "0.6185584", "0.60859853", "0.60077345", "0.6005692", "0.6002416", "0.585732", "0.5842322", "0.57764435", "0.5741281", "0.5718799", "0.57156426", "0.57109356", "0.5676722", "0.56560165", "0.5641633", "0.56377053", "0.56344235", "0.5631245", "0.56272626", "0.56263775", "0.5614629", "0.55554545", "0.5553882", "0.5552548", "0.55399966", "0.55360585", "0.5484923", "0.5471292", "0.54649603", "0.5416288", "0.5415288", "0.5394227", "0.5341808", "0.533065", "0.5316358", "0.5300863", "0.5298905", "0.52985513", "0.52416044", "0.5240841", "0.5236045", "0.52254987", "0.5216815", "0.52064705", "0.52002156", "0.51893497", "0.5172894", "0.5152543", "0.51522845", "0.51458055", "0.5132458", "0.5124474", "0.51092124", "0.51092124", "0.5107944", "0.51038617", "0.50998217", "0.50988835", "0.5096238", "0.5093721", "0.5076671", "0.50761884", "0.5075886", "0.50730413", "0.5069762", "0.50534934", "0.5047299", "0.50407267", "0.5039684", "0.503877", "0.50288457", "0.502724", "0.5020765", "0.50171196", "0.5015357", "0.5012609", "0.50116163", "0.5007974", "0.50024426", "0.4995894", "0.4971688", "0.49713078", "0.49635506", "0.49580705", "0.4956665", "0.4940032", "0.49307853", "0.4921124", "0.4918872", "0.49187765", "0.49152216", "0.4911592", "0.49082", "0.49071148", "0.49047735", "0.49047735" ]
0.77893174
1
Softmax loss function, naive implementation (with loops). Inputs have dimension D, there are C classes, and we operate on minibatches of N examples.
def softmax_loss_naive(W, X, y, reg):
    # Initialize the loss and gradient to zero.
    loss = 0.0
    dW = np.zeros_like(W)

    #############################################################################
    # TODO: Compute the softmax loss and its gradient using explicit loops.     #
    # Store the loss in loss and the gradient in dW. If you are not careful     #
    # here, it is easy to run into numeric instability. Don't forget the        #
    # regularization!                                                           #
    #############################################################################
    num_train = X.shape[0]
    num_classes = W.shape[1]

    # Calculate loss for each example
    f = np.zeros((num_train, num_classes))
    f_max = np.zeros((num_train, 1))
    for i in xrange(num_train):
        for j in xrange(num_classes):
            f[i, j] = np.dot(X[i, :], W[:, j])
            if f[i, j] > f_max[i]:
                f_max[i] = f[i, j]
    exp_f = np.zeros_like(f)
    sum_exp_f = np.zeros((num_train, 1))
    for i in xrange(num_train):
        for j in xrange(num_classes):
            f[i, j] -= f_max[i]
            exp_f[i, j] = math.exp(f[i, j])
            sum_exp_f[i] += exp_f[i, j]
    for i in xrange(num_train):
        loss += -math.log(exp_f[i, y[i]] / sum_exp_f[i])
    loss /= num_train

    # Calculate regularization term
    reg_term = 0.0
    for i in xrange(W.shape[0]):
        for j in xrange(W.shape[1]):
            reg_term += W[i, j]**2
    loss += reg * reg_term

    # Calculate gradient
    P = np.zeros((num_train, num_classes))
    for i in xrange(num_train):
        for j in xrange(num_classes):
            P[i, j] = exp_f[i, j] / sum_exp_f[i]
        P[i, y[i]] -= 1
    for i in xrange(dW.shape[0]):
        for j in xrange(dW.shape[1]):
            dW[i, j] = 1 / num_train * np.dot(X[:, i].T, P[:, j])
    #############################################################################
    #                          END OF YOUR CODE                                 #
    #############################################################################

    return loss, dW
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def softmax_classifier(W, input, label, lamda):\n\n ############################################################################\n # TODO: Put your code here\n\n loss = 0.0\n num_train = input.shape[0]\n num_classes = W.shape[1]\n\n score = np.dot(input, W) # (N,C)\n prediction = np.argmax(score, axis=1)\n score -= np.max(score, axis=1, keepdims=True)\n\n # # cross entropy loss\n # # take exponent of the score and normalized with sum of all exponents.\n probs = np.exp(score) # (N,C)\n e_y = np.sum(np.multiply(probs,label), axis=1) # (N,) probability for correct class\n e_sum = np.sum(probs, axis=1) # (N,) sum of probability over all classes\n\n # implementation of loss equivalent l_i = -f_y_i + log sum_j(e^(f_j))\n # loss = np.sum(-np.log(e_y/e_sum)) # sum of -log across all samples.\n # loss /= num_train # average loss\n loss = np.sum(-1 * e_y) + np.sum(np.log(e_sum))\n loss /= num_train\n\n loss += lamda * np.sum(W * W) # regularization \n\n # Gradient\n delta_score = probs / e_sum.reshape(num_train,1) # (N,C)\n delta_score -= label # (NxC)\n gradient = np.dot(input.T, delta_score)\n gradient /= num_train\n gradient += lamda * 2 * W\n\n ############################################################################\n\n return loss, gradient, prediction", "def softmax(x):\n # (n_samples, n_classes)\n if len(x.shape) == 2:\n row_max = np.max(x, axis=1)\n x -= row_max.reshape((x.shape[0], 1))\n x = np.exp(x)\n row_sum = np.sum(x, axis=1)\n x /= row_sum.reshape((x.shape[0], 1))\n # (n_samples, n_tasks, n_classes)\n elif len(x.shape) == 3:\n row_max = np.max(x, axis=2)\n x -= row_max.reshape(x.shape[:2] + (1,))\n x = np.exp(x)\n row_sum = np.sum(x, axis=2)\n x /= row_sum.reshape(x.shape[:2] + (1,))\n return x", "def softmax_loss_naive(W, X, y, reg):\r\n # Initialize the loss and gradient to zero.\r\n loss = 0.0\r\n dW = np.zeros_like(W)\r\n num_train = X.shape[1] # d*n\r\n num_class = W.shape[0]\r\n\r\n #############################################################################\r\n # Compute the softmax loss and its gradient using explicit loops. #\r\n # Store the loss in loss and the gradient in dW. If you are not careful #\r\n # here, it is easy to run into numeric instability. Don't forget the #\r\n # regularization! #\r\n #############################################################################\r\n loss = 0.0\r\n for i in range(num_train):\r\n X_i = X[:,i] # D*1\r\n score_i = W.dot(X_i)\r\n score_i -= np.max(score_i) #C*1 but keepdims = false so it becomes 1*C\r\n exp_score_i = np.exp(score_i)\r\n probs_i = exp_score_i/np.sum(exp_score_i) #1*C\r\n correct_logprobs_i = -np.log(probs_i[y[i]])\r\n loss += correct_logprobs_i\r\n \r\n dscore_i = probs_i.reshape(num_class,-1)#c*1\r\n dscore_i[y[i]] -= 1 #C*1\r\n X_i = X_i.reshape(1,-1)# 1*D\r\n dW += dscore_i.dot(X_i)\r\n \r\n loss /= num_train\r\n loss += 0.5*reg*np.sum(W*W)\r\n\r\n dW /= num_train\r\n dW += reg*W\r\n \r\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n num_train = X.shape[0]\n # print(\"num_train:\", num_train)\n num_classes = W.shape[1]\n # print(\"num_classes:\", num_classes)\n \n for i in range(num_train):\n scores = X[i].dot(W) # scores is 1 * C\n correct_class = y[i]\n \n # LOSS DUE TO TRAINING SAMPLE = -log(exp^correct_score / sum(exp^all_other_scores))\n log_c = np.max(scores)\n scores -= log_c\n correct_class_score = scores[correct_class]\n exp_scores = np.exp(scores)\n sum_exp_scores = np.sum(np.exp(scores))\n proportion = np.exp(correct_class_score) / sum_exp_scores\n loss -= np.log(proportion)\n # print(proportion)\n \n # ALTERNATIVELY: (we split the log)\n# loss -= scores[y[i]]\n# loss += np.log(np.sum(np.exp(X[i].dot(W))))\n \n # UPDATE GRADIENT\n for j in range(num_classes):\n p = np.exp(scores[j]) / sum_exp_scores # \"probability\" of class j\n dW[:,j] += (p - (j == y[i])) * X[i,:]\n # dW is D by C\n\n loss /= num_train\n loss += reg * np.sum(W * W) \n dW /= num_train\n dW += reg * 2 * W\n\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n \n return loss, dW", "def softmax_loss(x, y):\n # softmax\n num = np.exp(x)\n den = np.sum(num, axis=1)\n softmax = num/den[:, None]\n N = x.shape[0]\n\n # compute the los per class\n loss = softmax[np.arange(N), y]\n loss = -np.log(loss)\n\n # sum all the losses and divide by number of class\n # Also add the regularization loss term\n loss = np.sum(loss)/N \n \n dscores = softmax\n dscores[np.arange(N), y] -= 1\n dscores /= N\n\n return loss, dscores", "def SoftmaxLayer(inputs, n_classes):\n\n l = Conv2DLayer(inputs, n_classes, filter_size=1, nonlinearity=linear, W=HeUniform(gain='relu'), pad='same',\n flip_filters=False, stride=1)\n\n # We perform the softmax nonlinearity in 2 steps :\n # 1. Reshape from (batch_size, n_classes, n_rows, n_cols) to (batch_size * n_rows * n_cols, n_classes)\n # 2. Apply softmax\n\n l = DimshuffleLayer(l, (0, 2, 3, 1))\n batch_size, n_rows, n_cols, _ = get_output(l).shape\n l = ReshapeLayer(l, (batch_size * n_rows * n_cols, n_classes))\n l = NonlinearityLayer(l, softmax)\n\n return l\n\n # Note : we also tried to apply deep supervision using intermediate outputs at lower resolutions but didn't see\n # any improvements. Our guess is that FC-DenseNet naturally permits this multiscale approach", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n num_train = X.shape[0]\n num_classe = W.shape[1]\n loss = 0.0\n\n for i in range(num_train): #pour chaque image de l'ensemble d'entrainement\n scores = X[i].dot(W)\n scores -= max(scores)\n\n correct_class_score = scores[y[i]] #y[i]=c\n e_syi = np.exp(correct_class_score)\n e_sj = np.sum(np.exp(scores))\n\n loss -= np.log(e_syi/e_sj)\n\n for k in range(num_classe): #pour chaque classe\n dW[:, k] += ((np.exp(scores[k])/e_sj) - (k == y[i])) * X[i].T\n\n # Right now the loss is a sum over all training examples, but we want it\n # to be an average instead so we divide by num_train.\n loss /= num_train\n dW/= num_train\n\n # Add regularization to the loss.\n loss += reg * np.sum(W * W)\n dW += 2 * reg * W\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train=X.shape[0]\n num_class=W.shape[1]\n num_feature=X.shape[1]\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n for i in range(num_train):\n #W*Xi C*1\n x=np.exp(np.dot(W.T,X[i,:]))\n denominator=np.sum(x)\n numerator=x[y[i]]\n loss-=np.log(numerator/denominator)\n #numerator and denominator\n #for j in range(num_class):\n normalize_score=x/denominator\n nm=np.reshape(normalize_score, (num_class, 1))\n \n #CxD\n dscore=nm.dot(np.reshape(X[i,:],(1,num_feature)))\n #print(dscore.shape)\n\n dscore[y[i],:]-=X[i,:]\n dW+=dscore.T\n\n loss/=num_train\n dW = dW/num_train + reg*W\n #\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n (num_class, D), (D, num_train) = (W.shape, X.shape)\n class_scores = np.dot(W, X)\n \n # Subtract maximum unnormalized score from each set of class scores\n for i in range(num_train):\n max_class_score = np.max(class_scores[:, i])\n for j in range(num_class):\n class_scores[j, i] -= max_class_score\n \n # Compute softmax and update gradient\n for i in range(num_train):\n normalization_term = sum(np.exp(class_score) for class_score in class_scores[:, i])\n for j in range(num_class):\n class_scores[j, i] = np.exp(class_scores[j, i]) / normalization_term\n # Thanks again to MyHumbleSelf for making me examine this further and discover a bug in my derivation of the softmax gradient!\n dW[j] += (class_scores[j, i] - (j==y[i])) * X[:, i]\n \n # Compute cross-entropy errors and total loss from that\n losses = [np.log(class_scores[y[i], i]) for i in range(num_train)]\n loss = -sum(losses) / num_train\n\n # Add regularization to loss and normalize dW\n loss += 0.5 * reg * np.sum(W * W)\n dW /= num_train\n dW += reg * W\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n # softmax P(Y=k|X=x_i) = e^{s_k}/∑e^{s_j} softmax loss = -log(softmax)\n num_train = X.shape[0]\n num_class = W.shape[1]\n for i in range(num_train):\n scores = X[i].dot(W) # get scores\n max_score = np.max(scores)\n scores -= max_score # 考虑数值计算稳定性 softmax = (e^s_c - max)/∑(e^s_j - max)\n correct_score = scores[y[i]] # score_correct\n P_ic = np.exp(correct_score)/np.sum(np.exp(scores))\n loss += -np.log(P_ic)\n for j in range(num_class):\n if j == y[i]:\n dW[:, j] += (P_ic - 1) * X[i].T\n else:\n P_ij = np.exp(scores[j])/np.sum(np.exp(scores))\n dW[:, j] += P_ij * X[i].T\n \n \n loss /= num_train\n loss += reg*np.sum(W*W)\n dW /= num_train\n dW += 2 * reg * W\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train = X.shape[0]\n num_class = W.shape[1]\n #scores = np.zeros(num_train,num_class)\n scores = X.dot(W)\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n for i in range(num_train):\n # compute Li\n fmax= np.max(scores[i])\n scores[i] -= fmax\n correct_class_score = scores[i,y[i]]\n M = np.exp(correct_class_score)/np.sum(np.exp(scores[i]))\n loss += -np.log(M)\n for j in range(num_class):\n N = np.exp(scores[i,j])/np.sum(np.exp(scores[i]))\n if j ==y[i]:\n dW[:,y[i]]+= (M-1)*X[i].T\n else:\n dW[:,j] += N*X[i].T \n loss /= num_train\n loss += reg*np.sum(W*W)\n dW /= num_train \n dW += 2*reg*W \n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(inputs):\n probs = np.exp(inputs)\n # print(probs.shape)\n # t = np.sum(probs, axis=0)\n # print(t.shape)\n\n probs /= np.sum(probs, axis=0)[np.newaxis,:]\n return probs", "def softmax(self, scores):\n\n\n # for each sample, for each class ,caclulate\n # np.exp(scores) : still (n_samples, n_classes)\n\n # axis = 1\n # a00, a01, a02 as a sinlge one to perfrom np_sum\n # which is the same sample \n # sum_exp : still (n_samples, 1)\n\n # softmax = (n_samples, n_classes) / (n_samples, 1) = (n_samples, n_classes) \n\n sum_exp = np.sum(np.exp(scores), axis=1, keepdims=True)\n softmax = np.exp(scores) / sum_exp\n \n return softmax", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n # print \"dW's shape\", dW.shape\n # compute the loss and the gradient\n num_classes = W.shape[1]\n num_train = X.shape[0]\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax.ipynb loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n # For every training image\n for train_image in xrange(num_train):\n # Multiply the weights by the image to get the scores\n scores = X[train_image].dot(W)\n # print(scores)\n # And then get the correct score\n correct_label = y[train_image]\n correct_score = scores[correct_label]\n # TODO: Right up to here\n # And then get the score of every other classifier\n all_scores = np.sum(scores)\n # Add a normalizing factor for numeric stability\n normalizing_constant = np.max(scores)\n scores -= normalizing_constant\n correct_score -= normalizing_constant\n #Calculating the softmax values\n softmax = np.exp(correct_score)/np.sum(np.exp(scores))\n\n # print(\"Correct score softmax\",softmax)\n\n # And calculating the loss\n loss += -1*np.log(softmax)\n # print loss\n #TODO: Loss computation is also correct\n\n # And calculating the gradient\n\n # First, update the Weight matrix with the correct example's derivative\n dW[:,correct_label] += (softmax-1)*np.transpose(X[train_image])\n\n # Then do the same for the wrong cases\n incorrect_labels = [x for x in xrange(num_classes) if x != correct_label]\n # Now, update the weights\n for label_index in incorrect_labels:\n #Calculating the softmax for a wrong label\n incorrect_label_softmax = np.exp(scores[label_index])/(np.sum(np.exp(scores)))\n # Calculating the derivative\n necessary_weight = incorrect_label_softmax*np.transpose(X[train_image])\n # Updating the weights\n dW[:,label_index] += necessary_weight\n\n\n # Divide the loss\n loss /= num_train\n dW /= num_train\n\n # Now, do regularization\n loss += 0.5*reg*np.sum(W*W)# Penalize big weights\n dW += reg*W\n\n\n\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train = X.shape[1]\n num_classes = W.shape[0]\n #############################################################################\n # Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n for i in range(num_train): # for each image\n # compute the score\n scores = W.dot(X[:, i])\n\n # shift the values of f so that the highest number is 0:\n scores -= np.max(scores)\n\n # compute the loss\n loss += -np.log(np.exp(scores[y[i]]) / np.sum(np.exp(scores)))\n\n # gradient(https://github.com/seyedamo/cs231n/blob/master/assignment1/cs231n/classifiers/softmax.py)\n scores = np.exp(scores)\n scores /= np.sum(scores)\n for j in range(num_classes): # for each class\n dW[j, :] += scores[j] * X[:, i].T\n\n # dW wrt correct class scores w_yi\n dW[y[i], :] += -X[:, i].T\n\n # Average the loss \n loss /= num_train\n\n # Add regularization to the loss.\n loss += 0.5 * reg * np.sum(W * W)\n\n # average of the gradient\n dW /= num_train\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n return loss, dW", "def softmax_loss(x, y):\n N, C = x.shape\n loss, dx = 0, np.zeros(x.shape) \n for i in range(N):\n loss += -np.log(np.exp(x[i,y[i]])/np.sum(np.exp(x[i,:])))\n dx[i,:] = np.exp(x[i,:])/np.sum(np.exp(x[i,:]))\n dx[i,y[i]] += (-1)\n \n loss /= N\n dx /= N\n return loss, dx", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n num_classes = W.shape[1]\n #print('num_classes = ', num_classes)\n num_train = X.shape[0]\n #print('num_train = ', num_train)\n \n min_score = 0.0\n shifted_scores = np.zeros(W.shape[1])\n #max_score = np.zeros(W.shape[1])\n max_score = 0.0\n \n loss_array = np.zeros(y.shape[0])\n for i in range(num_train):\n scores = X[i].dot(W)\n #print('scores dimensions = ', scores.shape)\n #print('scores = ', scores)\n #print('i =', i, 'y = ', y[i])\n min_score = np.min(scores)\n max_score = np.max(scores)\n #print(min_score,max_score)\n shifted_scores = np.multiply(-1,scores + abs(min_score))\n #print(scores)\n #print(shifted_scores)\n exp_scores = np.exp(shifted_scores)\n norm = np.amax(exp_scores)\n norm_scores = np.divide(exp_scores,norm)\n loss_array[i] = np.multiply(-1,np.log(norm_scores[y[i]]/(np.sum(norm_scores)-norm_scores[y[i]])))\n #print(loss_array)\n for j in range(num_classes): \n\t\n if j == y[i]: \n dW[:,j] = np.multiply(norm_scores[y[i]],1-norm_scores[y[i]])\n else:\n dW[:,j] = np.multiply(-1,np.multiply(norm_scores[y[i]],norm_scores[y[j]]))\n\t\t\t\n\t\t\t\n loss = np.amax(loss_array)\n\n # Add regularization to the loss.\n loss = 0.5 * reg * np.sum(W * W) + loss\n \n \n pass\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(self, X):\n\n n_samples, _ = X.shape\n\n X_ = np.empty((n_samples, self.n_classes_))\n old_ind = 0\n for i, n in enumerate(self.n_weights_per_class):\n new_ind = int(old_ind + n + 1)\n X_[:, i] = np.dot(X[:, old_ind: new_ind], self.weights_[old_ind: new_ind])\n old_ind = new_ind\n\n exp_X = 
np.exp(X_ - np.max(X_, axis=1).reshape(-1, 1))\n softmax = exp_X / (np.sum(exp_X, axis=1)).reshape((n_samples, 1))\n\n return softmax", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n pass\n num_tran = X.shape[0]\n num_classes = W.shape[1]\n loss_par =np.zeros(num_tran)\n\n Score = np.dot(X,W)\n expS = np.exp(Score)\n # for i in num_tran:\n sumS = np.sum(expS,axis=1)\n sumS = sumS.reshape(sumS.shape[0],1)\n normalize = np.divide(expS,sumS)\n softmax = -np.log(normalize)\n\n for i in np.arange(num_tran):\n loss_par[i]=softmax[i, y[i]]\n for j in np.arange(num_classes) :\n if j!=y[i]:\n # dW[:,j]+=1/normalize[i,y[i]]*expS[i,y[i]]*expS[i,j]/np.power(sumS[i],2) *X[i,:]\n dW[:,j]+=expS[i,j]/sumS[i] *X[i,:]\n else:\n # dW[:,y[i]]+=-1/normalize[i,y[i]]*expS[i,y[i]]*(sumS[i]-expS[i,y[i]])/np.power(sumS[i],2) *X[i,:]\n dW[:,y[i]]+=-(sumS[i]-expS[i,y[i]])/sumS[i] *X[i,:]\n\n dW /=num_tran\n\n loss = np.sum(loss_par) / num_tran\n # print num_tran,loss\n\n dW+=reg*W\n loss+=0.5*reg*np.sum(W*W)\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss(x, y):\n\n eps = 1e-5\n \n N,C = x.shape\n p = softmax(x)\n llikelihood = -np.log(p[range(N),y] + eps)\n# print(llikelihood)\n loss = np.sum(llikelihood) / N\n\n dx = p\n dx[range(N),y] -= 1\n dx = dx/N\n \n return loss, dx", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n num_train = X.shape[0]\n # print(\"num_train:\", num_train)\n num_classes = W.shape[1]\n # print(\"num_classes:\", num_classes)\n \n scores = X.dot(W) # scores is N*D x D*C -> N*C \n log_c = np.max(scores, axis=1).T\n scores -= log_c[:,None]\n correct_class_score = scores[np.arange(num_train),y]\n exp_scores = np.exp(scores)\n sum_exp_scores = np.sum(np.exp(scores), axis=1)\n proportion = np.exp(correct_class_score) / sum_exp_scores\n loss -= np.sum(np.log(proportion))\n \n # calculating dW = (p - (c = correct c ? 
1 : 0)) * x\n correct_class_one_hot = np.zeros_like(scores)\n correct_class_one_hot[np.arange(num_train),y] += 1\n p = np.exp(scores) / sum_exp_scores[:,None] - correct_class_one_hot # N*C / N:1 -> N*C\n dW += X.T.dot(p) # D*N x N*C -> D*C\n\n loss /= num_train\n loss += 0.5 * reg * np.sum(W * W) \n dW /= num_train\n dW += reg * W\n\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n num_classes = W.shape[1]\n num_train = X.shape[0]\n\n for i in xrange(num_train):\n scores = X[i].dot(W)\n\n # Normalization trick to resolve numerical instability\n # when dealing with the large exponential terms.\n scores -= np.max(scores)\n\n # Cache some terms that are used repeatedly.\n exp_scores = np.exp(scores)\n sum_exp_scores = np.sum(exp_scores)\n correct_class_score = scores[y[i]]\n \n # Update the loss \n loss -= correct_class_score\n loss += np.log(sum_exp_scores)\n\n # Update the gradient\n dW[:,y[i]] -= X[i,:].T\n for j in xrange(num_classes):\n dW[:,j] += ((X[i,:].T * exp_scores[j]) / sum_exp_scores)\n\n \n # Right now the loss is a sum over all training examples, but we want it\n # to be an average instead so we divide by num_train.\n loss /= num_train\n\n dW /= num_train\n\n # Add regularization to the loss.\n loss += 0.5 * reg * np.sum(W * W)\n\n dW += reg*W\n \n pass\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(x):\r\n sum_c = np.sum(np.exp(x), axis=1)\r\n sum_c = np.expand_dims(sum_c, axis=1)\r\n pred_x = np.divide(np.exp(x), sum_c)\r\n return pred_x", "def test_softmax_base():\n x = np.array([[[2.0, 3.0, 4.0, 5.0],\n [3.0, 4.0, 5.0, 6.0],\n [7.0, 8.0, 8.0, 9.0]],\n [[1.0, 2.0, 3.0, 4.0],\n [5.0, 6.0, 7.0, 8.0],\n [6.0, 7.0, 8.0, 9.0]]])\n res = np.array([[[0.0320586, 0.08714432, 0.23688282, 0.64391426],\n [0.0320586, 0.08714432, 0.23688282, 0.64391426],\n [0.07232949, 0.19661193, 0.19661193, 0.53444665]],\n [[0.0320586, 0.08714432, 0.23688282, 0.64391426],\n [0.0320586, 0.08714432, 0.23688282, 0.64391426],\n [0.0320586, 0.08714432, 0.23688282, 0.64391426]]])\n obj.run(res=res, input=x)", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n num_cases = X.shape[0]\n num_class = W.shape[1]\n y_label = np.zeros((num_cases,num_class))\n for i in range(num_cases):\n h1 = np.exp(X[i].dot(W))\n h = h1/np.sum(h1)\n y_label[i] = (np.arange(h.shape[0]) == y[i]) + 0\n loss -= (np.sum(y_label[i] * np.log(h) + (1 - y_label[i]) * np.log(1 - h)))\n delta = np.zeros(W.shape)\n for j in range(num_class):\n delta[:,j] += X[i]\n delta[:,j] *= h1[j]\n delta[:,j] *= (np.sum(h1) - h1[j])/(np.sum(h1) ** 2)\n delta[:,j] = y_label[i][j] / h[j] * delta[:,j] - (1 - y_label[i][j]) / (1 - h[j]) * delta[:,j]\n dW -= delta\n loss /= num_cases\n loss += reg * np.sum(W * W)\n dW /= num_cases\n dW += 2 * reg * W\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n num_train = X.shape[0]\n num_class = W.shape[1]\n l = np.zeros([num_train,1])\n for i in range(num_train):\n scores = np.dot(X[i], W)\n f_yi = scores[y[i]]\n exp_num = np.exp(f_yi)\n exp = np.exp(scores)\n exp_deno = np.sum(exp)\n for j in range(num_class):\n if (j == y[i]):\n dW[:,j] -= X[i,:].transpose()\n dW[:,j] += (np.exp(scores[j]) / exp_deno) * X[i,:].transpose()\n l[i] = -np.log(exp_num/exp_deno)\n\n loss = np.sum(l)/num_train\n loss += reg * np.sum(W*W)\n dW /= num_train \n dW += 2 * reg * W\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n return loss, dW", "def softmax(input, dim, inplace=False):\n return FunctionLib.apply(\n 'Softmax', input.device, [input],\n outputs=[input if inplace else None], axis=dim)", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n for i in range(X.shape[0]):\n# c = np.matmul(X[i],W)\n# c -= np.amax(c)\n# e_c = np.exp(c)\n# denom = np.sum(e_c)\n# #Nice fact: we know that the largest element in c will also be the largest softmax value, so we only\n# # need to transform that one value. \n# sm_c = e_c/denom\n# \n# loss1 += -np.log(sm_c[y[i]])\n\n # Need to make this whole dang thing more numerically stable. 
\n c = np.matmul(X[i],W)\n c -= np.amax(c)\n e_c = np.exp(c)\n denom = np.sum(e_c)\n sm_c = e_c/denom\n\n loss += np.log(denom) - c[y[i]]\n# print(-np.log(sm_c[y[i]]) - (np.log(denom)-c[y[i]]))\n\n \"\"\"They are basically the same value\"\"\"\n\n # now computing some gradients\n dL_ds = sm_c\n dL_ds[y[i]] -= 1\n #note that sm_c is modified now!\n \"\"\" #ah, something fundamentally different is happening with numpy. When an array element\n is changed, it's really changed for good. And it changes for all pointers pointing to same object.\n yikes. Actually it's the same with python lists. Anything pointing to And underlying object can\n change that underlying object for all things that point to it. Alas.\"\"\"\n# import pdb; pdb.set_trace()\n \"\"\"Okay I just coudln't bear the for loops...\"\"\"\n dW_update = np.matmul(X[i].reshape(1,X.shape[1]).T,dL_ds[np.newaxis,:])\n dW+=dW_update\n # for n in range(W.shape[0]):\n# for m in range(W.shape[1]):\n# if m == y[i]:\n# dW[n,m] += X[i,n]*(sm_c[m]-e_c[m])\n# else:\n# dW[n,m] += X[i,n]*sm_c[m]\n\n # should be numerically unstable I think.\n\n loss /= X.shape[0]\n loss += reg*np.sum(W*W)\n\n dW /= X.shape[0]\n dW += reg*2*W\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n return loss, dW", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train = X.shape[0]\n dim = dW.shape[0]\n num_classe = W.shape[1]\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n S = X.dot(W)\n # ajouter le - max a la fin\n indexes=np.arange(num_train)\n #c = correct class score\n c = S[indexes, y]\n\n e_syi = np.exp(c)\n e_sj = np.sum(np.exp(S), axis=1)\n Li = - np.log(e_syi/e_sj)\n loss = np.sum(Li) / num_train + reg * np.sum(W * W)\n\n\n M = np.exp(S)/(np.repeat(e_sj, num_classe).reshape(num_train, num_classe)) #(500,10)\n M[indexes, y] -= 1 #bonnes classes\n dW = X.T.dot(M)\n\n dW = dW/num_train + 2 * reg * W\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(x):\n shape = x.shape\n probs = np.exp(x - np.max(x, axis=len(shape) - 1, keepdims=True))\n probs /= np.sum(probs, axis=len(shape) - 1, keepdims=True)\n return probs", "def softmax_my(x):\n dim = 1\n try:\n dim = x.ndim\n except AttributeError:\n return cal_1D_softmax(x)\n if dim == 1:\n return cal_1D_softmax(x)\n elif dim == 2:\n return cal_2D_softmax(x)", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n num_train = X.shape[0]\n num_classes = W.shape[1]\n for i in xrange(num_train):\n scores = X[i, :].dot(W)\n scores -= np.max(scores)\n correct_scores = scores[y[i]]\n score_sum = np.sum(np.exp(scores))\n h = np.exp(correct_scores) / score_sum\n loss += -np.log(h)\n for j in xrange(num_classes):\n if j == y[i]:\n dW[:, y[i]] += (np.exp(scores[j]) / score_sum - 1) * X[i, :]\n else:\n dW[:, j] += (np.exp(scores[j]) / score_sum) * X[i, :]\n \n \n loss /= num_train + ( reg * np.sum(W * W))\n dW /= num_train\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(inputs):\n # Your code pass\n # 对向量的每个元素归一化,先把每个元素作为e的指数求结果\n input_exp = np.exp(inputs)\n # 返回 每个元素作为e的指数求结果 与所在列所有的这些结果和的比值,另外添加一个维度\n return input_exp / np.sum(input_exp, axis=1)[:, np.newaxis]", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train = X.shape[0]\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n scores = X.dot(W)\n scores_exp = np.exp(scores-np.max(scores, axis=1, keepdims=True))\n\n sum = np.sum(scores_exp, axis=1, keepdims=True)\n probability = scores_exp/sum\n #list containing the correct classification\n indices = [range(num_train), y]\n correct_class_score = probability[indices]\n\n #calculate -log(prob_y) and take the sum across all training examples\n loss = np.sum(-np.log(correct_class_score))\n loss /= num_train\n loss += 0.5 * reg * np.sum(W * W)\n\n #Compute Gradient\n probability[indices] -=1\n dW = X.T.dot(probability)\n dW /= num_train\n dW += .5 * reg * W\n\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def _softmax(x):\n e = K.exp(x - K.max(x, axis=-1, keepdims=True))\n s = K.sum(e, axis=-1, keepdims=True)\n return e / s", "def softmax_loss1(x, y):\n # tmp = np.max(x, axis=1, keepdims=True)\n shifted_logits = x - np.max(x, axis=1, keepdims=True)\n Z = np.sum(np.exp(shifted_logits), axis=1, keepdims=True)\n log_probs = shifted_logits - np.log(Z)\n probs = np.exp(log_probs)\n N = x.shape[0]\n # tmp2 = np.arange(N)\n tmp3 = log_probs[np.arange(N), y]\n # tmp4 = log_probs[[0,1,2],[2,5,0]]\n loss = -np.sum(log_probs[np.arange(N), y]) / N\n dx = probs.copy()\n dx[np.arange(N), y] -= 1\n dx /= N\n return loss, dx", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train = X.shape[0]\n num_classes = W.shape[1]\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n scores = np.dot(X,W)\n scores = (scores.T - np.max(scores,1)).T\n for i in xrange(num_train):\n nominator = np.exp(scores[i,:])\n denominator = np.sum(np.exp(scores[i,:]))\n loss -= np.log(nominator[y[i]]/denominator)\n for j in xrange(num_classes):\n dW[:,j] += (nominator[j]/denominator)*X[i,:]\n dW[:,y[i]] -= X[i,:]\n\n loss /= num_train\n dW /= num_train\n loss += 0.5*reg*np.sum(W*W)\n dW += reg*W\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(input):\n list_value = []\n len_compute = input.shape[-1]\n shape_input = input.shape\n for x in input.reshape(-1, len_compute):\n # print(x)\n e_x = np.exp(x - np.max(x))\n res = e_x / e_x.sum(axis=0)\n list_value.append(res)\n\n return np.array(list_value).reshape(shape_input)", "def softmax(inputs):\n return np.exp(inputs) / float(sum(np.exp(inputs)))", "def softmax(inputs):\n return np.exp(inputs) / float(sum(np.exp(inputs)))", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n num_classes = W.shape[1]\n num_train = X.shape[0]\n for i in range(num_train):\n score = X[i].dot(W)\n exp_score = np.exp(score)\n probability = exp_score[y[i]] / exp_score.sum()\n loss += -np.log(probability)\n dp = -1 / probability\n for j in range(num_classes):\n ds = np.exp(score[j])\n if j == y[i]:\n des = (exp_score.sum() - exp_score[y[i]]) / np.square(exp_score.sum())\n else:\n des = -(exp_score[y[i]]) / np.square(exp_score.sum())\n dW[:, j] += X[i].T * ds * des * dp # chain rule\n\n loss /= num_train\n dW /= num_train\n\n loss += 0.5 * reg * np.sum(W * W)\n dW += reg * W\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax4(x):\n ndim = K.ndim(x)\n if ndim == 2:\n return K.softmax(x)\n elif ndim == 3:\n e = K.exp(x - K.max(x, axis=-1, keepdims=True))\n s = K.sum(e, axis=-1, keepdims=True)\n return e / s\n elif ndim == 4:\n e = K.exp(x - K.max(x, axis=1, keepdims=True))\n s = K.sum(e, axis=1, keepdims=True)\n return e / s\n else:\n raise Exception('Cannot apply softmax to a tensor that is not 2D or 3D. 
' +\n 'Here, ndim=' + str(ndim))", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n # needed for calculations\n num_train = X.shape[1]\n\n for i in xrange(num_train):\n # calculate the scores for the current training example with the current weights\n scores = W.dot(X[:, i])\n # scale by the max for numerical stability\n scores -= np.max(scores)\n # calculate the loss\n loss += -scores[y[i]] + np.log(np.sum(np.exp(scores)))\n\n ## L' = -1_y + 1/(\\sum_{}^{} e^f) * e^f\n # e^f\n scores = np.exp(scores)\n # 1/(\\sum_{}^{} e^f)\n scores /= np.sum(scores)\n # -1_y\n scores[y[i]] -= 1\n\n # now scale it by the data\n # we need to use [:, np.newaxis] because when you make a X by 1 dimension slices in numpy the 1 dimension is null\n dW += scores[:, np.newaxis].dot(X[:, i][:, np.newaxis].T)\n\n\n # get the average loss\n loss /= num_train\n # get the average gradient\n dW /= num_train\n\n # regularize the loss function\n loss += 0.5 * reg * np.sum(W * W)\n\n return loss, dW", "def softmax(x):\r\n e_x = np.exp(x - np.expand_dims(np.max(x, axis=-1), axis=-1))\r\n return e_x / np.expand_dims(e_x.sum(axis=-1), axis=-1) # only difference\r", "def softmax_loss(x, y):\n loss, dx = None, None\n ###########################################################################\n # TODO: Implement the loss and gradient for softmax classification. This #\n # will be similar to the softmax loss vectorized implementation in #\n # cs231n/classifiers/softmax.py. #\n ###########################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n num_train = x.shape[0]\n\n x = np.exp(x)\n temp_sum = np.sum(x, axis = 1, keepdims = True)\n x = x / temp_sum\n softmax_result = x\n trans_y = np.zeros((x.shape[0],x.shape[1]))\n trans_y[np.arange(x.shape[0]), y] += 1\n x = - np.log(x)\n x = x * trans_y\n x_sum = np.sum(x)\n loss = x_sum / num_train\n loss = loss + \n\n dx = softmax_result - trans_y\n dx = dx / num_train\n\n\n pass\n\n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n ###########################################################################\n # END OF YOUR CODE #\n ###########################################################################\n return loss, dx", "def softmax_loss(x, y):\n #raise NotImplementedError\n #######################################################################\n # #\n # #\n # TODO: YOUR CODE HERE #\n # #\n # #\n #######################################################################\n N=x.shape[0]\n\n \n x-=np.max(x,axis=1,keepdims=True)\n temp=np.exp(x)\n dr_vec=np.sum(temp,axis=1,keepdims=True)\n\n nr=(x[np.arange(N),y]).reshape([N,1])\n loss=np.sum(-(nr)+np.log(dr_vec))\n \n loss=(loss/N)\n temp/=dr_vec\n temp[np.arange(N),y] -= 1\n \n dx = temp/N\n \n return loss, dx", "def softmax_loss(x, y):\n probs = np.exp(x - np.max(x, axis=1, keepdims=True))\n probs /= np.sum(probs, axis=1, keepdims=True)\n N = x.shape[0]\n loss = -np.sum(np.log(probs[np.arange(N), y])) / N\n dx = probs.copy()\n dx[np.arange(N), y] -= 1\n dx /= N\n return loss, dx", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1, keepdims=True)", "def softmax_loss(x, y):\n ############################################################################\n # TODO: You can use the previous softmax loss function here. 
# \n # Hint: Be careful on overflow problem #\n ############################################################################\n ############################################################################\n # START OF YOUR CODE #\n ############################################################################\n N = len(x)\n # We want to get the real y\n log_C = -np.max(x,axis=1,keepdims = True)\n # Get numerator\n e_all = np.exp(x+log_C)\n # Get the final prob\n prob = e_all/e_all.sum(axis=1,keepdims=True)\n # Find final loss\n loss = np.sum(-np.log(prob)[np.arange(N),y])/N\n # Get dx\n dx = prob\n dx[np.arange(N),y] -= 1\n dx /= N\n \n ############################################################################\n # END OF YOUR CODE #\n ############################################################################\n return loss, dx", "def softmax(x):\r\n output = np.exp(x)\r\n return output / np.sum(output, axis=1, keepdims=True)", "def softmax(x):\n #pass # TODO: Compute and return softmax(x)\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n #############################################################################\n # START OF YOUR CODE #\n #############################################################################\n # construct a one-hot vector for y\n onehot_y = np.zeros((y.size, W.shape[1]))\n onehot_y[np.arange(y.size), y] = 1\n dW = dW.T\n for i in range(y.shape[0]):\n f = np.dot(X[i], W)\n \n for j in range(W.shape[1]):\n e_f = np.exp(f - np.max(f))\n softmax = e_f / e_f.sum()\n loss -= onehot_y[i][j] * np.log(softmax[j])\n dW[j] -= X[i] * (onehot_y[i][j] - softmax[j])\n \n loss = loss / y.shape[0] + reg * np.linalg.norm(W)\n dW = dW.T / y.shape[0] + 2 * reg * W\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n \n\n return loss, dW", "def loss(self, X, y):\n\n # Initialize the loss to zero.\n loss = 0.0\n num_classes = self.W.shape[0] # C = num_classes\n num_train = X.shape[0]\n \n exp_a = np.zeros((num_classes,num_train))\n # ================================================================ #\n # YOUR CODE HERE:\n # Calculate the normalized softmax loss. 
Store it as the variable loss.\n # (That is, calculate the sum of the losses of all the training \n # set margins, and then normalize the loss by the number of \n # training examples.)\n # ================================================================ #\n \n \n for i in np.arange(num_train):\n \n Loss = 0.0\n\n class_scores = np.dot(self.W,X[i,:].T) # calculating class scores (C x 1 vector)\n class_scores -= np.max(class_scores) # considering the possible issue for numerical instability and account for it\n\n exp_a[:,i] = np.exp(class_scores) # turning class scores to probabilities (C x 1 vector), without normalization\n\n Loss -= np.log(exp_a[y[i],i]/np.sum(exp_a[:,i]))\n \n\n #p[:,i] = exp_a[:,i]/np.sum(exp_a[:,i]) # p now is a valid probability matrix\n #print(p[:,i])\n\n loss += Loss \n #print(Loss,i) \n \n pass\n loss /= num_train\n # ================================================================ #\n # END YOUR CODE HERE\n # ================================================================ #\n\n return loss", "def softmax(x):\n pass # TODO: Compute and return softmax(x)\n\n exp_x = np.exp(x)\n sum_x = np.sum(exp_x, axis=0)\n softmax = exp_x/sum_x\n \n return softmax", "def softmax(x): \n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax_loss_naive(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n for i in range(X.shape[0]):\n scores = X[i].dot(W)\n \n idx_max = np.argmax(scores)\n s_max = scores[idx_max]\n scores -= s_max # shift for numerical stability\n \n temp = np.exp(scores)\n summation = np.sum(temp)\n loss += (- scores[y[i]] + np.log(summation))\n \n # computing gradients\n # (1) an explicit version:\n# for j in range(W.shape[1]):\n# if j == y[i]:\n# dW[:, j] -= X[i]\n# dW[:, idx_max] -= (-X[i])\n \n# dW[:, j] += (1 / summation) * temp[j] * X[i]\n# dW[:, idx_max] += (1 / summation) * temp[j] * (-X[i])\n# elif j == idx_max:\n# dW[:, j] += 0 # X[i] + (-X[i]) = 0\n# else:\n# dW[:, j] += (1 / summation) * temp[j] * X[i]\n# dW[:, idx_max] += (1 / summation) * temp[j] * (-X[i])\n \n # (2) a more concise version:\n softmax_scores = temp / summation\n for j in range(W.shape[1]):\n if j == y[i]:\n dW[:, j] += (-1 + softmax_scores[j]) * X[i]\n else:\n dW[:, j] += softmax_scores[j] * X[i]\n \n loss /= X.shape[0]\n dW /= X.shape[0]\n \n loss += reg * np.sum(W * W)\n dW += 2 * reg * W\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss(x, y):\n def softmax(x):\n exps = np.exp(x)\n return exps / np.sum(exps, axis=1)[:,None]\n\n N = y.shape[0]\n p = softmax(x)\n log_likelihood = -np.log(p[range(N),y])\n loss = np.sum(log_likelihood) / N\n\n dx = p.copy()\n dx[range(N),y] -= 1\n dx = dx/N\n\n return loss, dx", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its 
gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n # *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n scores = X@W # 500,10\n# print(scores.shape)\n max_scores = np.max(scores, axis=1).reshape(-1,1) # 500, numeric instablity\n# print(max_scores.shape)\n scores -= max_scores # numeric instablity\n# print(scores.shape)\n correct_scores = scores[np.arange(scores.shape[0]), y] # 500,\n P_ic = np.exp(correct_scores)/np.sum(np.exp(scores), axis=1)\n# print(P)\n loss += np.sum(-np.log(P_ic))/scores.shape[0] # L = ∑L_i/N\n loss += reg * np.sum(W * W) # regularization\n # 向量化梯度:用scores构建一个P [500, 10],首先取exp(scores)得到每一个位置的exp,然后对每个位置除以这一行的exp和\n # 上面的操作会得到500,10的矩阵,每个位置都是softmax之后的结果\n # !重点:对于[i,y[i]]位置,根据P_ic - 1, 要减1 \n P = np.exp(scores) # 正确分类的梯度, 位于梯度矩阵所有c的行\n P /= np.sum(np.exp(scores),axis=1).reshape(-1, 1)\n P[np.arange(scores.shape[0]), y] -= 1 # 将 i, y[i] -= 1\n \n # 得到这个矩阵之后,与X.T相乘即可得到dL/dW P(500,10) X(500,3073) X.T (3073, 500) W(3073, 10)\n dW += X.T@P\n dW /= scores.shape[0] # *1/N\n dW += 2*reg*W # 正则化梯度\n \n # *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****\n\n return loss, dW", "def softmax(x):\n \"\"\"\"\"\"\n return exp(x) / sum(exp(x), axis=0)", "def softmax(x):\n x = np.array(x)\n if x.ndim == 1:\n return column_softmax(x)\n else:\n ret_arr = np.array([column_softmax(col) for col in x.T])\n return ret_arr.T", "def softmax(x):\n shape = np.shape(x)\n numDim = len(shape)\n result = np.zeros_like(x, dtype=float)\n \n if numDim == 1:\n totalSum = 0\n # Sum (e^yi)\n for i in range(len(x)):\n totalSum = totalSum + exp(x[i])\n # e^yi / Sum (e^yi)\n for i in range(len(x)):\n result[i] = exp(x[i])/totalSum\n else:\n xDim = len(x[0])\n totalSum = np.zeros(xDim, dtype=float)\n \n # e^yi\n for i in range(len(x)):\n for j in range(len(x[i])):\n result[i,j] = exp(x[i,j])\n totalSum[j] = totalSum[j] + result[i,j]\n # e^yi / Sum (e^yi)\n for i in range(len(result)):\n for j in range(len(result[i])):\n result[i,j] = result[i,j] / totalSum[j]\n \n return result", "def softmax(x):\n scoreMatExp = np.exp(np.asarray(x))\n return scoreMatExp / scoreMatExp.sum(0)", "def softmax(x):\n # https://stackoverflow.com/questions/34968722/softmax-function-python\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train = X.shape[1]\n num_classes = W.shape[0]\n #############################################################################\n # Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n \n # compute scores\n scores = W.dot(X)\n scores -= np.max(scores)\n\n # softmax function\n softmax = np.exp(scores) / np.sum(np.exp(scores), 0) # 10 x 49000 | C x D\n \n # cross entropy loss\n loss = -np.log(softmax[y, range(num_train)]) # 49000\n loss = np.sum(loss) / num_train\n\n # regularisation\n loss += 0.5 * reg * np.sum(W*W)\n\n # gradient (source:https://github.com/MyHumbleSelf/cs231n/blob/master/assignment1/cs231n/classifiers/softmax.py)\n ind = np.zeros(softmax.shape)\n ind[y, range(num_train)] = 1\n dW = np.dot((softmax-ind), X.T)\n dW /= num_train\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss_vectorized(W, X, y, reg):\n num_train = X.shape[0]\n num_class = W.shape[1]\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n scores = X.dot(W)\n temp_matrix = np.zeros(scores.shape)\n \n max_each_row = np.max(scores,axis=1).reshape(-1,1)\n scores -= max_each_row\n summation = np.sum(np.exp(scores),axis=1).reshape(-1,1)\n scores = np.exp(scores)\n scores = np.divide(scores,summation)\n temp_matrix[range(num_train),list(y)] =-1\n scores += temp_matrix\n dW = X.T.dot(scores) / num_train + 2*reg*W \n log_summation = np.log(summation)\n vector = scores[range(num_train),list(y)].reshape(-1,1) \n L = -vector+ log_summation \n loss = np.sum(L)/num_train + reg*np.sum(W*W)\n \n #############################################################################\n # TODO: Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n \n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(self,Weights,X,b):\n N = X.shape[0]\n D = X.shape[1]\n C = Weights.shape[1]\n \n #P = np.zeros((N,C))\n #print P.shape\n\n P1 = np.dot(X,Weights) + b\n P1 = np.exp(P1)\n \n for i in range(N):\n P1[i,:] = P1[i,:]/P1[i,:].sum()\n # print P1\n return P1", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=1)", "def softmax_loss(x, y):\n shifted_logits = x - np.max(x, axis=1, keepdims=True)\n Z = np.sum(np.exp(shifted_logits), axis=1, keepdims=True)\n log_probs = shifted_logits - np.log(Z)\n probs = np.exp(log_probs)\n N = x.shape[0]\n loss = -np.sum(log_probs[np.arange(N), y]) / N\n dx = probs.copy()\n dx[np.arange(N), y] -= 1\n dx /= N\n return loss, dx", "def softmax(x):\n return np.exp(x)/np.sum(np.exp(x),axis=0)", "def softmax(x):\n return np.exp(x)/np.sum(np.exp(x),axis=0)", "def softmax(X):\n num = np.exp(X)\n den = np.sum(np.exp(X))\n return num / den", "def softmax(x):\n npX = np.array(x)\n expX = np.exp(x)\n\n return expX/sum(expX)", "def softmax(x):\n return np.exp(x) / np.sum(np.exp(x), axis=0)\n # return ( x / np.sum(x, axis=0) )", "def test_softmax(n_epochs=250, optimizer='cg'):\n n_hidden = 10\n n_in = 5\n n_steps = 10\n n_seq = 10 # per batch\n n_batches = 50\n n_classes = 3\n n_out = n_classes # restricted to single softmax per time step\n\n np.random.seed(0)\n # simple lag test\n seq = np.random.randn(n_steps, n_seq * n_batches, n_in)\n targets = np.zeros((n_steps, n_seq * n_batches), dtype=np.int)\n\n thresh = 0.5\n # if lag 1 (dim 3) is greater than lag 2 (dim 0) + thresh\n # class 1\n # if lag 1 (dim 3) is less than lag 2 (dim 0) - thresh\n # class 2\n # if lag 2(dim0) - thresh <= lag 1 (dim 3) <= lag2(dim0) + thresh\n # class 0\n targets[2:, :][seq[1:-1, :, 3] > seq[:-2, :, 0] + thresh] = 1\n targets[2:, :][seq[1:-1, :, 3] < seq[:-2, :, 0] - thresh] = 2\n #targets[:, 2:, 0] = np.cast[np.int](seq[:, 1:-1, 3] > seq[:, :-2, 0])\n\n model = MetaRNN(n_in=n_in, n_hidden=n_hidden, n_out=n_out,\n learning_rate=0.005, learning_rate_decay=0.999,\n n_epochs=n_epochs, batch_size=n_seq, activation='tanh',\n output_type='softmax')\n\n model.fit(seq, targets, validate_every=10, compute_zero_one=True,\n optimizer=optimizer)\n\n seqs = xrange(10)\n\n plt.close('all')\n for seq_num in seqs:\n fig = plt.figure()\n ax1 = plt.subplot(211)\n plt.plot(seq[:, seq_num])\n ax1.set_title('input')\n ax2 = plt.subplot(212)\n\n # blue line will represent true classes\n true_targets = plt.step(xrange(n_steps), targets[:, seq_num],\n marker='o')\n\n # show probabilities (in b/w) output by model\n guess = model.predict_proba(seq[:, seq_num][:, np.newaxis])\n guessed_probs = plt.imshow(guess.squeeze().T, interpolation='nearest',\n cmap='gray')\n ax2.set_title('blue: true class, 
grayscale: probs assigned by model')", "def softmax(x):\n orig_shape = x.shape\n\n if len(x.shape) > 1:\n # Matrix\n tmp = np.max(x, axis=1)\n x -= tmp.reshape((x.shape[0], 1))\n x = np.exp(x)\n tmp = np.sum(x, axis=1)\n x /= tmp.reshape((x.shape[0], 1))\n else:\n # Vector\n tmp = np.max(x)\n x -= tmp\n x = np.exp(x)\n tmp = np.sum(x)\n x /= tmp\n\n assert x.shape == orig_shape\n return x", "def softmax(X):\n _X = X - np.max(X, axis=1).reshape(-1, 1)\n ep = np.exp(_X)\n return ep / np.sum(ep, axis=1).reshape(-1, 1)", "def softmax(x):\n xx = x\n x = x.reshape((-1, x.shape[-1]))\n e_x = np.exp(x - np.max(x, 1).reshape(-1, 1))\n res = e_x / e_x.sum(axis=1).reshape(-1, 1)\n return res.reshape(xx.shape)", "def softmax(x: npt.NDArray) -> npt.NDArray:\n row_wise_max = np.max(x, axis=1).reshape(-1, 1)\n exp_x = np.exp(x - row_wise_max)\n return exp_x / np.sum(exp_x, axis=1).reshape(-1, 1)", "def _softmax(self,x):\n e_x = np.exp(x - np.max(x))\n return np.nan_to_num(e_x / np.nan_to_num(e_x.sum(axis=0)))", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n # Compute class scores\n (num_class, D), (D, num_train) = W.shape, X.shape\n class_scores = np.dot(W, X)\n\n # Softmax them\n e_x = np.exp(class_scores - class_scores.max(axis=0))\n class_scores = e_x / e_x.sum(axis=0)\n \n # Create mask of ys\n gold_class_matrix = np.zeros((num_class, num_train))\n gold_class_matrix[y, range(num_train)] = 1\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! #\n #############################################################################\n # Cross entropy loss\n loss = -(gold_class_matrix * np.log(class_scores)).sum()\n \n # Add regularization and normalize\n loss += 0.5 * reg * np.sum(W * W)\n loss /= num_train\n \n # Gradients\n augmented_scores = class_scores - gold_class_matrix\n (num_class, num_train), (num_train, D) = augmented_scores.shape, X.T.shape\n dW = np.dot(augmented_scores, X.T)\n \n # Add regularization and normalize\n dW += reg * W\n dW /= num_train\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(x): \n e_x = np.exp(x - np.max(x)) \n return e_x / e_x.sum()", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n num_train, num_dim = X.shape\n num_classes = W.shape[1]\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n scores = np.dot(X,W)\n # scores = scores.T - np.max(scores,1)\n # f = np.exp(scores.T) \n # correct_scores = f[range(num_train),y] #1*N\n # col_sum = np.sum(f,1)\n # loss = np.sum(-np.log(correct_scores/col_sum))\n\n # mat = f.T/col_sum #\n # mat = mat.T\n # y_pred = np.zeros(mat.shape)\n # y_pred[range(num_train),y] = 1\n # dW = np.dot(X.T,mat-y_pred)\n\n # loss/=num_train\n # loss += 0.5*reg*np.sum(W*W)\n # dW /= num_train\n # dW += reg*W\n f = scores.T - np.max(scores,1)\n f = f.T\n f_correct = scores[range(num_train),y]\n \n sum_col = np.log(np.sum(np.exp(scores),1)) # N*1\n \n loss = sum_col - f_correct # N*1\n loss = np.sum(loss)/num_train + 0.5*reg*np.sum(W*W)\n\n prob = np.exp(f).T / np.sum(np.exp(f),1)\n prob = prob.T\n y_pred = np.zeros(scores.shape)\n y_pred[range(num_train),y] = 1\n dW = X.T.dot(prob - y_pred)\n dW = dW/float(num_train) + reg*W\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax(H, V, d):\n postActivation = np.dot(H,V) + d\n expVector = np.exp(postActivation)\n return expVector/(np.sum(expVector, axis=1)[:,np.newaxis])", "def softmax(X, Y, w, HProp = None, arg=None, reg=None, batchsize=None):\n if reg == None:\n reg_f = 0\n reg_g = 0\n reg_Hv = lambda v: 0\n else:\n reg_f, reg_g, reg_Hv = reg(w)\n global d, C\n n, d = X.shape\n \n if batchsize is not None:\n n_mini = np.int(np.floor(n*batchsize))\n index_batch = np.random.choice(n, n_mini, replace = False)\n# print(index_batch[:5])\n X = X[index_batch,:]\n Y = Y[index_batch]\n n = n_mini\n \n C = int(len(w)/d)\n w = w.reshape(d*C,1) #[d*C x 1]\n W = w.reshape(C,d).T #[d x C]\n XW = np.dot(X,W) #[n x C]\n large_vals = np.amax(XW,axis = 1).reshape(n, 1) #[n,1 ]\n large_vals = np.maximum(0,large_vals) #M(x), [n, 1]\n #XW - M(x)/<Xi,Wc> - M(x), [n x C]\n XW_trick = XW - np.tile(large_vals, (1, C))\n #sum over b to calc alphax, [n x total_C]\n XW_1_trick = np.append(-large_vals, XW_trick,axis = 1)\n #alphax, [n, ]\n sum_exp_trick = np.sum(np.exp(XW_1_trick), axis = 1).reshape(n, 1)\n log_sum_exp_trick = large_vals + np.log(sum_exp_trick) #[n, 1]\n \n f = np.sum(log_sum_exp_trick)/n - np.sum(np.sum(XW*Y,axis=1))/n + reg_f\n if arg == 'f': \n return f\n inv_sum_exp = 1./sum_exp_trick\n inv_sum_exp = np.tile(inv_sum_exp,(1,np.size(W,axis = 1)))\n S = inv_sum_exp*np.exp(XW_trick) #h(x,w), [n x C] \n g = np.dot(X.T, S-Y)/n #[d x C]\n g = g.T.flatten().reshape(d*C,1) + reg_g#[d*C, ] \n\n if arg == 'g':\n return g \n \n if arg == 'fg':\n return f, g\n\n if HProp == None:\n Hv = lambda v: hessvec(X, S, n, v) + reg_Hv(v) \n return f, g, Hv\n else:\n n_H = np.int(np.floor(n*HProp))\n idx_H = np.random.choice(n, n_H, replace = False)\n inv_sum_exp_H = 1./(sum_exp_trick[idx_H,:])\n inv_sum_exp_H = np.tile(inv_sum_exp_H,(1,np.size(W,axis = 1)))\n S_H = inv_sum_exp_H*np.exp(XW_trick[idx_H,:]) #h(x,w), [S x C] \n Hv = lambda v: hessvec(X[idx_H,:], S_H, n_H, v) + reg_Hv(v)\n return f, g, Hv\n \n if arg == 'explicit':\n f = np.sum(log_sum_exp_trick) - np.sum(np.sum(XW*Y,axis=1)) + reg_f\n g = np.dot(X.T, S-Y) #[d x C]\n g = g.T.flatten().reshape(d*C,1) + reg_g #[d*C, ]\n Hv = lambda v: hessvec(X, S, v, reg)\n #S is divided into C parts {1:b}U{c}, [n, ] * C\n S_cell = np.split(S.T,C) \n SX_cell = np.array([]).reshape(n,0) #empty [n x 0] array\n SX_self_cell = np.array([]).reshape(0,0)\n 
for column in S_cell:\n c = spdiags(column,0,n,n) #value of the b/c class\n SX_1_cell = np.dot(c.A,X) #WX = W x X,half of W, [n x d]\n #fill results from columns, [n x d*C]\n SX_cell = np.c_[SX_cell, SX_1_cell] \n SX_cross = np.dot(SX_cell.T,SX_cell) #take square, [d*C x d*C] \n #X.T x WX half of W, [d x d]\n SX_1self_cell = np.dot(X.T,SX_1_cell) \n #put [d x d] in diag, W_cc, [d*C x d*C] \n SX_self_cell = block_diag(SX_self_cell,SX_1self_cell) \n H = SX_self_cell - SX_cross #compute W_cc, [d*C x d*C]\n H = H + 2*reg*identity(d*C)\n return f, g, Hv, H", "def softmaxCostAndGradient(predicted, target, outputVectors, dataset):\n\n ### YOUR CODE HERE\n scores = outputVectors.dot(predicted.T) # shape = (V, 1)\n y_hat = softmax(scores)\n cost = -scores[target] + np.log(np.sum(np.exp(scores)))\n one_hot_target = np.zeros_like(y_hat)\n one_hot_target[target] = 1\n grad = np.outer((y_hat - one_hot_target), predicted)\n gradPred = outputVectors.T.dot(y_hat - one_hot_target)\n \n '''\n final_predicted = predicted.dot(outputVectors.T)\n probability = softmax(final_predicted)\n cost = -np.log(probability[target])\n \n one_hot_target = np.zeros_like(probability)\n one_hot_target[target] += 1\n dlogits = probability - one_hot_target\n grad = np.outer(predicted, dlogits).T\n gradPred = outputVectors.T.dot(dlogits)\n '''\n ### END YOUR CODE\n\n return cost, gradPred, grad", "def pred(W, X):\n A = softmax_stable(X.dot(W))\n return np.argmax(A, axis = 1)", "def softmax(x):\n e_x = np.exp(x - np.max(x))\n return e_x / e_x.sum(axis=0)", "def softmax_loss_vectorized(W, X, y, reg):\n\n #############################################################################\n # TODO: Compute the softmax.ipynb loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n train_images = X.shape[0]\n # Store all the scores in a matrix\n all_scores = np.dot(X,W)\n #First, calculate the normalizing constant for numeric stability\n constant = np.max(all_scores,axis=1)\n normalized_scores = np.transpose(np.subtract(np.transpose(all_scores),constant))\n\n #Then, calculate softmax for the correct scores\n exp_scores = np.exp(all_scores)\n # First, keep track of the sum of values per row\n exp_sum = np.sum(exp_scores,axis=1)\n\n # Finally, calculate the softmax score for every entry\n softmax_scores = np.transpose(exp_scores)/exp_sum # useful when computing gradient\n softmax_scores = np.transpose(softmax_scores)\n # And then, compute the loss\n loss_score = softmax_scores[range(train_images),y]\n loss_score = -1 * np.log(loss_score) #taking the logarithm\n loss += np.sum(loss_score)\n\n #Normalize and regularize the loss\n loss /= train_images\n loss += 0.5*reg*np.sum(W*W)\n\n #Finally, calculate a vectorized gradient\n\n # Calculate the derivative at the correct label\n softmax_scores[range(train_images),y] -= 1\n # Then, make a matrix containing all the gradient values\n gradient_values = np.dot(np.transpose(X),softmax_scores)\n gradient_values = gradient_values\n\n #FINALLY, update the gradient\n dW+= gradient_values\n #And normalize and regularize it\n dW /= train_images\n dW += reg*W\n\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW", "def softmax_loss(scores, y):\r\n N = scores.shape[0] # number of input data\r\n\r\n # compute data loss\r\n shifted_logits = scores - np.max(scores, axis=1, keepdims=True)\r\n Z = np.sum(np.exp(shifted_logits), axis=1, keepdims=True)\r\n log_probs = shifted_logits - np.log(Z)\r\n probs = np.exp(log_probs)\r\n loss = -np.sum(log_probs[range(N), y]) / N\r\n\r\n # Compute gradient of loss function w.r.t. scores\r\n dscores = probs.copy()\r\n dscores[range(N), y] -= 1\r\n dscores /= N\r\n \r\n return loss, dscores", "def softmax(x):\n x_exp = np.exp(x)\n x_sum = np.sum(x_exp, axis=1, keepdims=True)\n s = x_exp / x_sum\n \n return s", "def softmax_loss_vectorized(W, X, y, reg):\n # Initialize the loss and gradient to zero.\n loss = 0.0\n dW = np.zeros_like(W)\n\n #############################################################################\n # TODO: Compute the softmax loss and its gradient using no explicit loops. #\n # Store the loss in loss and the gradient in dW. If you are not careful #\n # here, it is easy to run into numeric instability. Don't forget the #\n # regularization! 
#\n #############################################################################\n\n num_classes = W.shape[1]\n num_train = X.shape[0]\n\n # Calculate scores for each classifier (column in the weight matrix W)\n # acting on each training sample (row in X)\n scores = X.dot(W)\n \n # Normalization trick to resolve numerical instability\n # when dealing with the large exponential terms.\n scores -= np.max(scores)\n\n # Cache some terms that are used repeatedly.\n exp_scores = np.exp(scores)\n sum_exp_scores = np.sum(exp_scores,axis=1)\n\n # Find the correct classifier scores for each training sample\n correct_class_scores = scores[np.arange(num_train), y]\n\n # Update the loss\n loss = np.sum(-correct_class_scores + np.log(sum_exp_scores))\n\n # Update the gradient\n correct_indices = np.zeros(scores.shape)\n correct_indices[np.arange(num_train), y] = 1\n\n dW -= correct_indices.T.dot(X).T\n dW += X.T.dot((exp_scores.T / sum_exp_scores).T)\n \n # Average over the training samples\n loss /= num_train\n dW /= num_train\n \n # Add regularization.\n loss += 0.5 * reg * np.sum(W * W)\n dW += reg * W\n\n pass\n #############################################################################\n # END OF YOUR CODE #\n #############################################################################\n\n return loss, dW" ]
[ "0.77309406", "0.76029754", "0.75815004", "0.7345373", "0.7316555", "0.7265773", "0.72579056", "0.72342706", "0.7226934", "0.72213393", "0.7203286", "0.71891123", "0.713433", "0.71218145", "0.7092656", "0.7017707", "0.69664174", "0.6941942", "0.693169", "0.69267535", "0.6920247", "0.69167215", "0.69166845", "0.6902661", "0.68815243", "0.68711543", "0.68626106", "0.68621993", "0.68389505", "0.6825831", "0.6822717", "0.6810713", "0.6808892", "0.6802318", "0.67983186", "0.6763175", "0.6752769", "0.6745803", "0.6724247", "0.6724247", "0.67205447", "0.671932", "0.6706289", "0.67049116", "0.6681859", "0.66787505", "0.6677297", "0.6673336", "0.665494", "0.6631346", "0.66088074", "0.6607533", "0.65828854", "0.6567757", "0.65631723", "0.65579456", "0.6535626", "0.65345955", "0.6526599", "0.65238607", "0.6512038", "0.6511488", "0.6508471", "0.6505849", "0.64962375", "0.6494031", "0.6483469", "0.648092", "0.648092", "0.648092", "0.648092", "0.648092", "0.648092", "0.648092", "0.648092", "0.6463299", "0.6460482", "0.64555496", "0.64555496", "0.644398", "0.6443572", "0.6439401", "0.64138234", "0.6411188", "0.64003414", "0.6399044", "0.6396093", "0.6394289", "0.6384336", "0.6382958", "0.6377422", "0.6353616", "0.6352739", "0.6345235", "0.63434684", "0.63374186", "0.6330419", "0.6324013", "0.63219005", "0.6318117" ]
0.6760264
36