query
stringlengths
9
9.05k
document
stringlengths
10
222k
metadata
dict
negatives
listlengths
30
30
negative_scores
listlengths
30
30
document_score
stringlengths
4
10
document_rank
stringclasses
2 values
Freelancer submits work for the client to approve.
def request_submission_approval(self, milestone_id, note2client, amount): data = {} data['milestone_id'] = milestone_id data['note2client'] = note2client data['amount'] = amount url = 'fp/submissions' return self.post(url, data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def approve(self):\n self._check_if_open()\n data = {\"approved\": True}\n return self.post(\"approve\", data)", "def test_approve(self):\n\n username,userpass = self.testdata.find_account_for('toolsubmitter')\n\n self.utils.account.login_as(username,userpass)\n\n self.contribtool.approve(TOOLNAME,TOOLLICENSEDATA)", "def approve(self):\n self.approved = True\n self.quest_node['approved'] = True\n graph.push(self.quest_node)\n self.payout()", "def jao_approve(self):\n print \"JAO approved this form. Current state:\", self.state", "def action_approve(self):\n if not self.date_approve:\n self.date_approve = fields.Datetime.now()\n\n config = self.env['ka_hr_payroll.config'].default_config()\n if check_rapel_status(self, config):\n self.action_rapel()\n else:\n self.action_done()", "def hod_approve(self):\n print \"HOD approved this form. Current state:\", self.state", "def approve(self, feedback=None):\n self.hit.generate_connection()\n self.hit.connection.approve_assignment(self.mturk_id, feedback=feedback)\n self.update()", "def test_approve(self):\r\n request = RequestFactory()\r\n post = request.post(self.url, {'field': 'mod_queue',\r\n 'op': 'approve',\r\n 1: [self.problem_id.to_deprecated_string(), '2.0', '2']})\r\n view.approve(post, self.course_id, 'mod_queue')\r\n problem_hints = XModuleUserStateSummaryField.objects.get(field_name='mod_queue', usage_id=self.problem_id).value\r\n self.assertTrue('2.0' not in json.loads(problem_hints) or len(json.loads(problem_hints)['2.0']) == 0)\r\n problem_hints = XModuleUserStateSummaryField.objects.get(field_name='hints', usage_id=self.problem_id).value\r\n self.assertTrue(json.loads(problem_hints)['2.0']['2'] == ['Hint 2', 1])\r\n self.assertTrue(len(json.loads(problem_hints)['2.0']) == 2)", "def referee_synopsis_approval(request):\n \n if not validate_request(request): return redirect(reverse(URL_FORBIDDEN))\n \n user = auth.get_user(request)\n referee = Referee.objects.get(user = user)\n \n if request.method == 
\"POST\":\n id = int(request.POST['id'])\n isApproved = (request.POST['isApproved'] == \"True\")\n feedback = request.POST['feedback']\n\n thesis = Thesis.objects.get(id = id)\n panelMember = PanelMember.objects.get(thesis = thesis, referee = referee)\n\n dict = {'status' : 'OK', 'message' : 'Your response has been submitted successfully' }\n \n if isApproved:\n panelMember.status = 'A'\n panelMember.save()\n else:\n panelMember.status = 'R'\n panelMember.save()\n if referee.type == 'I':\n invite_indian_referees(thesis)\n else:\n invite_foreign_referees(thesis)\n\n return HttpResponse(json.dumps(dict), content_type = 'application/json')\n else:\n return redirect(reverse(URL_BAD_REQUEST))", "def dr_approve(self):\n print \"DR approved this form. Current state:\", self.state", "def test_approve_agreement(self):\n pass", "def test_submit_for_corporate_approval(self):\n # set up the prescription to be ready for corporate approval\n p = self.make('Prescription')\n self.set_cbas_attributes(p)\n self.assertTrue(p.can_corporate_approve)\n self.assertTrue(p.planning_status == p.PLANNING_DRAFT)\n\n # submit for corporate approval\n url = reverse('admin:prescription_prescription_corporate_approve',\n args=(str(p.id),))\n response = self.client.post(url, {}, follow=True)\n self.assertEqual(response.status_code, 200)\n\n # refresh prescription object\n p = Prescription.objects.get(name='test')\n self.assertTrue(p.planning_status == p.PLANNING_SUBMITTED)\n self.assertTrue(p.planning_status_modified is not None)", "def holdingpenreview():\n objectid = request.values.get('objectid', 0, type=int)\n approved = request.values.get('approved', False, type=bool)\n ticket = request.values.get('ticket', False, type=bool)\n if not objectid:\n abort(400)\n workflow_object = workflow_object_class.get(objectid)\n workflow_object.extra_data[\"approved\"] = approved\n workflow_object.extra_data[\"ticket\"] = ticket\n workflow_object.save()\n db.session.commit()\n\n 
resume.delay(workflow_object.id)\n\n return render_template('authors/forms/new_review_accepted.html',\n approved=approved)", "def approve(self, approver: str, to: str, amount, key: bytes):\n raw_tx = self.approve_build_transaction(approver, to, amount)\n signed_tx = self._sign(raw_tx, key)\n self.send_and_wait(signed_tx)", "def Approve(self, request, global_params=None):\n config = self.GetMethodConfig('Approve')\n return self._RunMethod(\n config, request, global_params=global_params)", "def submitToReview(self, obj):\n self.wftool.doActionFor(obj, \"submit\")", "def approve(self, request, object_id, extra_context=None):\n obj = self.get_object(request, unquote(object_id))\n title = self._approve_title(obj)\n\n AdminAddApprovalForm = self._approve_approval_form(request)\n\n form = AdminAddApprovalForm(initial={'prescription': obj})\n if request.method == 'POST':\n url = reverse('admin:prescription_prescription_detail',\n args=[str(obj.id)])\n if obj.approval_status == obj.APPROVAL_DRAFT and obj.can_approve:\n # create an approval\n obj.approval_status = obj.APPROVAL_SUBMITTED\n obj.approval_status_modified = timezone.now()\n obj.save()\n self.message_user(\n request, \"Successfully submitted for approval.\")\n return HttpResponseRedirect(url)\n elif obj.approval_status == obj.APPROVAL_SUBMITTED:\n if request.POST.get('_cancel'):\n obj.clear_approvals()\n msg = 'Delete: Clearing Approvals/Endorsements', 'Burn ID: {}, Deleted by: {}'. format(obj.burn_id, request.user.get_full_name())\n logger.warning(msg)\n support_email('Delete: Clearing Approvals/Endorsements', msg)\n\n self.message_user(\n request, \"Approval rejected. 
ePFP is now draft.\")\n return HttpResponseRedirect(url)\n\n form = AdminAddApprovalForm(request.POST,\n initial={'prescription': obj})\n if form.is_valid():\n approval = form.save(commit=False)\n approval.prescription = obj\n approval.creator = request.user\n approval.modifier = request.user\n approval.save()\n obj.approval_status = obj.APPROVAL_APPROVED\n obj.approval_status_modified = timezone.now()\n obj.save()\n self.message_user(\n request, \"Successfully approved.\")\n return HttpResponseRedirect(url)\n elif obj.is_approved:\n if obj.is_closed:\n self.message_user(\n request, \"You can't extend an approval after the \"\n \"prescribed fire plan has been closed.\")\n return HttpResponseRedirect(url)\n if request.POST.get('_cancel'):\n self.message_user(\n request, \"Didn't extend approval.\")\n return HttpResponseRedirect(url)\n else:\n approval = obj.current_approval\n if approval and approval.extension_count < 3:\n approval.extension_count = approval.extension_count + 1\n approval.valid_to = approval.next_valid_to\n approval.save()\n self.message_user(\n request, \"Successfully extended approval.\")\n else:\n self.message_user(request, \"You can't extend an \"\n \"approval more than 3 times.\")\n return HttpResponseRedirect(url)\n\n admin_form, media = self._approve_form(request, obj, form)\n\n context = {\n 'title': title,\n 'current': obj,\n 'form': admin_form,\n 'media': media,\n 'errors': None,\n }\n return TemplateResponse(request, \"admin/prescription/prescription/\"\n \"approval.html\", context,\n current_app=self.admin_site.name)", "def approve_me(message):\n users = hf.get_users()\n for user in users:\n if user[\"id\"] == message._get_user_id():\n if user[\"approval_level\"] == \"unapproved\": # Unknown\n message.reply(Strings['APPROVER_REQUEST'])\n admins = hf.get_admins()\n names = []\n for admin in admins:\n names.append(admin[\"name\"])\n\n approval_message = Strings[\n 'APPROVER_REQUEST_DETAIL'].format(\">, <@\".join(names), user[\"name\"])\n\n 
#message._client.send_message(config.AUTH_CHANNEL, approval_message)\n message._client.send_message(public_channel, approval_message)\n else:\n message.reply(\":x: Your approval level is already: \" + str(user[\"approval_level\"]))", "def approveList (self, list) : \n for request in list :\n self.approve (request)", "def start_approval_process(self, request=None):\r\n # done here to avoid circular import\r\n from cbhooks.models import HookPoint\r\n\r\n hook_point = HookPoint.objects.filter(name=\"order_approval\").first()\r\n orch_actions = cbhooks._get_orchestration_actions_to_run(hook_point)\r\n if orch_actions:\r\n #the orchestration action NEEDs to be first in order to allow a hook\r\n # to model the approval process correctly and not have something\r\n # auto-approve before the hook is run\r\n logger.debug(\"Order Approval orchestration actions exist, so bypassing built-in approver emails.\")\r\n try:\r\n cbhooks.run_hooks(\"order_approval\", order=self)\r\n except cbhooks.exceptions.HookFailureException as e:\r\n msg = _(\"Failed to run hook for order approval. 
Status: {status},\"\r\n \" Output: {output}, Errors: {errors}\").format(status=e.status, output=e.output, errors=e.errors)\r\n raise CloudBoltException(msg)\r\n return \"\"\r\n\r\n #now that the hooks have run, check if it should be auto-approved\r\n profile = request.get_user_profile()\r\n if self.is_multilevel_approval():\r\n self.approve_my_grms(profile)\r\n\r\n if self.should_auto_approve():\r\n logger.debug(\"Order can be automatically approved, attempting approval by {}\".format(self.owner))\r\n jobs, msg = self.approve(self.owner)\r\n if jobs:\r\n msg = render_to_string(\r\n 'orders/approved_msg.html', {\r\n 'order': self,\r\n 'autoapproved': True,\r\n 'num_jobs': len(jobs),\r\n 'extramsg': msg,\r\n })\r\n return msg\r\n else:\r\n # No auto approval and no approval hooks, so go with\r\n # the default process of emailing a set of approvers, unless the\r\n # owner is an approver.\r\n msg = _(\"Order #{order_id} has been submitted for approval. \").format(order_id=self.id)\r\n msg += orders.mail.email_approvers(self, request)\r\n logger.debug(msg)\r\n return msg", "def approve(_spender: address, _amount: uint256) -> bool:\n\n self.allowed[msg.sender][_spender] = _amount\n log.Approval(msg.sender, _spender, _amount)\n return True", "def submit_loan_request(self):\n try:\n payload = self.get_data()\n self.mainwindow.show_dialog(\"Processing loan request\",\n 'Your request is being processed and '\n 'your documents are being uploaded.')\n if self.mainwindow.api.create_loan_request(self.mainwindow.app.user, payload):\n if self.mainwindow.api.failed_documents:\n self.mainwindow.show_dialog(\"Documents error\", 'Some of the documents could not be sent.')\n else:\n self.mainwindow.show_dialog(\"Loan request created\", 'Your loan request has been sent.')\n else:\n self.mainwindow.show_dialog(\"Loan request error\", 'You can only have a single loan request.')\n except ValueError:\n self.mainwindow.show_dialog(\"Loan request error\", 'You didn\\'t enter the required 
information.')", "def approve(self, approved_by=\"system\"):\n\n self.confirm_state(completed=False, cancelled=False)\n\n self.is_valid(\"task invalid before approval\")\n\n # We approve the task before running actions,\n # that way if something goes wrong we know if it was approved,\n # when it was approved, and who approved it.\n self.task.approved = True\n self.task.approved_on = timezone.now()\n self.task.approved_by = approved_by\n self.task.save()\n\n # approve all actions\n for action in self.actions:\n try:\n action.approve()\n except Exception as e:\n handle_task_error(e, self.task, error_text=\"while approving task\")\n\n self.is_valid(\"task invalid after approval\")\n\n need_token = any([act.need_token for act in self.actions])\n if need_token:\n self._create_token()\n else:\n self.submit()", "def submit(request):\n if not request.user.is_authenticated():\n return proceed(request)\n # If dev has already agreed, continue to next step.\n user = UserProfile.objects.get(pk=request.user.id)\n if not user.read_dev_agreement:\n return redirect('submit.app.terms')\n return manifest(request)", "def submit_essay(self, expected_assessment_type, expected_prompt):\r\n\r\n # Check the assessment type and prompt\r\n self.assertEqual(self.open_response.assessment_type, expected_assessment_type)\r\n self.assertIn(expected_prompt, self.open_response.prompt)\r\n\r\n # Enter a submission, which will trigger a pre-defined response from the XQueue stub.\r\n self.open_response.set_response(self.submission)\r\n\r\n # Save the response and expect some UI feedback\r\n self.open_response.save_response()\r\n self.assertEqual(\r\n self.open_response.alert_message,\r\n \"Answer saved, but not yet submitted.\"\r\n )\r\n\r\n # Submit the response\r\n self.open_response.submit_response()", "def test_apply_corporate_approval(self):\n p = self.make('Prescription')\n self.set_cbas_attributes(p)\n p.planning_status = p.PLANNING_SUBMITTED\n p.save()\n\n url = 
reverse('admin:prescription_prescription_corporate_approve',\n args=(str(p.id),))\n self.client.login(username='fmsb', password='test')\n response = self.client.post(url, {}, follow=True)\n self.assertEqual(response.status_code, 200)\n\n p = Prescription.objects.get(name='test')\n self.assertTrue(p.planning_status == p.PLANNING_APPROVED)\n self.assertTrue(p.planning_status_modified is not None)", "def grr_request_approval(line: Text) -> None:\n args = grr_request_approval.parser.parse_args(shlex.split(line))\n magics_impl.grr_request_approval_impl(args.reason, args.approvers, args.wait)", "def reviewhandler():\n objectid = request.values.get('objectid', 0, type=int)\n if not objectid:\n abort(400)\n\n form = AuthorUpdateForm(formdata=request.form)\n visitor = DataExporter()\n visitor.visit(form)\n\n workflow_object = workflow_object_class.get(objectid)\n workflow_object.extra_data[\"approved\"] = True\n workflow_object.extra_data[\"ticket\"] = request.form.get('ticket') == \"True\"\n workflow_object.extra_data['formdata'] = visitor.data\n workflow_object.data = formdata_to_model(workflow_object, visitor.data)\n workflow_object.save()\n db.session.commit()\n\n resume.delay(workflow_object.id)\n\n return render_template('authors/forms/new_review_accepted.html',\n approved=True)", "def JOBSUBMIT_Send_Approval_Request (parameters, curdir, form, user_info=None):\n global rn,sysno\n # variables declaration\n doctype = re.search(\".*/([^/]*)/([^/]*)/[^/]*$\",curdir).group(2)\n otheraddresses = parameters['addressesDAM']\n categformat = parameters['categformatDAM']\n # retrieve category\n categformat = categformat.replace(\"<CATEG>\",\"([^-]*)\")\n m_categ_search = re.match(categformat, rn)\n if m_categ_search is not None:\n if len(m_categ_search.groups()) > 0:\n ## Found a match for the category of this document. 
Get it:\n category = m_categ_search.group(1)\n else:\n ## This document has no category.\n category = \"unknown\"\n else:\n category = \"unknown\"\n\n # get record data\n date = get_file_contents(curdir, \"date\")\n title = get_file_contents(curdir, parameters['titleFile']).replace(\"\\n\",\"\")\n title += \" - %s\" % date\n contactname = get_file_contents(curdir, parameters['contactnamefile']).replace(\"\\n\",\", \")\n contactemail = get_file_contents(curdir, parameters['contactemailfile']).replace(\"\\n\",\", \")\n reference = get_file_contents(curdir, parameters['referencefile']).replace(\"\\n\",\", \")\n affiliation = get_file_contents(curdir, parameters['affiliationfile']).replace(\"\\n\",\", \")\n region = get_file_contents(curdir, parameters['regionfile']).replace(\"\\n\",\", \")\n rank = get_file_contents(curdir, parameters['rankfile']).replace(\"\\n\",\", \")\n field = get_file_contents(curdir, parameters['fieldfile']).replace(\"\\n\",\", \")\n experiments = get_file_contents(curdir, parameters['experimentsfile']).replace(\"\\n\",\", \")\n url = get_file_contents(curdir, parameters['urlfile']).replace(\"\\n\",\" \")\n date = get_file_contents(curdir, parameters['datefile']).replace(\"\\n\",\"\")\n abstract = get_file_contents(curdir, parameters['abstractfile'])\n\n # we get the referee password\n sth = run_sql(\"SELECT access FROM sbmAPPROVAL WHERE rn=%s\", (rn,))\n if len(sth) >0:\n access = sth[0][0]\n # Build referee's email address\n refereeaddress = \"\"\n # Try to retrieve the referee's email from the referee's database\n for user in acc_get_role_users(acc_get_role_id(\"referee_%s_%s\" % (doctype,category))):\n refereeaddress += user[1] + \",\"\n # And if there are general referees\n for user in acc_get_role_users(acc_get_role_id(\"referee_%s_*\" % doctype)):\n refereeaddress += user[1] + \",\"\n refereeaddress = re.sub(\",$\",\"\",refereeaddress)\n # Creation of the mail for the referee\n addresses = \"\"\n if refereeaddress != \"\":\n addresses = 
refereeaddress + \",\"\n if otheraddresses != \"\" and otheraddresses != CFG_WEBSUBMIT_JOBS_SUPPORT_EMAIL:\n addresses += otheraddresses\n else:\n addresses = re.sub(\",$\",\"\",addresses)\n record_url = \"%s/%s/%s\" % (CFG_SITE_URL, CFG_SITE_RECORD, sysno)\n title_referee = \"Request for approval of %s\" % rn\n mail_referee = \"\"\"\nThe document %(rn)s has been submitted to the Jobs database.\\nYour approval is requested on it.\n\nTitle: %(title)s\nContact name(s): %(contactname)s\nContact email(s): %(contactemail)s\nReference(s): %(reference)s\nAffliliation(s): %(affiliation)s\n\nRegion(s): %(region)s\nRank(s): %(rank)s\nField(s): %(field)s\nExperiments(s): %(experiments)s\n\nURL: %(url)s\n\nDeadline date: %(date)s\n\nDescription:\n%(abstract)s\n\nThe record will appear here:\n%(recordlink)s\n\nTo approve/reject the document, you should go to this URL:\\n%(access)s\\n\n \"\"\" % {'rn' : rn,\n 'title' : title,\n 'contactname' : contactname,\n 'contactemail' : contactemail,\n 'reference' : reference,\n 'affiliation' : affiliation,\n 'region' : region,\n 'rank' : rank,\n 'region' : region,\n 'field' : field,\n 'experiments' : experiments,\n 'url' : url,\n 'date' : date,\n 'abstract' : abstract,\n 'access' : \"%s/approve.py?access=%s\" % (CFG_SITE_URL, access),\n 'recordlink' : record_url\n }\n #Send mail to referee\n send_email(fromaddr=CFG_WEBSUBMIT_JOBS_FROMADDR, toaddr=CFG_WEBSUBMIT_JOBS_SUPPORT_EMAIL, subject=title_referee, \\\n content=mail_referee, footer=email_footer(support_email=CFG_WEBSUBMIT_JOBS_SUPPORT_EMAIL),\n copy_to_admin=CFG_WEBSUBMIT_COPY_MAILS_TO_ADMIN, bccaddr=addresses, replytoaddr=contactemail)\n return \"\"", "def approve(self):\n if (self.status == self.APPROVED):\n pass\n\n print ('starting approval process by adding events to the primary cal')\n\n primary_calendar = self.course.calendar_courses.get(primary=True)\n # print ('primary = ' + primary_calendar)\n for event in self.events.all():\n d = event.date\n start = 
datetime.datetime(d.year, d.month, d.day)\n start = timezone.make_aware(start, timezone.get_current_timezone())\n start = start + datetime.timedelta(hours=8)\n end = start + datetime.timedelta(hours=1)\n\n params = {\n 'calendar': primary_calendar,\n 'title': event.title,\n 'start': start,\n 'end': end\n }\n CalendarEvent.objects.create(**params)\n event.approved = True\n event.save()\n\n print ('trying to set syllabus to approved')\n\n try:\n syllabus = self.syllabus.all()[0]\n syllabus.approved = True\n syllabus.course = self.course\n syllabus.save()\n except:\n print ('dang, that failed, but continuing nonetheless.')\n pass\n\n\n print ('creating students from roster-students')\n\n\n for student in self.students.all():\n email = student.email\n if email:\n user = utils.get_or_create_user(email, student.first_name, student.last_name)\n school = self.course.domain\n user_student = utils.get_or_create_student(school, user)\n\n self.course.enroll_by_roster(user_student, self)\n\n student.approved = True\n student.save()\n\n print ('instructors')\n\n for instructor in self.instructors.all():\n instructor.approved = True\n instructor.save()\n\n print ('approving done')\n\n\n self.status = self.APPROVED\n self.save()\n\n add_notification(\n self.created_by.user,\n 'Your class set for {}, is approved and published!'.format(self.course)\n )" ]
[ "0.70053464", "0.68819475", "0.6813501", "0.67537534", "0.66796976", "0.6598325", "0.6511853", "0.6453754", "0.6377006", "0.6306528", "0.6282506", "0.62592375", "0.6228279", "0.6193639", "0.6178086", "0.6164796", "0.6145355", "0.61233836", "0.6102177", "0.60849494", "0.60825425", "0.60734856", "0.60684836", "0.6067842", "0.6021557", "0.6015952", "0.60146844", "0.60035604", "0.5989923", "0.5980949" ]
0.69773877
1
Get all submissions for specific milestone.
def get_milestone_submissions(self, milestone_id): data = {} url = 'fp/milestones/{0}/submissions'.format(milestone_id) return self.get(url, data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_submissions(subreddit, start_date, end_date, limit):\n\n return API.search_submissions(after=start_date, before=end_date,\n subreddit=subreddit, limit=limit)", "def get_all_submissions(self):\n\n submissions = []\n for chump in self.chumps:\n submissions.add(self.get_sbumissions(chump))\n return submissions", "def canvas_api_submissions(state, course_id, assignment_id):\n\n api = state.canvas_api()\n for submission in api.list_submissions(course_id, assignment_id):\n click.echo(str(submission))", "def get_submissions(self, test_id) -> Optional[List[TestsSubmissions]]:\n try:\n submissions = self.session.query(TestsSubmissions).filter(TestsSubmissions.test_id == test_id).all()\n return submissions\n except Exception as excpt:\n self.session.rollback()\n print(f'Couldn\\'t get tests: {excpt}')\n return None", "def GetSubmissions(self, parameters):\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/falconx-sandbox/GetSubmissions\n FULL_URL = self.base_url+'/falconx/entities/submissions/v1'\n HEADERS = self.headers\n PARAMS = parameters\n result = self.Result()\n try:\n response = requests.request(\"GET\", FULL_URL, params=PARAMS, headers=HEADERS, verify=False)\n returned = result(response.status_code, response.headers, response.json())\n except Exception as e:\n returned = result(500, {}, str(e))\n\n return returned", "def test_get_submissions():\n threads = list(get_submissions(TEST_SUBREDDIT, TEST_START_DATE, TEST_END_DATE, TEST_MAX))\n\n # prints the dictionary of variables for each submission\n for x in threads:\n print(x.d_)", "def get_submissions(self, limit=None):\n url = (\"https://api.imgur.com/3/account/{0}/submissions/\"\n \"{1}\".format(self.name, '{}'))\n resp = self._imgur._send_request(url, limit=limit)\n return [_get_album_or_image(thing, self._imgur) for thing in resp]", "def milestones(self, predicate=None):\n \n if predicate is None:\n return self._get(\"milestones\").json()\n else:\n return 
self._get(\"milestones/search\", params={\"predicate\":predicate}).json()", "def test_issue_get_milestones_list(self):\n pass", "def monthly_submissions(self, request, *args, **kwargs):\n # clear cache\n safe_delete(f\"{USER_PROFILE_PREFIX}{request.user.username}\")\n profile = self.get_object()\n month_param = self.request.query_params.get(\"month\", None)\n year_param = self.request.query_params.get(\"year\", None)\n\n # check if parameters are valid\n if month_param:\n if not month_param.isdigit() or int(month_param) not in range(1, 13):\n raise ValidationError(\"Invalid month provided as parameter\")\n if year_param:\n if not year_param.isdigit() or len(year_param) != 4:\n raise ValidationError(\"Invalid year provided as parameter\")\n\n # Use query parameter values for month and year\n # if none, use the current month and year\n now = datetime.datetime.now()\n month = month_param if month_param else now.month\n year = year_param if year_param else now.year\n\n instance_count = (\n Instance.objects.filter(\n xform__user=profile.user,\n xform__deleted_at__isnull=True,\n date_created__year=year,\n date_created__month=month,\n )\n .values(\"xform__shared\")\n .annotate(num_instances=Count(\"id\"))\n )\n\n serializer = MonthlySubmissionsSerializer(instance_count, many=True)\n return Response(serializer.data[0])", "def milestone(self, milestone_id):\r\n return milestones.Milestone(self, milestone_id)", "def getAllSubmissions(self):\r\n return [(ind, sub) for ind, sub in enumerate(self.submissions)]", "def tickets_in_milestone(self, milestone_names, milestone_start, end):\n\n db = self.env.get_read_db()\n cursor = db.cursor()\n try:\n cursor.execute(\"\"\"\n SELECT _snapshottime, id\n FROM ticket_bi_historical\n WHERE milestone IN ({0})\n AND _snapshottime >=%s\n AND _snapshottime <=%s \n ORDER BY _snapshottime ASC\n \"\"\".format(','.join(('%s',)*len(milestone_names))), milestone_names + [milestone_start, end])\n except Exception:\n db.rollback()\n 
self.log.exception('Unable to query the historical ticket table')\n return []\n\n data = {}\n for key, ticket in groupby(cursor, itemgetter(0)):\n data[key] = set([])\n for i in ticket:\n data[key].update([i[1]]) \n # Note no sorting necessary as qpPlot does this for us\n\n return data", "def get_submissions(username, problem, cookies, is_clicker, sesssion):\r\n url = '/'.join([courseserver, coursepath, course, 'submission_history', username, 'i4x:/', coursepath, 'problem', problem])\r\n user_agent = {'User-agent': 'csci3202-f13/edx-tools/progress ' + requests.utils.default_user_agent()}\r\n r = sesssion.get(url, cookies=cookies, headers=user_agent)\r\n content = r.text\r\n print 'getting data for ' + username\r\n return parse(content)", "def test_issue_get_milestone(self):\n pass", "def get_historical_submissions(subreddit, limit):\n past_30_months = [10, 9, 8, 7, 6, 5, 4, 3, 2, 1,\n 12, 11, 10, 9, 8, 7, 6, 5, 4, 3,\n 2, 1, 12, 11, 10, 9, 8, 7, 6, 5]\n\n all_submissions = []\n day = 0\n year = 2020\n hacky_year_flag = 0\n\n for month in past_30_months:\n # derive year\n if hacky_year_flag < 9:\n year = 2020\n if 9 < hacky_year_flag <= 21:\n year = 2019\n if hacky_year_flag > 22:\n year = 2018\n\n hacky_year_flag += 1\n\n # generate random day\n if month in [1, 3, 5, 7, 8, 10, 12]:\n day = random.randint(1, 31)\n if month in [4, 6, 9, 11]:\n day = random.randint(1, 30)\n if month in [2]:\n day = random.randint(1, 28)\n\n # generate random 4 hour time chunk\n start_hour = random.randint(0, 19)\n end_hour = start_hour + 4\n\n start_time = int(dt.datetime(year, month, day, start_hour, 0).timestamp())\n end_time = int(dt.datetime(year, month, day, end_hour, 0).timestamp())\n\n # gets submissions and adds submission dictionary to master list\n threads = list(get_submissions(subreddit, start_time, end_time, limit))\n for item in threads:\n all_submissions.append(item.d_)\n\n print('querying month:', hacky_year_flag)\n print('total submissions:', len(all_submissions))\n\n 
return all_submissions", "def load_submissions(assignment: Assignment, submissions: List[Dict]) -> List[Submission]:\n logger.info(\"Creating %s submissions via Canvas API\", len(submissions))\n\n result: List[Submission] = []\n for submission in submissions:\n result.append(\n assignment.submit(submission)\n )\n\n logger.info(\"Successfully created %s submissions\", len(submissions))\n\n return result", "def test_list_submission_serivces_for_project(self):\n pass", "def get_recent_submissions(self):\n logging.info(\"Retrieving submissions from the last hour\")\n submissions = list(self.subreddit.search('subreddit:{0}'.format(self.subreddit.display_name), time_filter='hour', syntax='lucene', sort='new'))\n logging.info(\"Found {0} submissions in /r/{1} from the last hour.\".format(len(submissions), self.subreddit.display_name))\n return submissions", "def query_jobs(repo_name, revision):\n return buildapi.query_jobs_schedule(repo_name, revision)", "def QuerySubmissions(self, parameters):\n # [GET] https://assets.falcon.crowdstrike.com/support/api/swagger.html#/falconx-sandbox/QuerySubmissions\n FULL_URL = self.base_url+'/falconx/queries/submissions/v1'\n HEADERS = self.headers\n PARAMS = parameters\n result = self.Result()\n try:\n response = requests.request(\"GET\", FULL_URL, params=PARAMS, headers=HEADERS, verify=False)\n returned = result(response.status_code, response.headers, response.json())\n except Exception as e:\n returned = result(500, {}, str(e))\n\n return returned", "def get_public_milestones(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Destiny2/Milestones/\"))", "async def scrape_submissions(self):\n subreddit_origin = await self.reddit.subreddit(self.subreddit)\n\n submission_count = 0\n async for submission in subreddit_origin.new(limit=self.limit):\n if self.memory.contains(submission.id):\n continue\n\n 
self.memory.add(submission.id)\n\n # Parse Submission\n submission = self.parse_submission(submission)\n\n # Save in Pub/Sub\n if self.enable_publish:\n self.publish(submission)\n\n submission_count += 1\n\n return submission_count", "def backlog_milestone():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"project\", help=\"name of the project\")\n parser.add_argument(\"milestone\", help=\"name of the milestone\")\n\n args = parser.parse_args()\n\n session = GithubSession()\n\n project_data = session.get_project(args.project)\n\n milestone_data = session.get_milestone(args.milestone)\n milestone_title = milestone_data[\"title\"]\n\n backlog_data = session.get_column(project_data, \"backlog\")\n icebox_data = session.get_column(project_data, \"icebox\")\n\n results = session.search(f'repo:openslate/openslate milestone:\"{milestone_title}\"')\n for search_data in results[\"items\"]:\n issue_data = get_issue(search_data[\"number\"]).issue\n issue_card = session.get_card(project_data, issue_data)\n\n if issue_card[\"column_url\"] == icebox_data[\"url\"]:\n session.move_card(issue_card, backlog_data)\n\n print(\".\", end=\"\")", "def milestone(self, milestone_id):\r\n return IssueMilestone(self, milestone_id)", "def get_milestones(username, skillpath):\n muser = database_controller.get_user(username).id\n mskillid = database_controller.get_skill(skillpath).id\n milestonelist = MilestoneAssociation.query.filter(MilestoneAssociation.milestone_users_id == muser,\n MilestoneAssociation.milestone_skill_id == mskillid).all()\n milestone_models = []\n for milestone in milestonelist:\n date = database_controller.get_date_from_id(milestone.milestone_date_id).date\n level = milestone.level\n milestone_models.append(MilestoneModel(date, milestone.comment, level))\n return milestone_models", "def search_submissions():\n r = req('GET', SUB_API + 'search/submissions', params=handle_filters())\n submissions = []\n for submission in demisto.get(r.json(), 'data.items'):\n 
sample = sample_to_readable(demisto.get(submission, 'item'))\n sample['ID'] = demisto.get(submission, 'item.sample')\n sample['ThreatScore'] = demisto.get(submission, 'item.analysis.threat_score')\n submissions.append(sample)\n demisto.results({\n 'Type': entryTypes['note'],\n 'EntryContext': {'ThreatGrid.Sample(val.ID == obj.ID)': submissions},\n 'HumanReadable': tableToMarkdown('ThreatGrid - Submission Search', submissions,\n ['ID', 'Filename', 'State', 'Status', 'MD5', 'SHA1',\n 'SHA256', 'SubmittedAt', 'ThreatScore']),\n 'ContentsFormat': formats['json'],\n 'Contents': r.json()\n })", "def milestones(self):\r\n return IssueMilestones(self)", "async def run(self) -> list:\n issues = self.adapter.get_sprint_board_issues()\n pull_requests = await self.adapter.get_pull_requests(issues)\n return pull_requests", "def _get_milestone(self, req):\n\n milestone_id = req.args['id']\n try:\n milestone = Milestone(self.env, milestone_id)\n except ResourceNotFound:\n milestone = None\n\n return milestone" ]
[ "0.6087795", "0.6016333", "0.5785119", "0.57595295", "0.5754489", "0.567518", "0.5617337", "0.54758453", "0.53993785", "0.53712153", "0.5342899", "0.53208053", "0.5313582", "0.53092235", "0.5305582", "0.5304025", "0.528234", "0.5224733", "0.51917493", "0.5191734", "0.5182368", "0.5170629", "0.5104739", "0.50509506", "0.50478053", "0.5032541", "0.50318575", "0.5010177", "0.4985661", "0.4949954" ]
0.8213427
0
Get active milestone for specific contract.
def get_active_milestone(self, contract_reference): data = {} url = 'fp/milestones/statuses/active/contracts/{0}'.format(contract_reference) return self.get(url, data)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def GetLatestMilestone():\n # Use CQ Master target to get latest milestone.\n latest_url = LATEST_URL % {'target': constants.CQ_MASTER}\n gs_ctx = gs.GSContext()\n\n logging.info('Getting latest milestone from %s', latest_url)\n try:\n content = gs_ctx.Cat(latest_url).strip()\n\n # Expected syntax is like the following: \"R35-1234.5.6-rc7\".\n assert content.startswith('R')\n milestone = content.split('-')[0][1:]\n logging.info('Latest milestone determined to be: %s', milestone)\n return int(milestone)\n\n except gs.GSNoSuchKey:\n raise GetMilestoneError('LATEST file missing: %s' % latest_url)", "def _get_milestone(self, req):\n\n milestone_id = req.args['id']\n try:\n milestone = Milestone(self.env, milestone_id)\n except ResourceNotFound:\n milestone = None\n\n return milestone", "def milestones_active(self, within_component=None):\n if within_component is not None:\n if isinstance(within_component, str):\n within_component = self.components(\"identifier = %s\" % within_component)[0]\n predicate = \"\"\"\n (StartDate == nil || StartDate < NOW()) \n AND \n (EndDate == nil || EndDate > NOW()) \n AND\n (component.identifier == nil OR %s BEGINSWITH component.fullName)\n \"\"\"\n return self.milestones(predicate % (_obj_id(within_component), within_component[\"fullName\"]))\n else:\n predicate = \"\"\"\n (StartDate == nil || StartDate < NOW()) \n AND \n (EndDate == nil || EndDate > NOW())\n \"\"\"\n return self.milestones(predicate)", "def test_issue_get_milestone(self):\n pass", "def milestone(self, milestone_id):\r\n return milestones.Milestone(self, milestone_id)", "def milestones(self, predicate=None):\n \n if predicate is None:\n return self._get(\"milestones\").json()\n else:\n return self._get(\"milestones/search\", params={\"predicate\":predicate}).json()", "def current_mission(self):\n try:\n return self.missions[self.status.mission_task_list[0]]\n except KeyError:\n return None", "def backlog_milestone():\n parser = argparse.ArgumentParser()\n 
parser.add_argument(\"project\", help=\"name of the project\")\n parser.add_argument(\"milestone\", help=\"name of the milestone\")\n\n args = parser.parse_args()\n\n session = GithubSession()\n\n project_data = session.get_project(args.project)\n\n milestone_data = session.get_milestone(args.milestone)\n milestone_title = milestone_data[\"title\"]\n\n backlog_data = session.get_column(project_data, \"backlog\")\n icebox_data = session.get_column(project_data, \"icebox\")\n\n results = session.search(f'repo:openslate/openslate milestone:\"{milestone_title}\"')\n for search_data in results[\"items\"]:\n issue_data = get_issue(search_data[\"number\"]).issue\n issue_card = session.get_card(project_data, issue_data)\n\n if issue_card[\"column_url\"] == icebox_data[\"url\"]:\n session.move_card(issue_card, backlog_data)\n\n print(\".\", end=\"\")", "def getModelMilestones(self):\n if self.__jobInfo.engModelMilestones is not None:\n return json.loads(self.__jobInfo.engModelMilestones)\n else:\n return None", "def test_issue_get_milestones_list(self):\n pass", "def milestone(self, milestone_id):\r\n return IssueMilestone(self, milestone_id)", "def get_updates_milestone_for(self, milestone_name, project_name):\n updates_milestone_name = milestone_name + '-updates'\n\n project = self.get_lp_client().projects[project_name]\n milestone = project.getMilestone(name=updates_milestone_name)\n\n if not milestone:\n self.logging.error(\n \"Can't find the milestone '%s' on project '%s'.\",\n updates_milestone_name,\n project_name\n )\n\n return None\n\n return milestone", "def get_contract(self, name):\n return self.contracts[name]", "def get_activity():\n try:\n activity = Activity.objects.filter(active=1).latest('id')\n except Activity.DoesNotExist:\n activity = None\n return activity", "def current(self) -> Optional['outputs.CommitmentPeriodResponse']:\n return pulumi.get(self, \"current\")", "def milestones(self):\r\n return milestones.Milestones(self)", "def 
milestones(self):\r\n return IssueMilestones(self)", "def test_returns_milestone_if_exists(self):\n repo = gnome.gh.repo_from_callback(MockCallback())\n repo._milestones = (MockFooMilestoneWrapper(),)\n found = repo.get_milestone('foo')\n self.assertTrue(found)", "def get_public_milestones(self):\n # TODO: Assuming first server is good - need to make fallback logic\n return self.session.get_any(\"{base}{request_url}\".format(base=self.servers[0],\n request_url=F\"/Destiny2/Milestones/\"))", "def test_one_contract(self):\n correct_contract = factories.ProjectContract(\n projects=self.projects, status=ProjectContract.STATUS_CURRENT)\n response = self._get()\n self.assertEqual(response.status_code, 200)\n contracts = response.context['contracts']\n self.assertEqual(len(contracts), 1)\n self.assertTrue(correct_contract in contracts)", "def guess_start_date(self, milestone):\n\n db = self.env.get_read_db()\n cursor = db.cursor()\n cursor.execute(\"\"\"\n SELECT _snapshottime\n FROM ticket_bi_historical\n WHERE milestone = %s\n ORDER BY _snapshottime ASC\n LIMIT 1\n \"\"\", [milestone.name])\n\n res = cursor.fetchone()\n if res:\n try:\n return res[0].strftime('%Y-%m-%d')\n except AttributeError as e:\n self.log(e)", "def set_milestone(self):\n\t\te = Event()\n\t\tself.queue.put(e)\n\t\treturn e", "def bringC(self):\n endC = self.countCannibalOnEnd()\n if endC < 1:\n return None\n else:\n newStart = self.start[0:2] + str(4-endC) + self.start[3]\n newEnd = self.end[0:2] + str(endC-1) + self.end[3]\n return MissionaryState(newStart,newEnd,\"bringC\")", "def get_status(conn, aid):\n # Select row with mid\n cursor = conn.execute(\"SELECT status FROM AffirmedAssignments WHERE aid=?\", (aid,))\n row = cursor.fetchone()\n\n if row is None:\n # Assignments entry does not exist.\n return False\n\n # Assignments entry exists\n return row[0]", "def get_start_date(self, req, milestone):\n\n if milestone.start:\n return milestone.start.date()\n elif 'approx_start_date' in req.args:\n 
return datetime.strptime(req.args['approx_start_date'], '%Y-%m-%d').date() + timedelta(days=1)", "def get_current():\n return Semester.objects.filter(start_date__lte=timezone.now()).order_by('start_date').reverse()[0]", "def current_workflow():\n try:\n return current_worker_pool.workflow\n except AttributeError:\n return None", "def get_project(self, id):\n for project in self.projects:\n if project.id == int(id):\n ret_val = project\n break\n else:\n ret_val = None\n\n return ret_val", "def process_project(self, project_name):\n self.logging.debug('Retrieving project %s..', project_name)\n\n try:\n project = self.get_lp_client().projects[project_name]\n except KeyError:\n self.logging.error(\n \"Project %s wasn't found. Skipped..\",\n project_name\n )\n else:\n if project:\n self.logging.debug(\n 'Retrieving active milestone %s..',\n self.get_new_milestone_name()\n )\n\n new_milestone = project.getMilestone(\n name=self.get_new_milestone_name()\n )\n self.get_stats()[project.name] = {}\n\n for old_milestone_name in self.get_old_milestone_names():\n if self.is_limit_achived():\n break\n\n self.process_milestone_on_project(\n project, old_milestone_name, new_milestone\n )\n\n else:\n self.logging.debug(\n \"Project %s wasn't found. Skipped..\",\n project_name\n )", "def getactivity(self) -> Optional[ba.Activity]:\n if self._activity is None:\n return None\n return self._activity()" ]
[ "0.5884148", "0.5760047", "0.56226885", "0.5472332", "0.539106", "0.5107177", "0.5066164", "0.5040432", "0.50038373", "0.4964783", "0.49621624", "0.49297655", "0.4894369", "0.488449", "0.4875231", "0.48544598", "0.48340154", "0.48127288", "0.4737784", "0.4665102", "0.46433663", "0.45705658", "0.45645636", "0.45446935", "0.45389324", "0.45145777", "0.44532162", "0.44522005", "0.4449233", "0.44479567" ]
0.8093473
0
Strip mathematics, content of chosen sequences, sequences and braces from TeX source.
def stripTex(file): S_TEXT = 0 S_INLINE = 1 S_DISPLAY = 2 S_DOLLAR_IN = 3 S_DOLLAR_OUT = 4 S_SEQUENCE = 5 S_EXPECT_ARG = 6 S_OPTIONAL = 7 # sequences whose 1st argument content is not desired text forbidden = { 'begin', 'end', 'ref', 'eqref', 'usepackage', 'documentclass', 'probbatch', 'probno', 'probpoints', 'probsolauthors', 'probsolvers', 'probavg', 'illfig', 'fullfig', 'plotfig', 'eq' } # -- strip comments -- lines = [] for line in file.readlines(): line += '%' lines.append(line[:line.index('%')]) # TODO \% # -- strip mathematics and chosen sequence's arguments -- # finite state machine with depth counter state = S_TEXT mode = S_TEXT depth = 0 sequence = '' bracketStack = [] # contains either None or index in out where sequence argument starts out = [] for c in ''.join(lines): #print(c, state) if state == S_TEXT: if c == '\\': state = S_SEQUENCE out.append(c) elif c == '$': state = S_DOLLAR_IN elif c == '{': out.append(c) bracketStack.append((len(out), None)) elif c == '}': try: out.append(c) i, seq = bracketStack.pop() # not to shadow "global" sequence if seq != None and seq in forbidden: out = out[:i] except IndexError: print('Unmatched right bracket.') break else: out.append(c) elif state == S_INLINE: if c == '\\': state = S_SEQUENCE mode = S_INLINE elif c == '$': state = S_TEXT mode = S_TEXT elif c == '{': bracketStack.append((len(out), None)) elif c == '}': try: bracketStack.pop() except IndexError: print('Unmatched right bracket.') break elif state == S_DISPLAY: if c == '\\': state = S_SEQUENCE mode = S_DISPLAY elif c == '$': state = S_DOLLAR_OUT elif c == '{': bracketStack.append((len(out), None)) elif c == '}': try: bracketStack.pop() except IndexError: print('Unmatched right bracket.') break elif state == S_DOLLAR_OUT: if c == '$': state = S_TEXT mode = S_TEXT else: pass # stay in display mode elif state == S_DOLLAR_IN: if c == '$': state = S_DISPLAY mode = state else: state = S_INLINE mode = state elif state == S_SEQUENCE: if c in ascii_letters: if 
mode == S_TEXT: out.append(c) sequence += c elif c == '[': if mode == S_TEXT: out.append(c) state = S_OPTIONAL elif c == '{': state = mode if out[-1] == '\\': # backslashed brace out.append(c) else: bracketStack.append((len(out), sequence)) sequence = '' if mode == S_TEXT: out.append(c) elif c == '}': try: out.append(c) i, seq = bracketStack.pop() # not to shadow "global" sequence if seq != None and seq in forbidden: out = out[:i] except IndexError: print('Unmatched right bracket.') break else: if mode == S_TEXT: out.append(c) state = mode sequence = '' elif state == S_OPTIONAL: # here we suppose no nested [, ] if c == ']': if mode == S_TEXT: out.append(c) state = S_EXPECT_ARG else: if mode == S_TEXT: out.append(c) elif state == S_EXPECT_ARG: if c == '{': bracketStack.append((len(out), sequence)) sequence = '' if mode == S_TEXT: out.append(c) else: state = mode if mode == S_TEXT: out.append(c) else: print('Invalid state') break # end for noMath = ''.join(out) # -- finally simple regexp substitution -- noMath = re.sub('~', ' ', noMath) noMath = re.sub(r'\\[a-zA-Z]+(\[[^\]]*\])?', '', noMath) noMath = re.sub(r'[{}]', '', noMath) print(noMath)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def detex(tex):\n \n #tex = '\\n'.join(reformat(tex, listed=True)[1:])\n global subs\n \n for old, new in subs.iteritems():\n tex = tex.replace(old, new)\n \n return tex.strip()", "def cleanup_code(self, content):\n\n # remove ```py\\n```\n if content.startswith('```') and content.endswith('```'):\n if content[-4] == '\\n':\n return '\\n'.join(content.split('\\n')[1:-1])\n return '\\n'.join(content.split('\\n')[1:]).rstrip('`')\n\n # remove `foo`\n return content.strip('` \\n')", "def cleanup_code( content):\n # remove ```py\\n```\n if content.startswith('```') and content.endswith('```'):\n return '\\n'.join(content.split('\\n')[1:-1])\n\n # remove `foo`\n return content.strip('` \\n')", "def unquote():\n def _unquote(quoted):\n return quoted.subexpression\n yield (\"(λ &[any] . any)\", _unquote)", "def strip(self, src):\r\n # single-quoted character\r\n p = \"('.')\"\r\n \r\n # double-quoted string\r\n p += \"|(\\\"(?:[^\\\"\\\\\\\\]|\\\\\\\\.)*\\\")\"\r\n \r\n # single and multi-line comment\r\n p += \"|(//.*?$)|(/\\\\*[^*]*(?:\\\\*(?!/)[^*]*)*\\\\*/)\"\r\n \r\n # pre-processor directive\r\n p += \"|\" + \"(^\\\\s*#.*?$)\"\r\n\r\n regex = re.compile(p, re.MULTILINE)\r\n return regex.sub(' ', src)", "def cleanup_code(self, content):\n # remove ```py\\n```\n if content.startswith('```') and content.endswith('```'):\n return '\\n'.join(content.split('\\n')[1:-1])\n\n # remove `foo`\n return content.strip('` \\n')", "def cleanup_code(content):\n # remove ```py\\n```\n if content.startswith('```') and content.endswith('```'):\n return '\\n'.join(content.split('\\n')[1:-1])\n # remove `foo`\n return content.strip('` \\n')", "def cleanup_code(content):\n # remove ```py\\n```\n if content.startswith('```') and content.endswith('```'):\n return '\\n'.join(content.split('\\n')[1:-1])\n\n # remove `foo`\n return content.strip('` \\n')", "def refined_text(text):\n import re\n text = text.replace('<e1>','')\n text = text.replace('</e1>','')\n text = 
text.replace('<e2>','')\n text = text.replace('</e2>','')\n\n text = text[1:-1] # trim quotes\n # text = text.replace('\"','')\n # text = text.replace(',','')\n # text = text.replace('.','')\n # text = text.replace(';','')\n # text = text.replace('`','')\n # text = text.replace('\\'','')\n # text = text.replace('(','')\n # text = text.replace(')','')\n # text = text.replace('/','')\n\n return text", "def process_text(text):\n text = re.sub(r'<@>\\s+|<s>\\s+|</s>\\s+|<p>\\s+|</p>\\s+|\\s+\\,|\\'s|\\'|\\;|\\(|\\)|\\-\\-\\s+|\\s+\\.', '', text)\n text = re.sub(r'\\.\\,', '. ,', text)\n text = re.sub(r'\\,', '', text)\n text = re.sub(r'\\$', '$ ', text)\n text = re.sub(r'\\%', ' %', text)\n text = re.sub(r'\\s\\\"\\s', ' ', text)\n text = re.sub(r'\\.\\s+', '. ', text)\n text = text.lower()\n return text", "def remove_special_tags(text):\n clean = re.compile('{.*?}')\n return re.sub(clean, '', text)", "def _preprocess(self, source):\n source = source.replace(u'\\n', u'').strip()\n source = re.sub(r'<br\\s*\\/?\\s*>', u' ', source, re.I)\n source = re.sub(r'\\s\\s+', u' ', source)\n return source", "def sanitizeTex(texstring):\n\n newstring = (\n texstring.replace(r\"\\\\%\", r\"\\%\")\n .replace(r\"\\\\\", r\"\\tabularnewline\")\n .replace(\"\\$\", \"$\")\n .replace(\"\\_\", \"_\")\n .replace(\"ug/L\", \"\\si[per-mode=symbol]{\\micro\\gram\\per\\liter}\")\n .replace(r\"\\textbackslashtimes\", r\"\\times\")\n .replace(r\"\\textbackslash\", \"\")\n .replace(r\"\\textasciicircum\", r\"^\")\n .replace(\"\\{\", \"{\")\n .replace(\"\\}\", \"}\")\n )\n return newstring", "def clean(text):\r\n #clean structure\r\n text = re.sub(r\"(?<!\\|)Amount ?\\| ?Ingredient(?!\\|)\", \"|Amount|Ingredient|\", text)\r\n text = re.sub(r\"----\\|----\\n\\n\", r\"----|----\\n\", text)\r\n text = re.sub(r\"(?<!\\|)----\\|----(?!\\|)\", \"|----|----|\", text)\r\n text = re.sub(\"## Directions\", \"## Cooking Instructions\", text)\r\n\r\n #fractions \r\n for pat, rep in repls:\r\n text = 
re.sub(pat, rep, text, flags=re.IGNORECASE)\r\n\r\n #links\r\n def fix_link(match):\r\n return \"](../\"+re.sub(\" \", \"-\", fix_title(match.group(1)))+\")\"\r\n text = re.sub(r\"\\]\\((.*?)\\)\", fix_link, text)\r\n \r\n lines = text.split(\"\\n\")\r\n new_text = []\r\n #add spaces to the end of lines\r\n for line in lines:\r\n match = re.search(r\" $\", line)\r\n if match:\r\n new_text.append(line)\r\n else:\r\n new_text.append(line+\" \")\r\n #remove spaces from the end of lines\r\n # for line in lines:\r\n # match = re.search(r\" +$\", line)\r\n # if match:\r\n # new_text.append(line[:-len(match.group(0))])\r\n # else:\r\n # new_text.append(line)\r\n\r\n text = \"\\n\".join(new_text)\r\n\r\n return text", "def hide_magic(source: str) -> str:\n\n def _hide_magic_line(line: str) -> str:\n return f\"###MAGIC###{line}\" if contains_magic(line) else line\n\n return \"\\n\".join(_hide_magic_line(line) for line in source.split(\"\\n\"))", "def raw_string(seq):\n\n def f(s):\n \"\"\" Filter latex \"\"\"\n r = s.replace('\\\\', '\\\\\\\\').replace('_', '\\_').replace('^', '\\^')\n return r\n\n return [ f(k) for k in seq ]", "def strip_tags(text):\n # Remove header tags\n p = re.compile(\"<\\?.+?\\?>\") \n text = re.sub(p, \"\", text)\n\n # Remove <HOO>, <p> and <s> tags\n text = text.replace(\"<p>\",\"\")\n text = text.replace(\"</p>\",\"\")\n text = text.replace(\"<s>\",\"\")\n text = text.replace(\"</s>\",\"\")\n text = text.replace(\"<HOO>\",\"\")\n text = text.replace(\"</HOO>\",\"\")\n\n return text", "def remove_tags(raw):\n cleanr = re.compile('<.*?>')\n cleantext = re.sub(cleanr, ' ', raw)\n return cleantext", "def minimize_source(source):\n if not isinstance(source, mitogen.core.UnicodeType):\n source = source.decode('utf-8')\n tokens = tokenize.generate_tokens(StringIO(source).readline)\n tokens = strip_comments(tokens)\n tokens = strip_docstrings(tokens)\n tokens = reindent(tokens)\n return tokenize.untokenize(tokens)", "def unpreprocess(self, text, 
desegment=True):\n\n if self.model_name in SEGMENTED_MODELS and desegment:\n text = self.desegment(text)\n\n # removes the spaces around quotation marks ex: i \" ate \" an apple --> i \"ate\" an apple\n # https://stackoverflow.com/a/53436792/5381220\n text = re.sub(white_spaced_double_quotation_regex, '\"' + r\"\\1\" + '\"', text)\n text = re.sub(white_spaced_single_quotation_regex, \"'\" + r\"\\1\" + \"'\", text)\n text = re.sub(white_spaced_back_quotation_regex, \"\\`\" + r\"\\1\" + \"\\`\", text)\n text = re.sub(white_spaced_back_quotation_regex, \"\\—\" + r\"\\1\" + \"\\—\", text)\n\n # during generation, sometimes the models don't put a space after the dot, this handles it\n text = text.replace(\".\", \" . \")\n text = \" \".join(text.split())\n\n # handle decimals\n text = re.sub(r\"(\\d+) \\. (\\d+)\", r\"\\1.\\2\", text)\n text = re.sub(r\"(\\d+) \\, (\\d+)\", r\"\\1,\\2\", text)\n\n text = re.sub(left_and_right_spaced_chars, r\"\\1\", text)\n text = re.sub(left_spaced_chars, r\"\\1\", text)\n text = re.sub(right_spaced_chars, r\"\\1\", text)\n\n return text", "def sanitize(text):\n #text = re.sub(r'[*]',r'\\*',text) \n text = re.sub(r'~',r'\\~',text) \n #text = re.sub(r'<',r'\\textless',text) \n #text = re.sub(r'>',r'\\textgreater',text) \n text = re.sub(r'\\|',r'\\|',text) \n text = re.sub(r'_',r'\\\\_',text) \n return text", "def unMarkupCommentsAndStrings(self, content):\n\n def replaceMarkups(match):\n groupdict = match.groupdict()\n if groupdict[\"str\"] is not None:\n return self.strings[int(match.group(\"str\"))]\n elif groupdict[\"comment\"] is not None:\n return self.comments[int(match.group(\"comment\"))]\n else:\n assert False\n\n unMarkedup = markups.sub(replaceMarkups, content)\n\n return unMarkedup", "def clean_source_tweet(source):\n src = \"\"\n if source is not None:\n src = re.sub(r\"\"\"<.*?>\"\"\", \"\", source)\n return src", "def clean_code(ls):\r\n ls = remove_white_space(ls)\r\n ls = remove_comments(ls)\r\n ls = 
remove_empty_lines(ls)\r\n\r\n return ls", "def cleanup(self, body):\n body = re.sub(\"&gt;\", \">\", body) # Recode HTML codes\n body = re.sub(\"&lt;\", \"<\", body)\n body = re.sub(\"&amp;\", \"&\", body)\n body = re.sub(\"&nbsp;\", \" \", body)\n # body = re.sub(\"^\\[deleted\\]$\", \"\", body) # [deleted] and [removed]: delete entire row from dataframe\n body = re.sub(\"http\\S+\", \" \", body) # Remove URL\n body = re.sub(\"/r/\\S+|/u/\\S+\", \" \", body) # Remove /r/subreddit, /u/user\n # body = re.sub(\"(>.*?\\\\n\\\\n)+\", \" \", body) # Remove quoted comments\n # body = re.sub(\"[[:cntrl:]]\", \" \", body) # Remove control characters (\\n, \\b) doesn't work for unicode\n body = \"\".join(ch for ch in body if unicodedata.category(ch)[0]!=\"C\") # Remove control characters (\\n, \\b) etc.\n body = re.sub(\"'\", \"\", body) # Remove single quotation marks (contractions)\n # body = re.sub(\"[[:punct:]]\", \" \", body) # Remove punctuation\n body = \"\".join(ch for ch in body if unicodedata.category(ch)[0]!=\"P\") # Remove punctuation\n body = re.sub(\"\\\\s+\", \" \", body) # Replace multiple spaces with single space\n body = body.strip()\n body = body.lower() # Lower case\n return body # Return body (cleaned up text)", "def _remove_pre_formatting(self):\n preformatted_wrappers = [\n 'pre',\n 'code'\n ]\n\n for wrapper in preformatted_wrappers:\n for formatter in FORMATTERS:\n tag = FORMATTERS[formatter]\n character = formatter\n\n regex = r'(<{w}>.*)<{t}>(.*)</{t}>(.*</{w}>)'.format(\n t=tag,\n w=wrapper\n )\n repl = r'\\g<1>{c}\\g<2>{c}\\g<3>'.format(c=character)\n self.cleaned_html = re.sub(regex, repl, self.cleaned_html)", "def preprocess(self, text):\r\n return text", "def normalize(self, what):\n txt = strippedtxt(what, [\"\\002\", \"\\003\"])\n txt = re.sub(\"\\s+\", \" \", what)\n txt = stripcolor(txt)\n txt = txt.replace(\"\\002\", \"*\")\n txt = txt.replace(\"<b>\", \"*\")\n txt = txt.replace(\"</b>\", \"*\")\n txt = txt.replace(\"<i>\", \"\")\n txt 
= txt.replace(\"</i>\", \"\")\n txt = txt.replace(\"&lt;b&gt;\", \"*\")\n txt = txt.replace(\"&lt;/b&gt;\", \"*\")\n txt = txt.replace(\"&lt;i&gt;\", \"\")\n txt = txt.replace(\"&lt;/i&gt;\", \"\")\n return txt", "def clean_text(text2, project_key):\n\n text = text2\n text = return_text_without_headlines(text)\n # remove text written between double curly braces\n text = re.sub(r\"{{code}}.*{{code}}\", \"code.\", text)\n text = re.sub(r\"{code.*{code}\", \"code.\", text)\n text = re.sub(r\"{code:java}.*{code:java}\", \"code.\", text)\n text = re.sub(r\"{noformat}.*{noformat}\", \"code.\", text)\n text = re.sub(r\"{{monospaced}}.*{{monospaced}}\", \"code.\", text)\n text = re.sub(r'<script type=\"text/javascript\">.*</noscript>', 'code.', text)\n text = re.sub(r\"'''.*'''\", \"code\", text)\n text = text.replace('<p>&nbsp;</p>', \"\")\n text = text.replace('<div>&nbsp;</div>', \"\")\n text = text.replace('&nbsp;', \" \")\n # remove URLs link\n text = re.sub(r\"<a href=.*</a>\", \"url. \", text)\n text = re.sub(r\"http\\S+\", \"url. \", text)\n text = re.sub(r\"hdfs://\\S+\", \"url. \", text)\n text = re.sub(r\"tcp://\\S+\", \"url. \", text)\n text = re.sub(r\"webhdfs://\\S+\", \"url. \", text)\n text = re.sub(r\":/\\S+\", \"url. \", text)\n text = re.sub(r\"\\S+.com \", \"url. 
\", text)\n text = re.sub(r\"N/A]\", \" \", text)\n text = \" \".join(x for x in text.split() if not x.endswith('.com'))\n text = \" \".join(x for x in text.split() if not x.endswith('.com*'))\n text = \" \".join(x for x in text.split() if not x.endswith('.org'))\n text = \" \".join(x for x in text.split() if not x.endswith('.xml'))\n text = \" \".join(x for x in text.split() if not x.startswith('*javax.xml.'))\n text = \" \".join(x for x in text.split() if not x.startswith('javax.xml.'))\n # remove Image attachments\n text = re.sub(r\"<p><img alt=.></p>\", \"image.\", text)\n text = re.sub(r\"{}-\\d+\".format(project_key), \"issue\", text)\n # remove date\n text = re.sub(r'(\\w{4})-(\\d{1,2})-(\\d{1,2}) ', 'date.', text)\n text = re.sub(r'(\\w{3,4,5})-(\\d{1,2})-(\\d{4})', 'date.', text)\n text = re.sub(r'(\\d{1,2})/(\\d{1,2})/(\\d{4})', 'date.', text)\n text = re.sub(r'(\\w{3}). (\\d{1,2}), (\\d{4})', 'date.', text)\n text = re.sub(r'(\\w{3}). (\\d{1,2}) (\\d{4})', 'date.', text)\n text = re.sub(r'&lt;= Today’s Date AND', 'date.', text)\n text = re.sub(r'yyyy-mm-dd', 'date', text)\n # remove text written between small braces\n text = re.sub(r'<.+?>', \"\", text)\n text = text.replace(\"e.g.,\", \" \")\n text = text.replace(\"e.g.\", \" \")\n text = text.replace(\"i.e.,\", \" \")\n text = text.replace(\"i.e.\", \" \")\n # replace non-breaking space with regular space\n text = text.replace(u'\\xa0', u' ')\n # replace all punctuations with space\n text = text.replace('-->', \" \")\n text = text.replace('--', \" \")\n text = text.replace('-', \" \")\n text = text.replace('/', \" \")\n text = text.replace('&amp;', \" \")\n text = text.replace(' * ', \". 
\")\n text = re.sub(r\"\\\"|\\#|\\“|\\*|\\'|\\]|\\^|\\`|\\(|\\)|\\~\", \"\", text)\n text = re.sub(r\"\\\"|\\$|\\%|\\&|\\/|\\|\\=|\\>|\\<|\\@|\\[|\\\\|\\]|\\{|\\||\\}\", \" \", text)\n text = text.replace('$', \"\")\n text = text.replace('?', \".\")\n text = text.replace('+', \" \")\n text = re.sub(r\" \\d\\.\\d\\.N \", \" \", text)\n text = re.sub(r\" \\d\\.\\d\\.b.\", \" \", text)\n text = re.sub(r\" \\d\\.\\d\\.b \", \" \", text)\n text = re.sub(r\"\\d\\.\\d\\.N\", \" \", text)\n text = re.sub(r\"\\d\\.\\d\\.X\", \" \", text)\n text = re.sub(r\"v\\d\\.\\d\\.\\d+\", \" \", text)\n text = re.sub(r\"V\\d\\.\\d\\.\\d+\", \" \", text)\n text = re.sub(r\"v\\d\\.\\d+\", \" \", text)\n text = re.sub(r\"V\\d\\.\\d+\", \" \", text)\n text = re.sub(r\"\\d\\.\\d+\", \" \", text)\n text = re.sub(r\"\\d\\.\\d\\.\\d+\", \" \", text)\n text = text.replace(\"V1\", \" \")\n text = text.replace(\"v1\", \" \")\n # remove digits from text\n text = re.sub(r\"\\d+\", \"\", text)\n text = text.replace('lt;=', \" \")\n text = text.replace('.!', \".\")\n text = text.replace('!.', \".\")\n text = text.replace('!', \".\")\n text = text.replace('... ', \". \")\n text = text.replace('.. ', \". \")\n text = text.replace('..', \".\")\n text = text.replace('. . . ', \". \")\n text = text.replace('. . ', \". \")\n text = text.replace('. . ', \". \")\n text = text.replace(' .', \".\")\n text = text.replace('. . ', \". \")\n text = text.replace('. . ', \". 
\")\n text = text.replace(':.', \".\")\n text = text.replace(' :', \" \")\n text = text.lower()\n text = text.replace('..', \".\")\n text = ' '.join(text.split())\n\n return text", "def clean_latex(tuple_entry):\n def _clean_latex(tuple_entry_string):\n processed = False\n for symbol in ['_', '*']:\n if symbol in tuple_entry_string:\n tuple_entry_string = tuple_entry_string.replace(symbol, '\\\\' + symbol)\n processed = True\n if processed:\n return '\\\\texttt{' + tuple_entry_string + '}'\n else:\n return tuple_entry_string\n\n return _clean_latex(str(tuple_entry))" ]
[ "0.6479548", "0.5733868", "0.5682467", "0.56789285", "0.56783", "0.5668825", "0.56371295", "0.5628203", "0.56041145", "0.55943316", "0.55638814", "0.5480592", "0.5398655", "0.53941464", "0.53648967", "0.5355816", "0.53553224", "0.531055", "0.5261205", "0.5241949", "0.52383715", "0.52364945", "0.52311295", "0.522351", "0.52148604", "0.51983696", "0.5197308", "0.51940835", "0.5192724", "0.5185245" ]
0.7133635
0
Splits out the IP, Port, Weight from the server argument for l7pools
def parse_server(ctx, param, values):
    """Click callback: parse ``ip:port:weight`` strings into server dicts.

    Each value must contain exactly three colon-separated fields; anything
    else raises an ArgumentError naming the offending entry.
    """
    parsed = []
    for entry in values:
        parts = entry.split(':')
        if len(parts) != 3:
            raise exceptions.ArgumentError(
                f"--server needs a port and a weight. {entry} improperly formatted")
        address, port, weight = parts
        parsed.append({'address': address, 'port': port, 'weight': weight})
    return parsed
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_pools():\n pools = ch_core.hookenv.action_get('pools')\n if pools:\n return [p.strip() for p in pools.split(',')]\n return None", "def recompose_ip(self, wl):\n ips = []\n r = re.search('([\\d\\.]+)\\(([\\d\\|]+)\\)', wl)\n base = r.group(1)\n items = re.split(r'[|]',r.group(2))\n for i in items:\n ips.append(\"%s%s\" % (base, i))\n return ips", "def getPools(self):\n data = self.connect('get','pools',None)\n return data", "def make_ip(chunk1, chunk2, chunk3, chunk4):# {{{\n\n ip = []\n for i1 in chunk1:\n for i2 in chunk2:\n for i3 in chunk3:\n for i4 in chunk4:\n ip += [str(i1) + '.' + str(i2) + '.' + str(i3) + '.' + str(i4)]\n return ip", "def split_network_line(line):\n fields = line.strip().split()\n receive_field = fields[1]\n transmit_field = fields[2]\n (recv_calls, rx_octets) = receive_field.split('/')\n (send_calls, tx_octets) = transmit_field.split('/')\n return (int(recv_calls), int(rx_octets), int(send_calls), int(tx_octets))", "def ip_address_pools(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:\n return pulumi.get(self, \"ip_address_pools\")", "def get_pools():\n gclass = get_class(\"dhcpPool\")\n if gclass is None:\n logger.error(\"failed to get dhcpPool\")\n return None\n\n pools = {}\n for obj in gclass:\n if \"attributes\" in obj[obj.keys()[0]]:\n attr = obj[obj.keys()[0]][\"attributes\"]\n for r in [\"className\", \"dn\", \"id\", \"type\", \"startIp\", \n \"endIp\", \"freeIPs\"]:\n if r not in attr:\n logger.error(\"missing %s, invalid object: %s\" % (\n r, pretty_print(obj)))\n return None\n ip = ipv4_to_int(attr[\"startIp\"])\n if ip is None:\n logger.error(\"failed to convert ipv4 address for %s\" % obj)\n return None\n p = {\n \"className\": attr[\"className\"],\n \"dn\": attr[\"dn\"],\n \"id\": attr[\"id\"],\n \"type\": attr[\"type\"],\n \"address\": ip,\n \"address_str\": attr[\"startIp\"],\n \"freeIPs\": attr[\"freeIPs\"]\n }\n if ip not in pools:\n pools[ip] = {\"bad_lease\":[], \"good_lease\":[], \"pools\":[],\n 
\"type\":attr[\"className\"], \"state\":\"\", \"address\":ip}\n pools[ip][\"pools\"].append(p)\n\n # loop through all entries in pool and update state\n for ip in pools:\n state = \"recovery\"\n for p in pools[ip][\"pools\"]:\n if p[\"type\"]!=\"recovery\": state = p[\"type\"]\n pools[ip][\"state\"] = state\n return pools", "def get_pools():\n poolinfostr = fork_and_get_output(\"zpool list -H -o all\".split())\n header = get_zpool_header()\n poolinfo = poolinfostr.splitlines()\n poolobjs = []\n for poolstr in poolinfo:\n poolobjs.append(DataZFS(poolstr, header, 'pool'))\n return poolobjs", "def l7pool_add(env, identifier, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n pool_main = {\n 'name': args.get('name'),\n 'loadBalancingAlgorithm': args.get('method'),\n 'protocol': args.get('protocol')\n }\n\n pool_members = list(args.get('server'))\n\n pool_health = {\n 'interval': args.get('healthinterval'),\n 'timeout': args.get('healthtimeout'),\n 'maxRetries': args.get('healthretry'),\n 'urlPath': args.get('healthpath')\n }\n\n pool_sticky = {\n 'type': args.get('sticky')\n }\n\n try:\n mgr.add_lb_l7_pool(uuid, pool_main, pool_members, pool_health, pool_sticky)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def parseHostList( ipstring ):\r\n\r\n # ideally, we should be able to handle these cases:\r\n # w.x.y.z, .x.y.z, .y.z, .z\r\n # w.x.y.a-b, .x.y.a-b, .x.a-b, .a-b\r\n # w.x.y.z-a.b.c.d, w.x.y-a.b.c, w.x-a.b, w-a\r\n # we also need to be able to parse CIDR ranges. Urgh. w.x.y.z/0\r\n \r\n # ...but for the sake of simplicity we'll implement a subset, consisting of these cases:\r\n # 1. w.x.y.z\r\n # 2. w.x.y.z1-zN\r\n # 3. 
.z1-.zN\r\n\r\n currentNetwork = '0.0.0'\r\n groups = ipstring.split(',') \r\n iplist = []\r\n for i in groups:\r\n\r\n octets = i.split('.')\r\n if len(octets) == 4: # cases 1 and 2\r\n currentNetwork = \"%s.%s.%s\" % (octets[0],octets[1],octets[2])\r\n iprange = getRange(octets[3])\r\n ips = [\"%s.%s\" % (currentNetwork,i) for i in iprange]\r\n\r\n elif len(octets) == 2: # case 3\r\n network = currentNetwork\r\n iprange = getRange(octets[1])\r\n ips = [\"%s.%s\" % (currentNetwork,i) for i in iprange]\r\n \r\n else:\r\n print 'syntax error in specifying host list!'\r\n sys.exit(1)\r\n \r\n iplist += ips\r\n\r\n return uniq(iplist) # get rid of repeats\r", "def get_pool_list(mnode):\n ret, out, _ = g.run(mnode, \"gluster pool list --xml\", log_level='DEBUG')\n if ret != 0:\n g.log.error(\"Failed to execute 'pool list' on node %s. \"\n \"Hence failed to parse the pool list.\", mnode)\n return None\n\n try:\n root = etree.XML(out)\n except etree.ParseError:\n g.log.error(\"Failed to parse the gluster pool list xml output.\")\n return None\n\n pool_list_list = []\n for peer in root.findall(\"peerStatus/peer\"):\n peer_dict = {}\n for element in peer.getchildren():\n if element.tag == \"hostname\" and element.text == 'localhost':\n element.text = mnode\n if element.tag == \"hostnames\":\n hostnames_list = []\n for hostname in element.getchildren():\n hostnames_list.append(hostname.text)\n element.text = hostnames_list\n peer_dict[element.tag] = element.text\n\n pool_list_list.append(peer_dict)\n return pool_list_list", "def __init__(self,server_list):\n self.workers=[]\n self.worker_by_name={}\n worker_id = 1\n for host,port in server_list:\n # Add the uid here can help with port conflicts, but only works\n # on Unix clusters. 
We really need to work out a daemon service\n # model that makes the port mess transparent.\n port = port #+ os.getuid()\n new_worker = sync_cluster.standard_sync_client(host,port,worker_id)\n self.workers.append(new_worker)\n self.worker_by_name[host] = new_worker\n worker_id = worker_id + 1", "def _get_pools():\n conn = libvirt.open(None)\n try:\n _spsfs = list()\n _spsnetfs = list()\n if conn:\n # file system pool\n _spsfs = conn.listAllStoragePools(flags=128)\n # nfs pool\n _spsnetfs = conn.listAllStoragePools(flags=256)\n else:\n _logger.error('Failed to contact hypervisor')\n raise ValueError('Failed to contact hypervisor.')\n except libvirt.libvirtError as e:\n _logger.error('Failed to collect vm pool data: %s', str(e))\n raise ValueError('Failed to collect vm pool data.') from e\n finally:\n conn.close()\n return _spsfs, _spsnetfs", "def pull_server_parameters(self, executor: Executor) -> None:\n request = Weights.Request()\n request.id = self._worker_id\n self._future_weights = self._cli['weights'].call_async(request)\n rclpy.spin_until_future_complete(self, self._future_weights, executor)", "def get_sampler_pool():\n return SAMPLER_POOL, NUM_SAMPLER_WORKERS", "def Chunks(l):\n return_list = [[]]\n counter = 0\n index = 0\n for i in l:\n # Size is split in half due to the max size being a sum of src and dst.\n if counter > (self._ADDRESS_LENGTH_LIMIT/2):\n counter = 0\n index += 1\n return_list.append([])\n if i.version == 6:\n counter += self._IPV6_SIZE\n else:\n counter += 1\n return_list[index].append(i)\n return return_list", "def getHostInfo():", "def parse_port_req(self, sock):\n try:\n host_ip = self.s.getsockname()[0] # Get local IPv4 addr of client.\n host_port = sock.getsockname()[1] # Get opened port of socket.\n # PORT requires parameters split up as:\n # octet1,octet2,octet3,octet4,p1,p2\n list_csv_ip = host_ip.split('.') # Split octets into a list.\n port_params = \"\"\n for octet in list_csv_ip:\n port_params += octet + \",\"\n # Parse port 
into PORT command's expected parameter.\n p1 = str((host_port - (host_port % 256)) / 256)\n p2 = str(host_port % 256)\n port_params += p1 + \",\" + p2\n except:\n return \"\", \"\", \"\"\n return port_params, host_ip, host_port", "def get_pool_info(_ns, pool, human_friendly):\n size = size2str(pool.TotalManagedSpace, human_friendly)\n return (pool.InstanceID,\n pool.ElementName,\n pool.ElementName,\n size,\n \"volume group (LVM)\")", "def _get_ip_pool_for_string_ip(self, ip):\n pool = 'default'\n ip_obj = ipaddress.ip_address(six.text_type(ip))\n ip_pools = self._issue_api_request(\"access_network_ip_pools\")\n for ip_pool, ipdata in ip_pools.items():\n for access, adata in ipdata['network_paths'].items():\n if not adata.get('start_ip'):\n continue\n pool_if = ipaddress.ip_interface(\n \"/\".join((adata['start_ip'], str(adata['netmask']))))\n if ip_obj in pool_if.network:\n pool = ip_pool\n return self._issue_api_request(\n \"access_network_ip_pools/{}\".format(pool))['path']", "def test_get_port_group_by_moid(self):\n pass", "def nodes_from_pool_list(mnode):\n pool_list_data = get_pool_list(mnode)\n if pool_list_data is None:\n g.log.error(\"Unable to get Nodes from the pool list command.\")\n return None\n\n nodes = []\n for item in pool_list_data:\n nodes.append(item['hostname'])\n return nodes", "def get_worker_addresses(self) -> List[str]:", "def add_pool(ctx, pool_name, global_ip_range, global_port_range):\n\n if len(pool_name) > 32:\n ctx.fail(\"Invalid pool name. Maximum allowed pool name is 32 characters !!\")\n\n # Verify the ip address range and format\n ip_address = global_ip_range.split(\"-\")\n if len(ip_address) > 2:\n ctx.fail(\"Given ip address range {} is invalid. Please enter a valid ip address range !!\".format(global_ip_range))\n elif len(ip_address) == 2:\n if is_valid_ipv4_address(ip_address[0]) is False:\n ctx.fail(\"Given ip address {} is not valid global address. 
Please enter a valid ip address !!\".format(ip_address[0]))\n\n if is_valid_ipv4_address(ip_address[1]) is False:\n ctx.fail(\"Given ip address {} is not valid global address. Please enter a valid ip address !!\".format(ip_address[1]))\n\n ipLowLimit = int(ipaddress.IPv4Address(ip_address[0]))\n ipHighLimit = int(ipaddress.IPv4Address(ip_address[1]))\n if ipLowLimit >= ipHighLimit:\n ctx.fail(\"Given ip address range {} is invalid. Please enter a valid ip address range !!\".format(global_ip_range))\n else:\n if is_valid_ipv4_address(ip_address[0]) is False:\n ctx.fail(\"Given ip address {} is not valid global address. Please enter a valid ip address !!\".format(ip_address[0]))\n ipLowLimit = int(ipaddress.IPv4Address(ip_address[0]))\n ipHighLimit = int(ipaddress.IPv4Address(ip_address[0]))\n\n # Verify the port address range and format\n if global_port_range is not None: \n port_address = global_port_range.split(\"-\")\n\n if len(port_address) > 2:\n ctx.fail(\"Given port address range {} is invalid. Please enter a valid port address range !!\".format(global_port_range))\n elif len(port_address) == 2:\n if is_valid_port_address(port_address[0]) is False:\n ctx.fail(\"Given port value {} is invalid. Please enter a valid port value !!\".format(port_address[0]))\n\n if is_valid_port_address(port_address[1]) is False:\n ctx.fail(\"Given port value {} is invalid. Please enter a valid port value !!\".format(port_address[1]))\n\n portLowLimit = int(port_address[0])\n portHighLimit = int(port_address[1])\n if portLowLimit >= portHighLimit:\n ctx.fail(\"Given port address range {} is invalid. Please enter a valid port address range !!\".format(global_port_range))\n else:\n if is_valid_port_address(port_address[0]) is False:\n ctx.fail(\"Given port value {} is invalid. 
Please enter a valid port value !!\".format(port_address[0]))\n else:\n global_port_range = \"NULL\"\n\n config_db = ConfigDBConnector()\n config_db.connect()\n\n entryFound = False\n table = \"NAT_POOL\"\n key = pool_name\n dataKey1 = 'nat_ip'\n dataKey2 = 'nat_port'\n\n data = config_db.get_entry(table, key)\n if data:\n if data[dataKey1] == global_ip_range and data[dataKey2] == global_port_range:\n click.echo(\"Trying to add pool, which is already present.\")\n entryFound = True\n\n pool_dict = config_db.get_table(table) \n if len(pool_dict) == 16:\n click.echo(\"Failed to add pool, as already reached maximum pool limit 16.\")\n entryFound = True\n\n # Verify the Ip address is overlapping with any Static NAT entry\n if entryFound == False:\n static_dict = config_db.get_table('STATIC_NAT')\n if static_dict:\n for staticKey, staticValues in static_dict.items():\n global_ip = \"---\"\n local_ip = \"---\"\n nat_type = \"dnat\"\n\n if isinstance(staticKey, str) is True:\n global_ip = staticKey\n else:\n continue\n\n local_ip = staticValues[\"local_ip\"]\n\n if \"nat_type\" in staticValues:\n nat_type = staticValues[\"nat_type\"]\n\n if nat_type == \"snat\":\n global_ip = local_ip\n\n ipAddress = int(ipaddress.IPv4Address(global_ip))\n if (ipAddress >= ipLowLimit and ipAddress <= ipHighLimit):\n ctx.fail(\"Given Ip address entry is overlapping with existing Static NAT entry !!\")\n\n if entryFound == False:\n config_db.set_entry(table, key, {dataKey1: global_ip_range, dataKey2 : global_port_range})", "def get_proxy_pool(self,proxy_pool,num):\n\n url='{url}/proxy/?num={num}'.format(url=config.SERVER_URL,num=num)\n\n try:\n res=request.urlopen(url,timeout=5).read()\n res=str(res,encoding='utf8')\n except:\n time.sleep(5)\n check_server() # sleep until server is available\n try:\n res=request.urlopen(url,timeout=5).read()\n res=str(res,encoding='utf8')\n except Exception as e:\n err_str='error: client -> get_proxy_pool : unable to ' \\\n 'connect to proxy server '\n 
info_manager(err_str,type='KEY')\n if config.KEY_INFO_PRINT:\n print(e)\n return\n\n if 'no valid proxy' in res: # if server return no valid proxy, means server\n # cannot provide proxy to this client\n err_str='error: client -> get_proxy_pool : fail to ' \\\n 'get proxy from server'\n info_manager(err_str,type='KEY')\n time.sleep(1)\n return\n\n try:\n data=res.split(';') # 'url,timedelay;url,timedelay;.....'\n data=[proxy_object(x) for x in data]\n except Exception as e:\n err_str='error: client -> get_proxy_pool : fail to ' \\\n 'parse proxy str info:\\r\\n'+res\n info_manager(err_str,type='KEY')\n return\n\n proxy_pool[:]=proxy_pool[:]+data", "def gen5_3PrimerPairPools(pool_size=8,length=20,*,GC_low=40, GC_high=60,ret_str=True):\n\n masterPool = dict()\n pool_5Ext = genPrimerPairPool(pool_size=pool_size,length=length,GC_low=GC_low,\n GC_high=GC_high,ext=5,ret_str=ret_str)\n\n pool_3Ext = genPrimerPairPool(pool_size=pool_size, length=length, GC_low=GC_low,\n GC_high=GC_high, ext=3, ret_str=ret_str)\n\n masterPool[\"5\\'_ext_pool\"] = pool_5Ext; masterPool[\"3\\'_ext_pool\"] = pool_3Ext\n\n return masterPool", "def _get_data(self):\n raw_data = self._get_raw_data()\n if not raw_data:\n return None\n result = {}\n for line in raw_data:\n if 'tcp' in line:\n parts = line.split()\n proto = parts[0]\n local_addr = parts[3]\n state = parts[5]\n ip, port = local_addr.rsplit(':', 1)\n port = str(port)\n result[port] = 1\n if state == 'LISTEN':\n if port not in self.charts['ports']:\n self.charts['ports'].add_dimension([port, port, 'absolute'])\n return result", "def _split_proxy_info(data: str) -> list:\n \n country = data[:2]\n anonymity = data[3:4]\n type_ = data[4:].strip('-+ ') # Remove splitting (- and space) and google_passed flag (+)\n google_passed = data[-1]\n\n return [country, anonymity, type_, google_passed]", "def _find_ports_for_workers(client: Client, worker_addresses: Iterable[str], local_listen_port: int) -> Dict[str, int]:\n lightgbm_ports: Set[int] = 
set()\n worker_ip_to_port = {}\n for worker_address in worker_addresses:\n port = client.submit(\n func=_find_open_port,\n workers=[worker_address],\n worker_ip=urlparse(worker_address).hostname,\n local_listen_port=local_listen_port,\n ports_to_skip=lightgbm_ports\n ).result()\n lightgbm_ports.add(port)\n worker_ip_to_port[worker_address] = port\n\n return worker_ip_to_port", "def handle_cluster_pools(self, request):\n \"\"\"\n @api {get} /cluster/pools Get cluster pools\n @apiName GetClusterPools\n @apiGroup Cluster\n @apiVersion 1.0.0\n\n @apiDescription List pools and nodes registered into each.\n\n @apiSuccess {String[]} pool List of nodes registered into the pool.\n\n @apiSuccessExample {json} Example response:\n {\n \"pool1\": [\"node1\", \"node2\"],\n \"pool2: [\"node1\", \"node3\"]\n }\n \"\"\"\n\n headers = {\n 'Content-Type': 'application/javascript',\n 'Access-Control-Allow-Origin': '*'\n }\n\n return HTTPReply(body = json.dumps(self.cluster.pools), headers = headers)" ]
[ "0.5644854", "0.55139905", "0.53467906", "0.5312967", "0.5159871", "0.51250684", "0.5100258", "0.50839937", "0.49522316", "0.49501953", "0.4949965", "0.49358037", "0.48972043", "0.48684308", "0.48380935", "0.48255715", "0.4824517", "0.48167664", "0.48154384", "0.4813256", "0.4805723", "0.4785521", "0.47659227", "0.47612673", "0.47517496", "0.47424942", "0.47388363", "0.4737366", "0.47337866", "0.4730612" ]
0.5742994
0
Updates a listener's configuration. LISTENER should be a UUID, and can be found from `slcli lb detail `
def edit(env, identifier, listener, **args):
    """Update an existing LBaaS listener's configuration.

    Maps the CLI option names onto the API's listener fields, skipping any
    option the user did not supply, then submits the change.
    """
    # CLI flag name -> SoftLayer API listener attribute.
    option_map = {
        'frontprotocol': 'frontendProtocol',
        'backprotocol': 'backendProtocol',
        'frontport': 'frontendPort',
        'backport': 'backendPort',
        'method': 'loadBalancingMethod',
        'connections': 'maxConn',
        'sticky': 'sessionType',
        'clienttimeout': 'clientTimeout',
        'sslcert': 'tlsCertificateId'
    }

    manager = SoftLayer.LoadBalancerManager(env.client)
    lb_uuid, _ = manager.get_lbaas_uuid_id(identifier)

    listener_spec = {'listenerUuid': listener}
    # Only truthy option values are forwarded, matching the CLI's
    # "unset means leave unchanged" behavior.
    listener_spec.update(
        {option_map[name]: val for name, val in args.items() if val})

    try:
        manager.add_lb_listener(lb_uuid, listener_spec)
        click.secho("Success", fg='green')
    except SoftLayerAPIError as exception:
        click.secho(f"ERROR: {exception.faultString}", fg='red')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def update_listener(self, context, old_listener, listener):\n old_val, new_val = self.get_diff_of_dict(old_listener, listener)\n LOG.info(\"Received request 'Update Listener' for Listener:\"\n \"%(listener)s in LB:%(lb_id)s with new Param:\"\n \"%(new_val)s and old Param:%(old_val)s\",\n {'lb_id': listener['loadbalancer_id'],\n 'listener': listener['id'],\n 'old_val': old_val,\n 'new_val': new_val})\n arg_dict = {'context': context,\n lb_const.OLD_LISTENER: old_listener,\n lb_const.LISTENER: listener,\n }\n self._send_event(lb_const.EVENT_UPDATE_LISTENER_V2, arg_dict,\n serialize=True,\n binding_key=listener['loadbalancer_id'],\n key=listener['id'])", "def add_listener(cls, listener: ConfigUnitListener) -> None:\n cls.listener.append(listener)", "def update_listener(self, service, bigips):\n\n u\"\"\"\n ATTENTION: The hole impl. is a hack.\n For ssl profile settings the order is very important:\n 1. A new ssl profile is created but not applied to the listener\n 2. The esd_apply configures the listener with the new profile (so the old one will be detached)\n 3. The update will apply the changes to the listener\n 4. 
The remove_ssl is than be able to remove unneeded ssl profiles because they got detached in 3.\n \"\"\"\n\n # check for ssl client cert changes\n old_default = None\n old_sni_containers = None\n new_default = None\n new_sni_containers = None\n vip = self.service_adapter.get_virtual(service)\n\n #pdb.set_trace()\n\n listener = service.get('listener')\n if listener.get('protocol') == 'TERMINATED_HTTPS':\n old_listener = service.get('old_listener')\n if old_listener != None:\n listener = service.get('listener')\n if old_listener.get('default_tls_container_id') != listener.get('default_tls_container_id'):\n old_default = old_listener.get('default_tls_container_id')\n new_default = listener.get('default_tls_container_id')\n\n # determine sni delta with set substraction\n old_snis = old_listener.get('sni_containers')\n new_snis = listener.get('sni_containers')\n old_ids = []\n new_ids = []\n for old in old_snis:\n old_ids.append(old.get('tls_container_id'))\n for new in new_snis:\n new_ids.append(new.get('tls_container_id'))\n new_sni_containers = self._make_sni_tls(vip, list(set(new_ids) - set(old_ids)))\n old_sni_containers = self._make_sni_tls(vip, list(set(old_ids) - set(new_ids)))\n\n # create old and new tls listener configurations\n # create new ssl-profiles on F5 BUT DO NOT APPLY them to listener\n old_tls = None\n if (new_default != None or (new_sni_containers != None and new_sni_containers['sni_containers'])):\n new_tls = self.service_adapter.get_tls(service)\n new_tls = self._make_default_tls(vip, new_tls.get('default_tls_container_id'))\n\n if old_default != None:\n old_tls = self._make_default_tls(vip, old_default)\n\n for bigip in bigips:\n # create ssl profile but do not apply\n if new_tls != None:\n try:\n self.add_ssl_profile(new_tls, bigip, False)\n except:\n pass\n if new_sni_containers != None and new_sni_containers['sni_containers']:\n try:\n self.add_ssl_profile(new_sni_containers, bigip, False)\n except:\n pass\n\n\n # process esd's AND create new 
client ssl config for listener\n self.apply_esds(service, vip)\n\n # apply changes to listener AND remove not needed ssl profiles on F5\n error = None\n network_id = service['loadbalancer']['network_id']\n for bigip in bigips:\n self.service_adapter.get_vlan(vip, bigip, network_id)\n try:\n self.vs_helper.update(bigip, vip)\n except Exception as err:\n LOG.error(\"Error changing listener: {0}\".format(err))\n error = err if error is None else error\n # delete ssl profiles\n if listener.get('protocol') == 'TERMINATED_HTTPS':\n if old_tls != None:\n try:\n self.remove_ssl_profiles(old_tls, bigip)\n except:\n pass\n if old_sni_containers != None and old_sni_containers['sni_containers']:\n try:\n self.remove_ssl_profiles(old_sni_containers, bigip)\n except:\n pass\n\n if error:\n raise error", "def put(self, request, listener_id):\n kwargs = {'listener_id': listener_id}\n update_listener(request, **kwargs)", "def AddListener(self, listener):\n pass", "async def update_listener(hass, config_entry):\n controller = hass.data[DOMAIN][config_entry.entry_id][\"coordinator\"].controller\n old_update_interval = controller.update_interval\n controller.update_interval = config_entry.options.get(CONF_SCAN_INTERVAL)\n if old_update_interval != controller.update_interval:\n _LOGGER.debug(\n \"Changing scan_interval from %s to %s\",\n old_update_interval,\n controller.update_interval,\n )", "def updated(self, newConfiguration):\n log.debug('ConfigListener: configuration %s updated' % newConfiguration)", "def set_listener(self, listener):\n\t\tth = current_thread()\n\n\t\t#print '>> SET listener on', th.name, listener\n\n\t\tth.listener = listener", "def _on_config_changed(self, _):\n self._configure_pod()", "def add_update_listener(self, listener: WorklistUpdateListener):\n self.__worklist_update_listeners.add(listener)", "def add_change_listener(self, listener: INotifiable):\n # Do nothing...", "async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> 
None:\n await hass.config_entries.async_reload(entry.entry_id)", "def config_updated(self):\n if callable(self.on_config_updated):\n self.on_config_updated(self.config())", "def handle_config_change(self, msg):\n self.xmpp.event('groupchat_config_status', msg)\n self.xmpp.event('muc::%s::config_status' % msg['from'].bare , msg)", "def addServiceListener(self, listener: ghidra.framework.plugintool.util.ServiceListener) -> None:\n ...", "def add(env, identifier, **args):\n\n mgr = SoftLayer.LoadBalancerManager(env.client)\n uuid, _ = mgr.get_lbaas_uuid_id(identifier)\n\n new_listener = {\n 'backendPort': args.get('backport'),\n 'backendProtocol': args.get('backprotocol') if args.get('backprotocol') else args.get('frontprotocol'),\n 'frontendPort': args.get('frontport'),\n 'frontendProtocol': args.get('frontprotocol'),\n 'loadBalancingMethod': args.get('method'),\n 'maxConn': args.get('connections', None),\n 'sessionType': args.get('sticky'),\n 'tlsCertificateId': args.get('sslcert')\n }\n\n try:\n mgr.add_lb_listener(uuid, new_listener)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", fg='red')", "def create_listener(self, context, listener):\n LOG.info(\"Received request 'Create Listener' for LB:%(lb)s \",\n {'lb': listener['loadbalancer_id']})\n arg_dict = {'context': context,\n lb_const.LISTENER: listener,\n }\n self._send_event(lb_const.EVENT_CREATE_LISTENER_V2, arg_dict,\n serialize=True,\n binding_key=listener['loadbalancer_id'],\n key=listener['id'])", "def osgi_http_whiteboard_listener(self, osgi_http_whiteboard_listener: ConfigNodePropertyString):\n\n self._osgi_http_whiteboard_listener = osgi_http_whiteboard_listener", "def update_log_config(self, monitor_name, log_config):\n pass", "def addListener(self, listener):\n ret = TaurusAttribute.addListener(self, listener)\n\n if not ret:\n return ret\n\n if self.__subscription_state == SubscriptionState.Unsubscribed:\n 
self.__subscription_state = SubscriptionState.Subscribed\n\n assert len(self._listeners) >= 1\n\n if self.isPollingActive():\n Manager().addJob(self.__fireRegisterEvent, None, (listener,))\n return ret", "def conf_update(self):\n pass", "def set_listener_verbose(self, listener, verbose_level):\n\n\t\tfound = None\n\n\t\twith options.monitor_lock:\n\t\t\t# we need to search: the current thread is not necessarily the\n\t\t\t# one which is operating for the client-side, in which the\n\t\t\t# original verbose_level has been set.\n\t\t\tfor t in enumerate():\n\t\t\t\tif hasattr(t, 'listener') and t.listener == listener:\n\t\t\t\t\tfound = t\n\t\t\t\t\tbreak\n\n\t\tif found:\n\t\t\twith found.monitor_lock:\n\t\t\t\tfound.listener.verbose = verbose_level", "def add_listener(self, listener):\r\n self.listeners.append(listener)", "def fusion_api_update_li_telemetry_configuration(self, body=None, uri=None, api=None, headers=None):\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=\"\")", "def fusion_api_update_li_port_monitor_configuration(self, body=None, uri=None, api=None, headers=None):\n param = '/port-monitor'\n return self.li.update(body=body, uri=uri, api=api, headers=headers, param=param)", "def update_health(self, health):\n session = db_api.get_session()\n\n # We need to see if all of the listeners are reporting in\n expected_listener_count = 0\n lbs_on_amp = self.amphora_repo.get_all_lbs_on_amphora(session,\n health['id'])\n for lb in lbs_on_amp:\n listener_count = self.listener_repo.count(session,\n load_balancer_id=lb.id)\n expected_listener_count += listener_count\n\n listeners = health['listeners']\n\n # Do not update amphora health if the reporting listener count\n # does not match the expected listener count\n if len(listeners) == expected_listener_count:\n\n # if the input amphora is healthy, we update its db info\n self.amphora_health_repo.replace(session, health['id'],\n last_update=(datetime.\n datetime.utcnow()))\n else:\n 
LOG.warning('Amphora %(id)s health message reports %(found)i '\n 'listeners when %(expected)i expected',\n {'id': health['id'], 'found': len(listeners),\n 'expected': expected_listener_count})\n\n # We got a heartbeat so lb is healthy until proven otherwise\n lb_status = constants.ONLINE\n\n # update listener and nodes db information\n for listener_id, listener in listeners.items():\n\n listener_status = None\n # OPEN = HAProxy listener status nbconn < maxconn\n if listener.get('status') == constants.OPEN:\n listener_status = constants.ONLINE\n # FULL = HAProxy listener status not nbconn < maxconn\n elif listener.get('status') == constants.FULL:\n listener_status = constants.DEGRADED\n if lb_status == constants.ONLINE:\n lb_status = constants.DEGRADED\n else:\n LOG.warning(('Listener %(list)s reported status of '\n '%(status)s'), {'list': listener_id,\n 'status': listener.get('status')})\n\n try:\n if listener_status is not None:\n self._update_status_and_emit_event(\n session, self.listener_repo, constants.LISTENER,\n listener_id, listener_status\n )\n except sqlalchemy.orm.exc.NoResultFound:\n LOG.error(\"Listener %s is not in DB\", listener_id)\n\n pools = listener['pools']\n for pool_id, pool in pools.items():\n\n pool_status = None\n # UP = HAProxy backend has working or no servers\n if pool.get('status') == constants.UP:\n pool_status = constants.ONLINE\n # DOWN = HAProxy backend has no working servers\n elif pool.get('status') == constants.DOWN:\n pool_status = constants.ERROR\n lb_status = constants.ERROR\n else:\n LOG.warning(('Pool %(pool)s reported status of '\n '%(status)s'), {'pool': pool_id,\n 'status': pool.get('status')})\n\n members = pool['members']\n for member_id, status in members.items():\n\n member_status = None\n if status == constants.UP:\n member_status = constants.ONLINE\n elif status == constants.DOWN:\n member_status = constants.ERROR\n if pool_status == constants.ONLINE:\n pool_status = constants.DEGRADED\n if lb_status == 
constants.ONLINE:\n lb_status = constants.DEGRADED\n elif status == constants.NO_CHECK:\n member_status = constants.NO_MONITOR\n else:\n LOG.warning('Member %(mem)s reported status of '\n '%(status)s', {'mem': member_id,\n 'status': status})\n\n try:\n if member_status is not None:\n self._update_status_and_emit_event(\n session, self.member_repo, constants.MEMBER,\n member_id, member_status\n )\n except sqlalchemy.orm.exc.NoResultFound:\n LOG.error(\"Member %s is not able to update \"\n \"in DB\", member_id)\n\n try:\n if pool_status is not None:\n self._update_status_and_emit_event(\n session, self.pool_repo, constants.POOL,\n pool_id, pool_status\n )\n except sqlalchemy.orm.exc.NoResultFound:\n LOG.error(\"Pool %s is not in DB\", pool_id)\n\n # Update the load balancer status last\n # TODO(sbalukoff): This logic will need to be adjusted if we\n # start supporting multiple load balancers per amphora\n lb_id = self.amphora_repo.get(\n session, id=health['id']).load_balancer_id\n if lb_id is not None:\n try:\n self._update_status_and_emit_event(\n session, self.loadbalancer_repo,\n constants.LOADBALANCER, lb_id, lb_status\n )\n except sqlalchemy.orm.exc.NoResultFound:\n LOG.error(\"Load balancer %s is not in DB\", lb_id)", "def add_hsm_listener(self, listener: Listener) -> None:\n if ID_HSM_STATUS not in self._listeners:\n self._listeners[ID_HSM_STATUS] = []\n self._listeners[ID_HSM_STATUS].append(listener)", "def register(self, listener):\n\t\t\t## This tests weather the listener has a 'refresh' method\t \n\t\tif not hasattr(listener,'refresh') or not hasattr(listener.refresh,'func_code'):\n\t\t\traise AttributeError(\"%s does not have a 'refresh' method.\\n Type help(Wiimote3DTracker.register) for more information on refresh methods.\"% listener.__class__)\n\t\t## Ok, so we have a refresh method (probably) but we need to make sure \n\t\t## it has the correct number of \targuments\n\t\telif listener.refresh.func_code.co_argcount == 3: \n\t\t\tif listener not in 
self.cartesianListeners:\n\t\t\t\tself.cartesianListeners += [listener]\n\t\telif listener.refresh.func_code.co_argcount == 5:\n\t\t\tif listener not in self.polarListeners:\n\t\t\t\tself.polarListeners += [listener]\n\t\telse:\n\t\t\tprint listener.refresh.func_code.co_argcount\n\t\t\traise AttributeError(\"%s does not have a valid 'refresh' method.\\n Type help(Wiimote3DTracker.register) for more information on refresh methods.\" % listener.__class__)", "def update_config(update):\n global _config\n new_config = copy.deepcopy(_config)\n _update_dict_recursive(new_config, update)\n logging.config.dictConfig(new_config)\n _configure_ulog_bridge()\n _config = new_config", "def add_listener(self, listener):\n self.listeners.append(listener)" ]
[ "0.6794243", "0.62666476", "0.61793065", "0.59043264", "0.5821499", "0.56862515", "0.56750363", "0.5674302", "0.56067747", "0.5537184", "0.5383394", "0.53498125", "0.5346392", "0.53355396", "0.52670795", "0.52485996", "0.5239451", "0.5133966", "0.5121427", "0.51119864", "0.5102493", "0.5077222", "0.50470436", "0.50432503", "0.5033206", "0.50316215", "0.50261074", "0.5002229", "0.5001886", "0.49853647" ]
0.70541364
0
Removes the listener from identified LBaaS instance LISTENER should be a UUID, and can be found from `slcli lb detail `
def delete(env, identifier, listener): mgr = SoftLayer.LoadBalancerManager(env.client) uuid, _ = mgr.get_lbaas_uuid_id(identifier) try: mgr.remove_lb_listener(uuid, listener) click.secho("Success", fg='green') except SoftLayerAPIError as exception: click.secho(f"ERROR: {exception.faultString}", fg='red')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_listener(self, context, listener):\n LOG.info(\"Received request 'Delete Listener' for LB:%(lb)s \",\n {'lb': listener['loadbalancer_id']})\n arg_dict = {'context': context,\n lb_const.LISTENER: listener,\n }\n self._send_event(lb_const.EVENT_DELETE_LISTENER_V2, arg_dict,\n serialize=True,\n binding_key=listener['loadbalancer_id'],\n key=listener['id'])", "def removeServiceListener(self, listener: ghidra.framework.plugintool.util.ServiceListener) -> None:\n ...", "def delete_listener(self, argu):\n\n if not argu:\n LOG.error(\"In delete_listener, it should not pass the None.\")\n\n # delete vs\n self._delete_vs(\n argu['listener_id'],\n argu['protocol']\n )", "def remove_listener ( cls, listener, class_name = '' ):\n MetaHasTraits._listeners[ class_name ].remove( listener )", "def remove_listener(self, listener: CBListenerType) -> None:\n with self._lock:\n self._listeners.remove(listener) # type: ignore[arg-type]", "def stop_listening(self, listener: EventListener) -> None:\n if listener in self.listeners:\n stats.dec('num_listeners', 'EVENT')\n self.listeners.remove(listener)", "def remove_change_listener(self, listener: INotifiable):\n # Do nothing...", "def delete_entity(self, context, listener):\n resource_path = \"%s/%s/%s\" % (RESOURCE_PREFIX, LISTENERS_RESOURCE,\n listener.id)\n msg = _(\"NetScaler driver listener removal: %s\") % listener.id\n LOG.debug(msg)\n self.client.remove_resource(context.tenant_id, resource_path)", "def remove_update_listener(self, listener: WorklistUpdateListener):\n self.__worklist_update_listeners.remove(listener)", "def removeListener(self, listener):\n ret = TaurusAttribute.removeListener(self, listener)\n\n if ret and not self.hasListeners():\n self._deactivatePolling()\n self.__subscription_state = SubscriptionState.Unsubscribed\n return ret", "def _remove_listener(self):\n if self._listener:\n self.hass.bus.remove_listener(EVENT_TIME_CHANGED,\n self._listener)\n self._listener = None", "def unregister(self, 
target, hostname, listener_type):", "def remove_listener(self) -> None:\n client = self.discord_client\n\n try:\n remove_listener = cast(\"Bot\", client).remove_listener\n except AttributeError:\n unwrap_client_listener(self.discord_client, self.on_socket_response)\n else:\n log.info(f\"Removing socket response listener from {client}\")\n remove_listener(self.on_socket_response)", "def unregisterListener( self, listener ):\n # FIXME---! eventTypesToListeners!\n if listener in self.listeners:\n del self.listeners[ listener ]", "def unregister(self, listener):\n for event_type in self.listeners:\n for event_listeners in self.listeners[event_type]:\n if event_listeners:\n try:\n event_listeners.remove(listener)\n logger.debug('Unregistered listener for event type \"%s\"', hr_event_type(event_type))\n except ValueError:\n pass", "def test_remove_listener(self):\n listener = lambda state: state\n self.wrapper.add_listener(listener)\n self.assertEqual(len(self.client.listeners), 1)\n self.wrapper.remove_listener(listener)\n self.assertEqual(len(self.client.listeners), 0)", "def remove_event_listener(self, event_type: str, listener: Callable or Awaitable) -> None:\n try:\n self.event_listeners[event_type].remove(listener)\n except ValueError:\n _LOGGER.warning(\"Error removing listener, it does not exist.\")", "def delete(self, request, listener_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_listener,\n listener_id, ignore_missing=True,\n load_balancer_getter=listener_get_load_balancer_id,\n resource_id=listener_id)", "def delete_listeners(ctx):\n if self.balancer_exists():\n self.delete_listeners()\n ctx.info('Deleted all listeners for load balancer {}:'.format(self.get_balancer_name()))\n else:\n ctx.info('Load balancer {} does not exist, no listeners to remove.'.format(self.get_balancer_name()))", "def unregisterStatusListener(self, cb):\r\n self._statusListener.discard(cb)", "def unregister(self,listener):\n\t\tif 
listener in self.cartesianListeners:\n\t\t\tself.cartesianListeners.remove(listener)\n\t\tif listener in self.polarListeners:\n\t\t\tself.polarListeners.remove(listener)", "def delete_listeners(self):\n listeners_info = self.describe_listeners()\n\n for listener in listeners_info:\n response = self.client.delete_listener(\n ListenerArn=listener['ListenerArn']\n )\n assert response['ResponseMetadata']['HTTPStatusCode'] == 200\n\n self.logger.info('Successfully deleted listener {listener_arn} for balancer {balancer}.'.format(\n listener_arn=listener['ListenerArn'],\n balancer=self.get_balancer_name(),\n ))", "def remove_message_listener(self, name, fn):\n\n name = str(name)\n if name in self._message_listeners:\n if fn in self._message_listeners[name]:\n self._message_listeners[name].remove(fn)\n if len(self._message_listeners[name]) == 0:\n del self._message_listeners[name]", "def remove_action_listener(self, listener):\n for action in self.action_listeners:\n for listeners in self.action_listeners[action]:\n if listeners and listener in listeners:\n listeners.remove(listener)\n logger.debug('Unregistered listener for action \"%s\"', action)", "def unlisten(cls, name: str):\r\n cls.Unlisten(name)", "def unsubscribeFromEvent(eventName,subscriber):", "def disconnect(self, listener_id):\n\n for event_listeners in self._listeners.values():\n for listener in event_listeners:\n if listener.id == listener_id:\n event_listeners.remove(listener)\n break", "def test_unsubscribe_one_listener(self):\n def listener():\n pass\n\n EVENT_MANAGER.subscribe('test_listener', listener)\n EVENT_MANAGER.unsubscribe(listener)\n self.assertNotIn(listener, EVENT_MANAGER._listeners['test_listener'])", "def unlisten(obj, name, func):\n _signals(obj, name).remove(func)", "def remove_listener(self, event_name, func):\n for w in self.wrappers[event_name]:\n if w[0] == func:\n self.wrappers[event_name].remove(w)\n return super().remove_listener(event_name, w[1])\n # if no wrapper exists try 
removing the function\n return super().remove_listener(event_name, func)" ]
[ "0.7619018", "0.72021365", "0.7081414", "0.6927887", "0.6834801", "0.6769346", "0.6714219", "0.66600806", "0.6638011", "0.6612364", "0.6563146", "0.65185297", "0.6517619", "0.65116155", "0.6510315", "0.6408872", "0.6381993", "0.6373163", "0.6372998", "0.6254823", "0.6224056", "0.62160414", "0.60822", "0.5874327", "0.58714795", "0.5858973", "0.5833543", "0.58057165", "0.5793136", "0.57358396" ]
0.77306855
0
Adds a new l7 pool
def l7pool_add(env, identifier, **args): mgr = SoftLayer.LoadBalancerManager(env.client) uuid, _ = mgr.get_lbaas_uuid_id(identifier) pool_main = { 'name': args.get('name'), 'loadBalancingAlgorithm': args.get('method'), 'protocol': args.get('protocol') } pool_members = list(args.get('server')) pool_health = { 'interval': args.get('healthinterval'), 'timeout': args.get('healthtimeout'), 'maxRetries': args.get('healthretry'), 'urlPath': args.get('healthpath') } pool_sticky = { 'type': args.get('sticky') } try: mgr.add_lb_l7_pool(uuid, pool_main, pool_members, pool_health, pool_sticky) click.secho("Success", fg='green') except SoftLayerAPIError as exception: click.secho(f"ERROR: {exception.faultString}", fg='red')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _add_pool ( self, pool ):\n self._pool_id += 1\n try:\n self._poolstack.append ( pool )\n except:\n self._pool_id -= 1\n raise\n\n self._update_resolver()", "def addpool(miner: Miner, pool):\n api = MinerApi(host=miner.ipaddress, port=int(miner.port))\n jaddpool = api.addpool(\"{0},{1},{2}\".format(pool.url, pool.user, \"x\"))\n return jaddpool[\"STATUS\"][0][\"Msg\"]", "def add_pool(ctx, pool_name, global_ip_range, global_port_range):\n\n if len(pool_name) > 32:\n ctx.fail(\"Invalid pool name. Maximum allowed pool name is 32 characters !!\")\n\n # Verify the ip address range and format\n ip_address = global_ip_range.split(\"-\")\n if len(ip_address) > 2:\n ctx.fail(\"Given ip address range {} is invalid. Please enter a valid ip address range !!\".format(global_ip_range))\n elif len(ip_address) == 2:\n if is_valid_ipv4_address(ip_address[0]) is False:\n ctx.fail(\"Given ip address {} is not valid global address. Please enter a valid ip address !!\".format(ip_address[0]))\n\n if is_valid_ipv4_address(ip_address[1]) is False:\n ctx.fail(\"Given ip address {} is not valid global address. Please enter a valid ip address !!\".format(ip_address[1]))\n\n ipLowLimit = int(ipaddress.IPv4Address(ip_address[0]))\n ipHighLimit = int(ipaddress.IPv4Address(ip_address[1]))\n if ipLowLimit >= ipHighLimit:\n ctx.fail(\"Given ip address range {} is invalid. Please enter a valid ip address range !!\".format(global_ip_range))\n else:\n if is_valid_ipv4_address(ip_address[0]) is False:\n ctx.fail(\"Given ip address {} is not valid global address. Please enter a valid ip address !!\".format(ip_address[0]))\n ipLowLimit = int(ipaddress.IPv4Address(ip_address[0]))\n ipHighLimit = int(ipaddress.IPv4Address(ip_address[0]))\n\n # Verify the port address range and format\n if global_port_range is not None: \n port_address = global_port_range.split(\"-\")\n\n if len(port_address) > 2:\n ctx.fail(\"Given port address range {} is invalid. 
Please enter a valid port address range !!\".format(global_port_range))\n elif len(port_address) == 2:\n if is_valid_port_address(port_address[0]) is False:\n ctx.fail(\"Given port value {} is invalid. Please enter a valid port value !!\".format(port_address[0]))\n\n if is_valid_port_address(port_address[1]) is False:\n ctx.fail(\"Given port value {} is invalid. Please enter a valid port value !!\".format(port_address[1]))\n\n portLowLimit = int(port_address[0])\n portHighLimit = int(port_address[1])\n if portLowLimit >= portHighLimit:\n ctx.fail(\"Given port address range {} is invalid. Please enter a valid port address range !!\".format(global_port_range))\n else:\n if is_valid_port_address(port_address[0]) is False:\n ctx.fail(\"Given port value {} is invalid. Please enter a valid port value !!\".format(port_address[0]))\n else:\n global_port_range = \"NULL\"\n\n config_db = ConfigDBConnector()\n config_db.connect()\n\n entryFound = False\n table = \"NAT_POOL\"\n key = pool_name\n dataKey1 = 'nat_ip'\n dataKey2 = 'nat_port'\n\n data = config_db.get_entry(table, key)\n if data:\n if data[dataKey1] == global_ip_range and data[dataKey2] == global_port_range:\n click.echo(\"Trying to add pool, which is already present.\")\n entryFound = True\n\n pool_dict = config_db.get_table(table) \n if len(pool_dict) == 16:\n click.echo(\"Failed to add pool, as already reached maximum pool limit 16.\")\n entryFound = True\n\n # Verify the Ip address is overlapping with any Static NAT entry\n if entryFound == False:\n static_dict = config_db.get_table('STATIC_NAT')\n if static_dict:\n for staticKey, staticValues in static_dict.items():\n global_ip = \"---\"\n local_ip = \"---\"\n nat_type = \"dnat\"\n\n if isinstance(staticKey, str) is True:\n global_ip = staticKey\n else:\n continue\n\n local_ip = staticValues[\"local_ip\"]\n\n if \"nat_type\" in staticValues:\n nat_type = staticValues[\"nat_type\"]\n\n if nat_type == \"snat\":\n global_ip = local_ip\n\n ipAddress = 
int(ipaddress.IPv4Address(global_ip))\n if (ipAddress >= ipLowLimit and ipAddress <= ipHighLimit):\n ctx.fail(\"Given Ip address entry is overlapping with existing Static NAT entry !!\")\n\n if entryFound == False:\n config_db.set_entry(table, key, {dataKey1: global_ip_range, dataKey2 : global_port_range})", "def add_to_pool(self, data: str):\n self.pool.append(data)", "def add_to_pool(self):\n if self.check_pool():\n for func in self.getter._func:\n proxies = self.getter.get_proxies(func)\n for proxy in proxies:\n self.conn.push_to_right(proxy)\n else:\n print('Pool reached max capacity')", "def add_pool(name, **kwargs):\n _CONNECTIONS[name] = redis.StrictRedis(**kwargs)", "def create_pool(self, body=None):\r\n return self.post(self.pools_path, body=body)", "def pool_create(self, pool_name):\n self.core.api.os.shell.cmd('{0} add apppool /name:\"{1}\"'.format(\n self.APP_CMD, pool_name\n ))", "def addNewPool(self,ippool_name,comment):\n self.__addNewPoolCheckInput(ippool_name,comment)\n ippool_id=self.__getNewIPpoolID()\n self.__insertPoolDB(ippool_id,ippool_name,comment)\n ippool_main.getLoader().loadIPpoolByID(ippool_id)\n return ippool_id", "def push_pool ( self, pool ):\n # COULDFIX: pool name possibly not unique when reinserting pools\n assert isinstance ( pool, roverlay.depres.simpledeprule.pool.SimpleDependencyRulePool )\n self._add_pool ( pool )\n return self._poolstack[-1]", "def add_node(self, node):\n self._execution_pool[node.name] = node", "def addIPtoPool(self,ippool_name,ips):\n self.__addIPtoPoolCheckInput(ippool_name,ips)\n ippool_obj=ippool_main.getLoader().getIPpoolByName(ippool_name)\n self.__addIPtoPoolDB(ippool_obj.getIPpoolID(),ips)\n self.__reloadIPpool(ippool_obj.getIPpoolID())", "def l7pool_del(env, identifier):\n mgr = SoftLayer.LoadBalancerManager(env.client)\n try:\n mgr.del_lb_l7_pool(identifier)\n click.secho(\"Success\", fg='green')\n except SoftLayerAPIError as exception:\n click.secho(f\"ERROR: {exception.faultString}\", 
fg='red')", "def post_loadbalancer_pool_create(self, resource_dict):\n pass", "def post_floating_ip_pool_create(self, resource_dict):\n pass", "def pre_loadbalancer_pool_create(self, resource_dict):\n pass", "def newPool(name: str, superPool, types: [], cls):\n try:\n if name == \"colorholder\":\n superPool = P0(len(types), cls)\n return superPool\n elif name == \"abstractnode\":\n superPool = P1(len(types), cls)\n return superPool\n elif name == \"node\":\n superPool = P2(len(types), superPool, cls)\n return superPool\n \n elif name == \"subnode\":\n superPool = P3(len(types), superPool, cls)\n return superPool\n \n else:\n if superPool is None:\n superPool = BasePool(len(types), name, StoragePool.noKnownFields, StoragePool.noAutoFields, cls)\n else:\n superPool = superPool.makeSubPool(len(types), name, cls)\n return superPool\n finally:\n types.append(superPool)", "def pre_floating_ip_pool_create(self, resource_dict):\n pass", "def add_module(self, module):\n if module.name not in self.pool:\n self.pool[module.name] = module", "def create_pool(self, device, tier, poolname):\n print \"Adding pool %s...\" % poolname\n pool = device.findRemoteStoragePool(StoragePoolPredicates.name(poolname))\n pool.setTier(tier)\n pool.save()\n return pool", "def register_pooling(key, module):\n register(key, module, pooling_dict)", "def __init__(self, pool_size):\n \n self.pool_size=pool_size;", "def __init__(self, pool_size: float = 10):\n self.pool_size = pool_size", "def create_pool(request, **kwargs):\n data = request.DATA\n\n conn = get_sdk_connection(request)\n pool = conn.load_balancer.create_pool(\n protocol=data['pool']['protocol'],\n lb_algorithm=data['pool']['lb_algorithm'],\n session_persistence=data['pool'].get('session_persistence'),\n listener_id=kwargs['listener_id'],\n loadbalancer_id=kwargs['loadbalancer_id'],\n name=data['pool'].get('name'),\n description=data['pool'].get('description'),\n admin_state_up=data['pool'].get('admin_state_up'),\n 
tls_enabled=data['pool'].get('tls_enabled'),\n # Replace empty string by None (uses default tls cipher string)\n tls_ciphers=data['pool'].get('tls_ciphers') or None,\n )\n\n if data.get('members'):\n args = (request, kwargs['loadbalancer_id'], add_member)\n kwargs = {'callback_kwargs': {'pool_id': pool.id,\n 'index': 0}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n elif data.get('monitor'):\n args = (request, kwargs['loadbalancer_id'], create_health_monitor)\n kwargs = {'callback_kwargs': {'pool_id': pool.id}}\n thread.start_new_thread(poll_loadbalancer_status, args, kwargs)\n\n return _get_sdk_object_dict(pool)", "def update_minion_pool():\n pool = fetch_minion_pool()\n save_minion_pool(pool)\n return pool", "def create_pool(self, service, bigips):\n pool = self.service_adapter.get_pool(service)\n error = None\n\n for bigip in bigips:\n try:\n self.pool_helper.create(bigip, pool)\n except HTTPError as err:\n if err.response.status_code == 409:\n LOG.debug(\"Pool already exists...updating\")\n try:\n self.pool_helper.update(bigip, pool)\n except Exception as err:\n error = f5_ex.PoolUpdateException(err.message)\n LOG.error(\"Failed to assure pool %s on %s: %s\",\n pool['name'], bigip, error.message)\n else:\n error = f5_ex.PoolCreationException(err.message)\n LOG.error(\"Failed to assure pool %s on %s: %s\",\n pool['name'], bigip, error.message)\n except Exception as err:\n error = f5_ex.PoolCreationException(err.message)\n LOG.error(\"Failed to assure pool %s on %s: %s\",\n pool['name'], bigip, error.message)\n\n return error", "def fusion_api_allocate_pool(self, body, uri, api=None, headers=None):\n return self.idpool.allocate(body, uri, api, headers)", "def _Pool(self, name, stride, first_n=None):\n p = self.params\n return p.funnel_pool_tpl.Copy().Set(\n stride=stride,\n first_n=first_n,\n name=name)", "def post(self, request):\n kwargs = {'loadbalancer_id': request.DATA.get('loadbalancer_id'),\n 'listener_id': 
request.DATA.get('parentResourceId')}\n return create_pool(request, **kwargs)", "def create_pool(self, context, pool):\n LOG.info(\"Received request 'Create Pool' for Pool:%(pool_id)s \",\n {'pool_id': pool['id']})\n arg_dict = {'context': context,\n lb_const.POOL: pool\n }\n # REVISIT(jiahao) M:N pool is not yet implemented.\n self._send_event(lb_const.EVENT_CREATE_POOL_V2, arg_dict,\n serialize=True,\n binding_key=pool['loadbalancer_id'],\n key=pool['id'])" ]
[ "0.74670166", "0.6696249", "0.6556804", "0.6373237", "0.62294525", "0.62013334", "0.61743003", "0.61615366", "0.61531955", "0.6117457", "0.5936169", "0.59159166", "0.59124887", "0.5877003", "0.58275515", "0.58268243", "0.58231163", "0.5797193", "0.56864727", "0.5671812", "0.561697", "0.56136847", "0.56051624", "0.5602175", "0.5569994", "0.55358887", "0.55355597", "0.54391235", "0.54390466", "0.54328257" ]
0.8028086
0
Deletes the identified pool Identifier is L7Pool Id. NOT the UUID
def l7pool_del(env, identifier): mgr = SoftLayer.LoadBalancerManager(env.client) try: mgr.del_lb_l7_pool(identifier) click.secho("Success", fg='green') except SoftLayerAPIError as exception: click.secho(f"ERROR: {exception.faultString}", fg='red')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def delete_pool(self, pool):\r\n return self.delete(self.pool_path % (pool))", "def delete(self):\n self._lbcall('delete_pool', [self._name])", "def deletePool(self,ippool_name): \n self.__deletePoolCheckInput(ippool_name)\n ippool_obj=ippool_main.getLoader().getIPpoolByName(ippool_name)\n self.__deletePoolDB(ippool_obj.getIPpoolID())\n ippool_main.getLoader().unloadIPpoolByID(ippool_obj.getIPpoolID())", "def delete_pool(self, context, pool):\n LOG.info(\"Received request 'Delete Pool' for Pool:%(pool_id)s \",\n {'pool_id': pool['id']})\n arg_dict = {'context': context,\n lb_const.POOL: pool,\n }\n self._send_event(lb_const.EVENT_DELETE_POOL_V2, arg_dict,\n serialize=True,\n binding_key=pool['loadbalancer_id'],\n key=pool['id'])", "def delete_entity(self, context, pool):\n resource_path = \"%s/%s/%s\" % (RESOURCE_PREFIX, POOLS_RESOURCE,\n pool.id)\n msg = _(\"NetScaler driver pool removal: %s\") % pool.id\n LOG.debug(msg)\n self.client.remove_resource(context.tenant_id, resource_path)", "def execute(self, pool, vthunder):\n try:\n axapi_version = acos_client.AXAPI_21 if vthunder.axapi_version == 21 else acos_client.AXAPI_30\n c = self.client_factory(vthunder)\n #need to put algorithm logic\n out = c.slb.service_group.delete(pool.id)\n LOG.info(\"Pool deleted successfully.\")\n except Exception as e:\n print(str(e))\n LOG.info(\"Error occurred\")", "def delete_pool(self, argu):\n\n if not argu:\n LOG.error(\"In delete_pool, it should not pass the None.\")\n\n # delete policy\n self._delete_policy(\n argu['listener_id'],\n argu['session_persistence_type'],\n argu['lb_algorithm']\n )\n\n cmd_apv_no_group = ADCDevice.no_group(argu['pool_id'])\n for base_rest_url in self.base_rest_urls:\n self.run_cli_extend(base_rest_url, cmd_apv_no_group)", "def delete(self, request, pool_id):\n conn = get_sdk_connection(request)\n retry_on_conflict(\n conn, conn.load_balancer.delete_pool,\n pool_id,\n load_balancer_getter=pool_get_load_balancer_id,\n resource_id=pool_id)", "def 
delete_device_pool(arn=None):\n pass", "def remove_pool(ctx, pool_name):\n \n entryFound = False\n table = \"NAT_POOL\"\n key = pool_name\n\n if len(pool_name) > 32:\n ctx.fail(\"Invalid pool name. Maximum allowed pool name is 32 characters !!\")\n\n config_db = ConfigDBConnector()\n config_db.connect()\n\n data = config_db.get_entry(table, key)\n if not data:\n click.echo(\"Trying to delete pool, which is not present.\")\n entryFound = True\n\n binding_dict = config_db.get_table('NAT_BINDINGS')\n if binding_dict and entryFound == False: \n for binding_name, binding_values in binding_dict.items():\n if binding_values['nat_pool'] == pool_name:\n click.echo(\"Pool is not removed, as it is mapped to Binding {}, remove the pool binding first !!\".format(binding_name))\n entryFound = True\n break\n\n if entryFound == False:\n config_db.set_entry(table, key, None)", "def test_dhcp_pool_uninstall(self):\n self._common_uninstall_delete(\n 'esg_id|pool_id', dhcp_pool.delete,\n {'pool': {}},\n delete_args=['dhcpPoolID'],\n delete_kwargs={\n 'uri_parameters': {'edgeId': 'esg_id', 'poolID': 'pool_id'}\n }\n )", "def csDeletePool(self,poolid,usedid):\n\n logger.debug(\"Attempting to delete pool.\")\n\n url = self.csurl + \"/polcentral/v1_0/pools/delete/\"+poolid\n payload = {\"requestorid\":usedid,\"disallowlostfound\":False}\n\n try:\n r = requests.delete(url, data=json.dumps(payload))\n except Exception:\n logger.error(\"Exception during api call to add pool.\")\n return 'Error'\n\n if r.status_code == 200:\n logger.debug(\"Pool with ID '\"+poolid+\"' was successfully deleted.\")\n return 'Success'\n else:\n logger.error(\"Pool with ID '\"+poolid+\"' was not deleted. 
Error code is \"+str(r.status_code)+\".\")\n return 'Error'", "def mac_pool_remove(handle, name, parent_dn=\"org-root\"):\r\n dn = parent_dn + '/mac-pool-' + name\r\n mo = handle.query_dn(dn)\r\n if mo:\r\n handle.remove_mo(mo)\r\n handle.commit()\r\n else:\r\n raise ValueError(\"MAC Pool is not available\")", "async def delete_work_pool(\n self,\n work_pool_name: str,\n ):\n try:\n await self._client.delete(f\"/work_pools/{work_pool_name}\")\n except httpx.HTTPStatusError as e:\n if e.response.status_code == status.HTTP_404_NOT_FOUND:\n raise prefect.exceptions.ObjectNotFound(http_exc=e) from e\n else:\n raise", "def destroy_pool(self, context, pool_id=None, host=None):\n if self.cache.get_by_pool_id(pool_id):\n self.destroy_device(pool_id)", "def post_loadbalancer_pool_delete(self, resource_id, resource_dict):\n pass", "def fusion_api_delete_storage_pool(self, uri=None, api=None, headers=None):\n return self.pool.delete(uri=uri, api=api, headers=headers)", "def pre_loadbalancer_pool_delete(self, resource_id):\n pass", "def clearPool(self):\n return self.request('clearPool')", "def storage_pools_delete(context, storage_pools_id_list):\n session = get_session()\n with session.begin():\n for storage_pool_id in storage_pools_id_list:\n LOG.debug('deleting storage_pool {0}:'.format(storage_pool_id))\n query = _storage_pool_get_query(context, session)\n result = query.filter_by(id=storage_pool_id).delete()\n\n if not result:\n LOG.error(exception.StoragePoolNotFound(storage_pool_id))\n\n return", "def post_floating_ip_pool_delete(self, resource_id, resource_dict):\n pass", "def ReleaseIdPool(self, pool):\n\n self.__free_ids += self.__idpools.pop(pool, [])", "def remove_pools(ctx):\n\n config_db = ConfigDBConnector()\n config_db.connect()\n\n entryFound = False\n pool_table_name = 'NAT_POOL'\n binding_table_name = 'NAT_BINDINGS'\n binding_dict = config_db.get_table(binding_table_name)\n pool_dict = config_db.get_table(pool_table_name)\n if pool_dict:\n for pool_key_name 
in pool_dict:\n entryFound = False\n for binding_name, binding_values in binding_dict.items():\n if binding_values['nat_pool'] == pool_key_name:\n click.echo(\"Pool {} is not removed, as it is mapped to Binding {}, remove the pool binding first !!\".format(pool_key_name,binding_name))\n entryFound = True\n break\n\n if entryFound == False: \n config_db.set_entry(pool_table_name, pool_key_name, None)", "def delIPfromPool(self,ippool_name,ips):\n self.__delIPfromPoolCheckInput(ippool_name,ips)\n ippool_obj=ippool_main.getLoader().getIPpoolByName(ippool_name)\n self.__delIPfromPoolDB(ippool_obj.getIPpoolID(),ips)\n self.__reloadIPpool(ippool_obj.getIPpoolID())", "def pre_floating_ip_pool_delete(self, resource_id):\n pass", "def delete_pool(self, service, bigips):\n loadbalancer = service.get('loadbalancer')\n pool = self.service_adapter.get_pool(service)\n members = service.get('members', list())\n\n error = None\n for bigip in bigips:\n try:\n self.pool_helper.delete(bigip, name=pool[\"name\"],\n partition=pool[\"partition\"])\n except HTTPError as err:\n if err.response.status_code != 404:\n error = f5_ex.PoolDeleteException(err.message)\n LOG.error(\"Failed to remove pool %s from %s: %s\",\n pool['name'], bigip, error.message)\n except Exception as err:\n error = f5_ex.PoolDeleteException(err.message)\n LOG.error(\"Failed to remove pool %s from %s: %s\",\n pool['name'], bigip, error.message)\n\n for member in members:\n self._delete_member_node(loadbalancer, member, bigip)\n\n return error", "async def delete_work_pool(\n work_pool_name: str = Path(..., description=\"The work pool name\", alias=\"name\"),\n worker_lookups: WorkerLookups = Depends(WorkerLookups),\n db: OrionDBInterface = Depends(provide_database_interface),\n):\n\n if work_pool_name.lower().startswith(\"prefect\"):\n raise HTTPException(\n status_code=status.HTTP_403_FORBIDDEN,\n detail=\"Worker pools starting with 'Prefect' are reserved for internal use and can not be deleted.\",\n )\n\n async with 
db.session_context(begin_transaction=True) as session:\n work_pool_id = await worker_lookups._get_work_pool_id_from_name(\n session=session, work_pool_name=work_pool_name\n )\n\n await models.workers.delete_work_pool(\n session=session, work_pool_id=work_pool_id, db=db\n )", "def storage_pool_delete_by_storage(context, storage_id):\n _storage_pool_get_query(context).filter_by(storage_id=storage_id).delete()", "def delete_global_ip_pool(self,\n id,\n headers=None,\n **request_parameters):\n check_type(headers, dict)\n check_type(id, basestring,\n may_be_none=False)\n if headers is not None:\n if 'X-Auth-Token' in headers:\n check_type(headers.get('X-Auth-Token'),\n basestring, may_be_none=False)\n\n _params = {\n }\n _params.update(request_parameters)\n _params = dict_from_items_with_values(_params)\n\n path_params = {\n 'id': id,\n }\n\n with_custom_headers = False\n _headers = self._session.headers or {}\n if headers:\n _headers.update(dict_of_str(headers))\n with_custom_headers = True\n\n e_url = ('/dna/intent/api/v1/global-pool/{id}')\n endpoint_full_url = apply_path_params(e_url, path_params)\n if with_custom_headers:\n json_data = self._session.delete(endpoint_full_url, params=_params,\n headers=_headers)\n else:\n json_data = self._session.delete(endpoint_full_url, params=_params)\n\n return self._object_factory('bpm_f9079863c95acd945c51f728cbf81f_v2_2_1', json_data)", "def delete_pool(self,\n instance_id: str,\n pool_id: str,\n *,\n x_correlation_id: str = None,\n **kwargs\n ) -> DetailedResponse:\n\n if instance_id is None:\n raise ValueError('instance_id must be provided')\n if pool_id is None:\n raise ValueError('pool_id must be provided')\n headers = {\n 'X-Correlation-ID': x_correlation_id\n }\n sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,\n service_version='V1',\n operation_id='delete_pool')\n headers.update(sdk_headers)\n\n if 'headers' in kwargs:\n headers.update(kwargs.get('headers'))\n\n url = 
'/instances/{0}/pools/{1}'.format(\n *self.encode_path_vars(instance_id, pool_id))\n request = self.prepare_request(method='DELETE',\n url=url,\n headers=headers)\n\n response = self.send(request)\n return response" ]
[ "0.8177328", "0.7949083", "0.7743754", "0.75254697", "0.74241215", "0.73715234", "0.73054725", "0.7275032", "0.7219141", "0.7130311", "0.71098506", "0.70405763", "0.69057125", "0.6834383", "0.66645586", "0.65110606", "0.6488428", "0.6485374", "0.64453715", "0.64181364", "0.638871", "0.63850105", "0.6329261", "0.6298553", "0.62651074", "0.6263193", "0.6234076", "0.6086259", "0.59987354", "0.5987362" ]
0.81102216
1
Return the probability of a trigram
def trigram_prob(self,u,v,w): ################### # Use the trigram_counts to get q(w|u,v) choices = self.trigram_counts[(u,v)] total = sum(choices.values())+sum(self.word_counts.values()) trigram_probability = (self.trigram_counts[(u,v)][w]+1)/float(total) return trigram_probability
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def raw_trigram_probability(self, trigram):\n num = self.trigramcounts[trigram]\n den = self.bigramcounts[trigram[:-1]]\n\n # ??? why would there be a case where trigram has length 2 ???\n if len(trigram) == 2:\n print(trigram)\n return self.raw_bigram_probability(trigram)\n\n # check for in consistency\n if den == 0 and num != 0:\n print(trigram, trigram[:-1], den, num)\n return 1\n\n # if never seen\n if den == 0 :\n return 1/len(self.bigramcounts)\n\n return num/den", "def raw_trigram_probability(self,trigram):\n\n assert len(trigram) == 3\n bigram = trigram[:2]\n count_bigram = self.bigramcounts[bigram]\n count_trigram = self.trigramcounts[trigram]\n if count_bigram == 0 :\n return 1 / self.total_sentence_count\n else :\n return (count_trigram / count_bigram)", "def raw_unigram_probability(self, unigram):\n result = 0.0\n result = self.unigramcounts[unigram]/self.total\n\n #hint: recomputing the denominator every time the method is called\n # can be slow! You might want to compute the total number of words once, \n # store in the TrigramModel instance, and then re-use it. \n\n\n return result", "def raw_unigram_probability(self, unigram):\n\n #hint: recomputing the denominator every time the method is called\n # can be slow! 
You might want to compute the total number of words once, \n # store in the TrigramModel instance, and then re-use it.\n num = self.unigramcounts[unigram]\n den = self.word_count\n return num/den", "def raw_unigram_probability(self, unigram):\n\n assert len(unigram) == 1\n count_unigram = self.unigramcounts[unigram]\n if self.total_word_count == 0 :\n return 1 / self.total_sentence_count\n else :\n return (count_unigram / self.total_word_count)", "def raw_trigram_probability(self,trigram):\n\n result = 0.0\n try:\n bigram = (trigram[0],trigram[1],)\n result = self.trigramcounts[trigram]/self.bigramcounts[bigram]\n except Exception as e:\n pass\n else:\n pass\n return result", "def get_trigram_log_prob(self, trigram):\n w1, w2, w3 = trigram\n if (\n w1 in self.ngrams[3][\"data\"]\n and w2 in self.ngrams[3][\"data\"][w1]\n and w3 in self.ngrams[3][\"data\"][w1][w2]\n ):\n return self.ngrams[3][\"data\"][w1][w2][w3]\n return Decimal(1.0).log10() - Decimal(int(self.ngrams[1][\"count\"]) ** 2).log10()", "def smoothed_trigram_probability(self, trigram):\n lambda1 = 1/3.0\n lambda2 = 1/3.0\n lambda3 = 1/3.0\n\n assert len(trigram) == 3\n\n unigram = trigram[:1]\n bigram = trigram[:2]\n return (lambda1 * self.raw_trigram_probability(trigram) \n + lambda2 * self.raw_bigram_probability(bigram) \n + lambda3 * self.raw_unigram_probability(unigram))", "def smoothed_trigram_probability(self, trigram):\n lambda1 = 1/3\n lambda2 = 1/3\n lambda3 = 1/3\n\n tri_prob = self.raw_trigram_probability(trigram)\n bi_prob = self.raw_bigram_probability(trigram[1:])\n uni_prob = self.raw_unigram_probability(trigram[2:])\n\n # check for incorrect probabilities\n if tri_prob > 1 or bi_prob > 1 or uni_prob >1:\n print(\"In correct probabilities : \", trigram, tri_prob, bi_prob, uni_prob)\n\n result = lambda1 * tri_prob + lambda2 * bi_prob + lambda3 * uni_prob\n\n return result", "def trigram_model(list_of_words, bigram_count, trigram_count):\n c_start = list_of_words.count(start_phrase)\n c_end 
= list_of_words.count(end_phrase)\n if c_start == 0:\n list_of_words.insert(0, start_phrase)\n list_of_words.insert(0, start_phrase)\n if c_start == 1:\n list_of_words.insert(0, start_phrase)\n if c_end == 0:\n list_of_words.append(end_phrase)\n list_of_words.append(end_phrase)\n if c_end == 1:\n list_of_words.append(end_phrase)\n bigram_count = pd.read_csv(bigram_count)\n trigram_count = pd.read_csv(trigram_count)\n proba_dict = {list_of_words[i] + \" \" + list_of_words[i+1] + \" \" + list_of_words[i+2]:\n ((trigram_count[list_of_words[i] + \" \" + list_of_words[i+1] + \" \" + list_of_words[i+2]].values[0]) /\n float(bigram_count[list_of_words[i] + \" \" + list_of_words[i+1]].values[0]))\n if list_of_words[i] + \" \" + list_of_words[i+1] + \" \" + list_of_words[i+2] in trigram_count.columns.values else 0.0 for i in xrange(len(list_of_words) - 2)}\n return proba_dict", "def smoothed_trigram_probability(self, trigram):\n lambda1 = 1/3.0\n lambda2 = 1/3.0\n lambda3 = 1/3.0\n p_uni = self.raw_unigram_probability((trigram[2],))\n p_bi = self.raw_bigram_probability((trigram[1],trigram[2],))\n p_tir = self.raw_trigram_probability(trigram)\n result = 0.0\n result = lambda1*p_tir+lambda2*p_bi+lambda3*p_uni\n return result", "def get_ngramlogprobs(freqdict):\n return", "def raw_bigram_probability(self, bigram):\n if bigram == (\"START\", \"START\"):\n return 1/2\n num = self.bigramcounts[bigram]\n den = self.unigramcounts[bigram[:1]]\n if den == 0:\n return 1/len(self.unigramcounts)\n\n return num/den", "def get_ngramlogprobs_fromcorpus(tokenizedseqs, n):\n return", "def raw_bigram_probability(self, bigram):\n result = 0.0\n try:\n unigram = (bigram[0],)\n result = self.bigramcounts[bigram]/self.unigramcounts[unigram]\n except Exception as e:\n pass\n else:\n pass\n\n return result", "def get_probability(self, sentence):\n if len(sentence) == 1:\n return Decimal(10) ** self.get_unigram_log_prob(sentence)\n elif len(sentence) == 2:\n return Decimal(10) ** 
self.get_bigram_log_prob(sentence)\n else:\n log_prob = Decimal(0.0)\n for w1, w2, w3 in zip(sentence, sentence[1:], sentence[2:]):\n log_prob += self.get_trigram_log_prob((w1, w2, w3))\n log_prob = Decimal(log_prob)\n return Decimal(10) ** log_prob", "def raw_bigram_probability(self, bigram):\n\n assert len(bigram) == 2\n unigram = bigram[:1]\n count_unigram = self.unigramcounts[unigram]\n count_bigram = self.bigramcounts[bigram]\n if count_unigram == 0 :\n return 1 / self.total_sentence_count\n else :\n return (count_bigram / count_unigram)", "def score(self, sentence):\n # TODO your code here\n\n # initialize count with trained data\n unigram_count = self.count.copy()\n N = self.total\n\n # make a new key for UNK, add-one later\n for token in sentence:\n if token not in unigram_count:\n unigram_count[token] = 0\n\n # calcutate lopP(<s>) + logP(w1) + logP(w2) + ...\n score = 0.0 # P(<s>) = 1\n V = len(unigram_count) # the number of vocab including UNK\n for word in sentence:\n prob = float((unigram_count[word] + 1) / (N + V)) # c(w) + 1 / N + V\n score += math.log(prob)\n\n return score", "def get_probability(some_dict, some_string):\n lowercase_review = some_string.lower()\n split_review = lowercase_review.split()\n product = 1 \n for word in split_review:\n if word not in some_dict:\n probability = 0.00009\n #assigning unknown words a probability very close to zero\n else: \n probability = some_dict[word]\n product *= probability\n return product", "def sentence_probability(self, sentence, ngram_type=1):\n sentence_word_list = sentence.lower().split()\n prob = 0 # sentence probability\n if ngram_type == 1:\n for i in range(len(sentence_word_list)):\n prob = prob + self.n_gram_MLE(sentence_word_list[i], gram_type=1)\n\n if ngram_type == 2:\n for i in range(len(sentence_word_list)):\n if i >= len(sentence_word_list) - 1:\n break\n prob = prob + self.n_gram_MLE(sentence_word_list[i],\n sentence_word_list[i] + ' ' + sentence_word_list[i + 1], gram_type=2)\n\n if 
ngram_type == 3:\n for i in range(len(sentence_word_list)):\n if i >= len(sentence_word_list) - 2:\n break\n prob = prob + self.n_gram_MLE(sentence_word_list[i] + ' ' + sentence_word_list[i + 1],\n sentence_word_list[i] + ' ' + sentence_word_list[i + 1] + ' ' +\n sentence_word_list[i + 2], gram_type=3)\n\n if ngram_type == 4:\n for i in range(len(sentence_word_list)):\n if i >= len(sentence_word_list) - 3:\n break\n prob = prob + self.n_gram_MLE(sentence_word_list[i] + ' ' + sentence_word_list[i + 1] + ' ' +\n sentence_word_list[i + 2],\n sentence_word_list[i] + ' ' + sentence_word_list[i + 1] + ' ' +\n sentence_word_list[i + 2] + sentence_word_list[i + 3], gram_type=4)\n\n if ngram_type == 5:\n for i in range(len(sentence_word_list)):\n if i >= len(sentence_word_list) - 4:\n break\n prob = prob + self.n_gram_MLE(sentence_word_list[i] + ' ' + sentence_word_list[i + 1] + ' ' +\n sentence_word_list[i + 2] + sentence_word_list[i + 3],\n sentence_word_list[i] + ' ' + sentence_word_list[i + 1] + ' ' +\n sentence_word_list[i + 2] + sentence_word_list[i + 3] +\n sentence_word_list[i + 4], gram_type=5)\n\n return prob", "def probability(self, words):\n if len(words) == 0:\n return 0\n \n prob = 1\n model = self.mdl\n \n words_ngram = NGramLM(self.N, []).create_ngrams(words) # Create NGram model for words\n for ngram in words_ngram:\n # Never seen before ngram or n-1gram\n if (ngram not in list(model['ngram'])) or (ngram[:-1] not in list(model['n1gram'])):\n return 0\n if isinstance(self, NGramLM):\n prob *= model[model['ngram'] == ngram]['prob'].values[0]\n \n def recur_prob(model, w):\n prob = 1\n prev_mod = model.prev_mdl\n if isinstance(prev_mod, UnigramLM): # Unigram base case\n prob *= prev_mod.mdl[w[0]]\n else:\n words_n1gram = NGramLM(prev_mod.N, []).create_ngrams(w) # Create NGram model for words\n prob *= prev_mod.mdl[prev_mod.mdl['ngram'] == words_n1gram[0]]['prob'].values[0]\n prob *= recur_prob(prev_mod, words_n1gram[0]) # Recursive call\n return prob\n\n prob 
*= recur_prob(self, words_ngram[0])\n \n return prob", "def score_sequence(seq, ngramlogprobs):\n return", "def estimate_probability(word, previous_n_gram, \r\n n_gram_counts, n_plus1_gram_counts, vocabulary_size, k=1.0):\r\n # Note : 1 . Here we are actually not considering the end token or start token as a part of a vocabulary.\r\n # 2 . Although the literature says we need to prepend the n-1 SOS tokens but in reality we are prepending n SOS tokens\r\n \r\n # convert list to tuple to use it as a dictionary key\r\n previous_n_gram = tuple(previous_n_gram)\r\n\r\n previous_n_gram_count = n_gram_counts.get(previous_n_gram, 0)\r\n \r\n\r\n denominator = float(previous_n_gram_count + (k*vocabulary_size))\r\n\r\n n_plus1_gram = previous_n_gram + (word,)\r\n \r\n\r\n n_plus1_gram_count = n_plus1_gram_counts.get(n_plus1_gram, 0)\r\n \r\n\r\n numerator = float(n_plus1_gram_count + k)\r\n\r\n probability = float(numerator/denominator)\r\n \r\n \r\n return probability", "def sentence_logprob(self, sentence):\n grams = get_ngrams(sentence, 3)\n p = 1\n\n for gram in grams:\n p *= np.longfloat(self.smoothed_trigram_probability(gram))\n\n return np.log2(p)", "def unigram_model(list_of_words, unigram_count, N=count_token()):\n d = pd.read_csv(unigram_count)\n proba_dict = {list_of_words[i]: (d[el].values[0] / float(N)) if el in d.columns.values else 0.0 for i, el in enumerate(list_of_words) }\n return proba_dict", "def generate_transition_trigram_probabilities(transition_bigram_counts, transition_trigram_counts):\r\n\ttransition_trigram_probabilities = dict()\r\n\tfor tag_trigram in transition_trigram_counts:\r\n\t\ttransition_trigram_probabilities[tag_trigram] = float(transition_trigram_counts[tag_trigram])/transition_bigram_counts[(tag_trigram[0], tag_trigram[1])]\r\n\treturn transition_trigram_probabilities", "def predict(self, sentence, smoothing=None):\n words = sentence.split()\n words.append(\"STOP\")\n probability = 1.0\n\n words = [self.START_SYMBOL, self.START_SYMBOL] 
+ words\n ###################\n # Compute the probability of a sentence under the trigram model\n # p(x1,..,xn)= \\prod {q(x_i| x_{i-2}x_{i-1}}\n for i in xrange(len(words)-2):\n probability *= self.trigram_prob(words[i], words[i+1], words[i+2])\n\n return probability", "def get_ngram_prob(self, label_seq):\n curr_ngram = self.all_grams\n for i in range(0, len(label_seq)):\n label = label_seq[i]\n if i == len(label_seq) - 1:\n denom = curr_ngram.get_count() + self.SMOOTHING_VALUE * 9\n curr_ngram = curr_ngram.get_next_Ngram(label)\n # For smoothing, just add self.SMOOTHING_VALUE\n numer = curr_ngram.get_count() + self.SMOOTHING_VALUE\n return float(numer) / denom", "def sentence_logprob(self, sentence):\n sum_logprob = 0\n trigrams = get_ngrams(sentence, 3)\n for trigram in trigrams :\n sum_logprob += math.log2(self.smoothed_trigram_probability(trigram))\n\n return sum_logprob", "def sentence_logprob(self, sentence):\n line = get_ngrams(sentence,3)\n log_por = 0.0\n for item in line:\n raw_por = self.smoothed_trigram_probability(item)\n log_por = log_por+math.log2(raw_por)\n\n return float(log_por)" ]
[ "0.7751923", "0.76878923", "0.7636409", "0.7597849", "0.7516805", "0.7431036", "0.73478115", "0.7341473", "0.73259956", "0.72539866", "0.72048503", "0.7177857", "0.7106165", "0.7045311", "0.7008787", "0.69239146", "0.69228655", "0.68907607", "0.6865925", "0.6857251", "0.6838236", "0.68228585", "0.67958623", "0.6780021", "0.67789596", "0.67770594", "0.6761645", "0.67556715", "0.67091185", "0.6656942" ]
0.7807621
0
print trigrams and their counts
def print_trigrams_count(self): for u_v in self.trigram_counts: for w in self.trigram_counts[u_v]: count=self.trigram_counts[u_v][w] print "{2}\tc({0} {1})".format(u_v,w,count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_trigrams():\n tokenizer = Tokenizer(trigram_freq=0.5)\n X = tokenizer.transform([[\"a b c a b c\"]])\n assert X[\"corpus\"][0] == [\"a_b_c\", \"a_b_c\"]\n assert isinstance(tokenizer.trigrams, pd.DataFrame)", "def count_ngrams(self):\n self.unigram = self.count_unigram(self.word_list)\n self.bigram = self.count_bigram(self.word_list)\n # self.trigram = self.count_trigram(self.word_list)\n # self.four_gram = self.count_four_gram(self.word_list)\n # self.five_gram = self.count_five_gram(self.word_list)", "def trigram(self, input):\n for l in input:\n line = l.strip()\n y1, y2, y3 = line.split(' ')\n if line:\n print line, log(self.q(y3, y1, y2))", "def _profile(self, text):\n prof = zeros(len(self.alph)**self.N)\n ngs = ngrams(text, self.N)\n for tup in ngs:\n loc = 0\n for i in range(len(tup)):\n loc += (len(self.alph)**i) * self.alph.index(tup[i])\n prof[loc] += 1\n return prof", "def trigrams(sentence):\n return [word+'_'+sentence[i+1]+'_'+sentence[i+2] \n if word+'_'+sentence[i+1]+'_'+sentence[i+2] in word_ids else None \n for i, word in enumerate(sentence[:-2])] if config.USE_TRIGRAMS else []", "def _count(self):\n words = [word.lower() for word in self.corpus.words()]\n bigrams_words = bigrams(words)\n for bigram in bigrams_words:\n self._bigrams[bigram] += 1", "def trigrams(content):\n for item in range(len(content) - 2):\n buld_tupls = tuple(contents[item: item + 2])\n # test to make sure that two items are in a tuple\n # print(buld_tupls)\n # print(type(buld_tupls))", "def count_ngrams(self, corpus):\n \n self.unigramcounts = defaultdict(int)\n self.bigramcounts = defaultdict(int)\n self.trigramcounts = defaultdict(int)\n\n self.sentence_counts = 0\n self.word_count = 0\n\n for line in corpus:\n sequence = line\n self.sentence_counts +=1\n\n unigrams = get_ngrams(sequence, n=1)\n for gram in unigrams:\n self.word_count += 1\n self.unigramcounts[gram] +=1\n\n bigrams = get_ngrams(sequence, n=2)\n for gram in bigrams:\n self.bigramcounts[gram] 
+=1\n\n trigrams = get_ngrams(sequence, n=3)\n for gram in trigrams:\n self.trigramcounts[gram] +=1\n\n #self.unigramcounts[('START')] = self.sentence_counts *2\n self.bigramcounts[('START', 'START')] = self.sentence_counts\n\n #return self", "def match_all_phrases(self, inphrases):\n# temporary - attempted matches\n attempted_matches = []\n phrase_attempts = {}\n phrase = \"\"\n step = \"A\"\n # ALL full phrases \n for phrase in inphrases:\n phrase_attempts[phrase] = 1\n attempted_matches.append(phrase + ':' + step)\n if phrase in self.cls_phrases:\n match_choices = self.cls_phrases[phrase]\n #return match_choices, attempted_matches, phrase\n return (self.get_list_counts(match_choices), attempted_matches, \n phrase, self.get_most_common(match_choices))\n\n # Normalised version of ALL all full phrases \n phrases = [self.get_normalised_phrase(p) for p in inphrases]\n\n # 3 all prefix trigrams \n step = \"3\"\n for ngram in [p.split()[0:3] for p in phrases if len(p.split()) > 2]:\n phrase = ' '.join(ngram)\n phrase_attempts[phrase] = 1\n attempted_matches.append(phrase + ':' + step)\n if phrase in self.cls_phrases:\n match_choices = self.cls_phrases[phrase]\n return (self.get_list_counts(match_choices), attempted_matches, \n phrase, self.get_most_common(match_choices))\n\n # 2 all prefix bigrams \n step = \"2\"\n for ngram in [p.split()[0:2] for p in phrases if len(p.split()) > 1]:\n phrase = ' '.join(ngram)\n phrase_attempts[phrase] = 1\n attempted_matches.append(phrase + ':' + step)\n if phrase in self.cls_phrases:\n match_choices = self.cls_phrases[phrase]\n return (self.get_list_counts(match_choices), attempted_matches, \n phrase, self.get_most_common(match_choices))\n\n # 1 all valid words \n step = \"1\"\n for phr_elem in phrases:\n #print phr_elem.split()\n for phrase in [w.strip() for w in phr_elem.split() \n if self.isExcluded(w.strip()) == False and w.strip() not in phrase_attempts]:\n #print \"***\", phrase\n phrase_attempts[phrase] = 1\n 
attempted_matches.append(phrase + ':' + step)\n if phrase in self.cls_phrases:\n match_choices = self.cls_phrases[phrase]\n return (self.get_list_counts(match_choices), attempted_matches, \n phrase, self.get_most_common(match_choices))\n\n return [], attempted_matches, phrase, None", "def count_ngrams(self, corpus):\n \n self.unigramcounts = {} # might want to use defaultdict or Counter instead\n self.bigramcounts = {} \n self.trigramcounts = {} \n\n self.total = 2\n ##Your code here\n\n for sentence in corpus:\n temp_1 = get_ngrams(sentence,1)\n temp_2 = get_ngrams(sentence,2)\n temp_3 = get_ngrams(sentence,3)\n for i in range(len(temp_1)):\n if temp_1[i] in self.unigramcounts:\n self.unigramcounts[temp_1[i]] += 1\n else:\n self.unigramcounts[temp_1[i]] = 1\n self.total += 1\n\n for i in range(len(temp_2)):\n if temp_2[i] in self.bigramcounts:\n self.bigramcounts[temp_2[i]] += 1\n else:\n self.bigramcounts[temp_2[i]] = 1\n\n for i in range(len(temp_3)):\n if temp_3[i] in self.trigramcounts:\n self.trigramcounts[temp_3[i]] += 1\n else:\n self.trigramcounts[temp_3[i]] = 1\n return", "def trigrams_to_new_text(tris):\n new_text_list = []\n target_length = 50\n current_key = random.choice(list(tris.keys()))\n new_text_list.extend(current_key)\n while len(new_text_list) < target_length:\n list_of_candidates = tris[current_key]\n third_word = random.choice(list_of_candidates)\n current_key = current_key[1], third_word\n new_text_list.append(third_word)\n return \" \".join(new_text_list)", "def __tagsToNgrams__(self):\n bigrams = defaultdict(int)\n trigrams = defaultdict(int)\n for tags in self.getTags():\n tags = list(tags)\n for i in range(2):\n tags.insert(0, BEGIN)\n for k in range(2, len(tags)):\n trigrams[tuple(tags[k-2:k+1])] += 1\n bigrams[tuple(tags[k-1:k+1])] += 1\n return bigrams, trigrams", "def getgrams(text, tokendict):\n n = len(text)\n grams = []\n # Get lower-case of words\n if n >= 1:\n grams.append(tokendict[text[0]].lemma.lower())\n 
grams.append(tokendict[text[-1]].lemma.lower())\n grams.append(tokendict[text[0]].pos)\n grams.append(tokendict[text[-1]].pos)\n if n >= 2:\n token = tokendict[text[0]].lemma.lower() \\\n + ' ' + tokendict[text[1]].lemma.lower()\n grams.append(token)\n token = tokendict[text[-2]].lemma.lower() \\\n + ' ' + tokendict[text[-1]].lemma.lower()\n grams.append(token)\n return grams", "def getBiTrigrams(self, text):\n bigram_measures = cl.BigramAssocMeasures()\n trigram_measures = cl.TrigramAssocMeasures()\n tk = RegexpTokenizer(r'\\w+')\n\n st = LancasterStemmer()\n\n text = tk.tokenize(text.lower())\n job_vec = [st.stem(word) for word in text if word not in self.stopWords]\n\n bigrams = []\n trigrams = []\n collocations = cl.BigramCollocationFinder.from_words(job_vec)\n tri_collocations = cl.TrigramCollocationFinder.from_words(job_vec)\n top10 = collocations.score_ngrams(bigram_measures.raw_freq)\n top10 = sorted(bigram for bigram,score in top10)\n tri_top10 = tri_collocations.score_ngrams(bigram_measures.raw_freq)\n tri_top10 = sorted(trigram for trigram,score in tri_top10)\n for coll in top10:\n bigrams.append(coll[0] + ' ' + coll[1])\n for tri_coll in tri_top10:\n trigrams.append(tri_coll[0] + ' ' + tri_coll[1] + ' ' + tri_coll[2])\n\n return bigrams, trigrams", "def _generateUnigrams(self,text):\n self.unigrams=self._generateNgrams(text,1)", "def _iter_strings(self, indent=1, show_counts=True):\n ...", "def main():\n for line in read_mapper_input(sys.stdin):\n # Split document ID and document string\n docid = line.split('|')[0]\n document = line.split('|')[1]\n \n frequencies = {}\n # Crudely tokenize document into words and tally up word counts. 
This\n # works best if preprocessing strips punctuation, removes stopwords,\n # performs stemming, etc.\n for word in document.split():\n try:\n frequencies[word] += 1\n except KeyError:\n frequencies[word] = 1\n \n # Print term frequencies to stdout for ingestion by reducer.\n for word in frequencies:\n print '%s\\t%s\\t%s' % (word, docid, tf(word, document))", "def test_ngram():\n # Some examples of functions usage\n trigram_counts, bigram_counts, unigram_counts, token_count = train_ngrams(S_train)\n print \"#trigrams: \" + str(len(trigram_counts))\n print \"#bigrams: \" + str(len(bigram_counts))\n print \"#unigrams: \" + str(len(unigram_counts))\n print \"#tokens: \" + str(token_count)\n perplexity = evaluate_ngrams(S_dev, trigram_counts, bigram_counts, unigram_counts, token_count, 0.5, 0.4)\n print \"#perplexity: \" + str(perplexity)\n ### YOUR CODE HERE\n print(vocabsize)\n ### END YOUR CODE", "def set_grams(data_path,top=100):\n files = glob.glob(data_path + \"/*/*word*.txt\") # txt files in subfolders\n ngram = []\n table = str.maketrans(\"\",\"\",string.punctuation)\n for f_in in files:\n with open(f_in, 'r') as fi:\n for lines in fi:\n item = lines.replace(\"\\n\",\"\").split()\n term = \"\"\n count = 0\n if len(item)==3: # bigrams\n term0 = str(item[0]).translate(table).strip()\n term1 = str(item[1]).translate(table).strip()\n term = \"{},{}\".format(term0,term1) if (len(term0)>2 and len(term1)>2 and not term0.isnumeric() and not term1.isnumeric()) else (term0 if (len(term0)>2 and not term0.isnumeric()) else (term1 if (len(term1)>2 and not term1.isnumeric()) else \"\")) # comma(,) for OR in Twitter \n count = int(item[2])\n elif len(item)==2: # unigrams\n term = str(item[0]).translate(table).strip()\n count = int(item[1])\n if count>=top and str(term) != 'nan' and len(term)>=3: # ignore term freq minor than top and term length than 3\n ngram.append(term)\n fi.close()\n gn_set = set(ngram)\n \n print(len(gn_set))\n \n f = open(data_path+\".txt\", 'w')\n 
for w in gn_set:\n f.write('{}\\n'.format(w))\n f.close()\n \n return list(gn_set)", "def _generateNgrams(self,text,n=2):\n token = Utilities.CVTokeniser(text)\n # token = nltk.word_tokenize(text)\n computedNgrams=ngrams(token,n)\n return Counter(computedNgrams)", "def make_trigrams(texts,trigram_mod,bigram_mod):\n return [trigram_mod[bigram_mod[doc]] for doc in texts]", "def train_ngrams(dataset):\n trigram_counts = dict()\n bigram_counts = dict()\n unigram_counts = dict()\n token_count = 0\n\n ### YOUR CODE HERE\n def enterDic(phrase, dict):\n if phrase in dict:\n dict[phrase] += 1\n else:\n dict[phrase] = 1\n\n unigram_counts[word_to_num['UUUNKKK']] = 0\n\n for sentence in dataset:\n enterDic(sentence[1], unigram_counts) # count number of start of sentences\n enterDic((sentence[0], sentence[1]), bigram_counts) # count number of start of sentences\n token_count += 2\n for i in range(2, len(sentence)):\n token_count += 1\n enterDic(sentence[i], unigram_counts)\n enterDic((sentence[i - 1], sentence[i]), bigram_counts)\n enterDic((sentence[i - 2], sentence[i - 1], sentence[i]), trigram_counts)\n ### END YOUR CODE\n return trigram_counts, bigram_counts, unigram_counts, token_count", "def main():\n wf = WordFrequencies()\n tokens = wf.tokenize(sys.argv[1])\n occurrences = wf.computeWordFrequencies(tokens)\n wf.print(occurrences)", "def get_counts(data):\n\n bigrams = {}\n unigrams = {}\n #range is len-1 because the bigram uses ith+1 element\n data=list(data)\n for i in range(0, len(data)-1):\n #ith element and ith+1 element\n bigram=(data[i],data[i+1])\n if(bigram in bigrams):\n count=bigrams[bigram]\n bigrams[bigram]= count+1\n else:\n #if bigram not in dict of bigrams, add with count 1\n bigrams[bigram]=1\n\n for unigram in data:\n if(unigram in unigrams):\n count=unigrams[unigram]\n unigrams[unigram]= count+1\n else:\n #if unigram not present, add with count 1\n unigrams[unigram]=1\n\n return bigrams,unigrams", "def train_ngrams(dataset):\n trigram_counts = 
dict()\n bigram_counts = dict()\n unigram_counts = dict()\n token_count = 0\n ### YOUR CODE HERE\n raise NotImplementedError\n ### END YOUR CODE\n return trigram_counts, bigram_counts, unigram_counts, token_count", "def __init__(self, n, sents):\n assert n > 0\n self._n = n\n print(\"Counting...\")\n count = defaultdict(int)\n while n >= 0:\n for sent in sents:\n s = sent[:] # En una oracion auxiliar agrego el item de start y end para contarlos\n s.insert(0, \"<s>\")\n s.append(\"</s>\")\n for i in range(len(s) - n + 1):\n count[tuple(s[i:i + n])] += 1\n n -= 1\n count[()] = count[()] - count[('<s>',)] - count[\n ('</s>',)] # Pero no quiero que <s> y </s> sean considerados por ()\n self._count = count\n print(\"Computing vocabulary...\")\n self._voc = voc = set()\n for sent in sents:\n voc = voc.union(set(sent))\n voc.add('</s>')\n self._voc = voc\n self._V = len(voc) # vocabulary size\n print(\"Done\")", "def wcount(lines, topn):\n word = ''\n for i in lines:\n if 65<=ord(i) and ord(i)<=90:\n word = word + i \n elif 97<=ord(i) and ord(i)<=122:\n word = word + i\n else:\n word = word + ' ' \n word = word.split()\n #提取不重复的单词\n alreadyknown = []\n for m in word:\n if m not in alreadyknown:\n alreadyknown.append(m)\n #分别数数,排序,建构字典\n empty = []\n final = {}\n final2 = {}\n for j in alreadyknown:\n number = icount(word,j)\n final[j]=number\n final2[str(number)]=j\n empty.append(number)\n empty.sort()\n empty.reverse()\n last_step = empty[:10]\n #通过数字找到对应word\n last_str = ''\n for y in last_step:\n z = final2[str(y)]\n last_str += z + \"\\t\" + str(y) + \"\\n\"\n return last_str", "def text_report(self):\n\n word_count = self.word_count()\n\n print(\"\\nThere are {} words in the text.\".format(word_count))\n mean, median, mode = self.average_word_length()\n\n print(\"\\nMean, median and mode word length is {}, {}, {}.\".format(mean, median, mode))\n\n if word_count < 10:\n print(\"\\nLongest words:\")\n else:\n print(\"\\n10 longest words:\")\n for s in 
self.longest_words():\n print(s)\n\n print(\"\\nMost common words:\")\n for s in self.common_words():\n print(\"{} x {}\".format(s[1], s[0]))\n\n longest_grams = []\n\n # find n_longest n-grams\n n_longest = 10\n # strongly doubt that there will be n-grams longer than 50\n for i in range(min(50, word_count), 1, -1):\n if len(longest_grams) >= n_longest:\n break\n grams = self.find_ngrams(i)\n grams_list = sorted(grams, key=grams.get, reverse=True)\n\n for g in grams_list:\n if grams[g] > 4:\n # do not want to include n-grams which are substrings of longer n-grams\n substring = False\n for s in longest_grams:\n if g in s[1]:\n substring = True\n break\n if not substring:\n longest_grams.append([grams[g], g])\n\n print(\"\\nLongest n-grams:\")\n for g in longest_grams:\n print(\"{} x {}\".format(g[0], g[1]))\n print('\\n')", "def test_ngram():\n #Some examples of functions usage\n trigram_counts, bigram_counts, unigram_counts, token_count = train_ngrams(S_train)\n print \"#trigrams: \" + str(len(trigram_counts))\n print \"#bigrams: \" + str(len(bigram_counts))\n print \"#unigrams: \" + str(len(unigram_counts))\n print \"#tokens: \" + str(token_count)\n perplexity = evaluate_ngrams(S_dev, trigram_counts, bigram_counts, unigram_counts, token_count, 0.5, 0.4)\n print \"#perplexity: \" + str(perplexity)\n ### YOUR CODE HERE\n ### END YOUR CODE", "def printResults(listWords):\n width = 0\n for word in listWords:\n if len(word.name) > width:\n width = len(word.name)\n for word in listWords:\n lstring = str(word.listOfLines).replace('[','').replace(']','')\n print '%s: %d times, lines: %s' % (word.name.rjust(width), \n word.occurence, lstring)" ]
[ "0.6398917", "0.61486876", "0.61456525", "0.6103407", "0.60742563", "0.6020676", "0.59596294", "0.59009767", "0.5852056", "0.584664", "0.58018523", "0.57938594", "0.57902086", "0.57824033", "0.57702184", "0.5755301", "0.57186836", "0.56907433", "0.5649752", "0.5608072", "0.5605857", "0.56028813", "0.56008554", "0.5582136", "0.5581412", "0.55516136", "0.55432886", "0.5512127", "0.550356", "0.5501697" ]
0.8932815
0
A MP3 file is being downloaded because there of a SyncDescription. This method get a downloaded music file and searches the SyncDescription which was "responsible" for the music file's download.
def sync_description_for_video_id(self, music_file: DownloadedMusicFile) -> SyncDescription: video_id = music_file.get_video_id() sync_descriptions = self.load_sync_descriptions() for description in sync_descriptions: file_channel_archive = self.file_for_channel_archive(description) with open(file_channel_archive, "r") as f: for line in f: if video_id in line: return description raise AttributeError("It is uncertain, where the video {0} came from".format(video_id))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def download_track(self, track = None, url = None):\n # check that track doesn't exist\n if url == None or track == None:\n return\n\n print \"Retrieving the name of the track.\"\n filename = self.get_track_filename(url)\n\n print \"Filename found: \" + filename\n \n if (filename, track.user[\"username\"]) in self.past_songs_db_data or \\\n (filename, \"\") in self.past_songs_db_data or \\\n os.path.isfile(filename): \n print \"File exists\"\n else:\n print \"Downloading\"\n filename = wget.download(url)\n self.set_track_metadata(track, filename, url)\n mp3_name = filename[:-4] + \".mp3\"\n\n # Save filename for future reference\n self.past_songs_db.write(filename + \"\\n\")\n self.past_songs_db_data.append((filename, track.user[\"username\"]))\n \n if not filename.endswith(\".mp3\"):\n self.past_songs_db.write(mp3_name + \"\\n\")\n self.past_songs_db_data.append((mp3_name, track.user[\"username\"]))\n \n print", "def find_and_download_songs(kwargs):\n sponsorblock_postprocessor = []\n reference_file = kwargs[\"reference_file\"]\n files = {}\n with open(reference_file, \"r\", encoding=\"utf-8\") as file:\n for line in file:\n temp = line.split(\";\")\n name, artist, album, i = (\n temp[0],\n temp[1],\n temp[4],\n int(temp[-1].replace(\"\\n\", \"\")),\n )\n\n query = f\"{artist} - {name} Lyrics\".replace(\":\", \"\").replace('\"', \"\")\n print(f\"Initiating download for {query}.\")\n\n file_name = kwargs[\"file_name_f\"](\n name=name, artist=artist, track_num=kwargs[\"track_db\"][i].get(\"playlist_num\")\n )\n\n if kwargs[\"use_sponsorblock\"][0].lower() == \"y\":\n sponsorblock_postprocessor = [\n {\n \"key\": \"SponsorBlock\",\n \"categories\": [\"skip_non_music_sections\"],\n },\n {\n \"key\": \"ModifyChapters\",\n \"remove_sponsor_segments\": [\"music_offtopic\"],\n \"force_keyframes\": True,\n },\n ]\n save_path = kwargs[\"track_db\"][i][\"save_path\"]\n file_path = path.join(save_path, file_name)\n\n mp3file_path = f\"{file_path}.mp3\"\n\n if save_path not 
in files:\n path_files = set()\n files[save_path] = path_files\n else:\n path_files = files[save_path]\n\n path_files.add(f\"{file_name}.mp3\")\n\n if (\n kwargs[\"no_overwrites\"]\n and not kwargs[\"skip_mp3\"]\n and path.exists(mp3file_path)\n ):\n print(f\"File {mp3file_path} already exists, we do not overwrite it \")\n continue\n\n outtmpl = f\"{file_path}.%(ext)s\"\n ydl_opts = {\n \"proxy\": kwargs.get(\"proxy\"),\n \"default_search\": \"ytsearch\",\n \"format\": \"bestaudio/best\",\n \"outtmpl\": outtmpl,\n \"postprocessors\": sponsorblock_postprocessor,\n \"noplaylist\": True,\n \"no_color\": False,\n \"postprocessor_args\": [\n \"-metadata\",\n \"title=\" + name,\n \"-metadata\",\n \"artist=\" + artist,\n \"-metadata\",\n \"album=\" + album,\n ],\n }\n if not kwargs[\"skip_mp3\"]:\n mp3_postprocess_opts = {\n \"key\": \"FFmpegExtractAudio\",\n \"preferredcodec\": \"mp3\",\n \"preferredquality\": \"192\",\n }\n ydl_opts[\"postprocessors\"].append(mp3_postprocess_opts.copy())\n with yt_dlp.YoutubeDL(ydl_opts) as ydl:\n try:\n ydl.download([query])\n except Exception as e: # skipcq: PYL-W0703\n log.debug(e)\n print(f\"Failed to download {name}, make sure yt_dlp is up to date\")\n if not kwargs[\"skip_mp3\"]:\n set_tags(temp, mp3file_path, kwargs)\n if kwargs[\"remove_trailing_tracks\"] == \"y\":\n for save_path in files:\n for f in os.listdir(save_path):\n if f not in files[save_path]:\n print(f\"File {f} is not in the playlist anymore, we delete it\")\n os.remove(path.join(save_path, f))", "def podcast_download(self):\r\n warnings.filterwarnings(\"ignore\", category=UnicodeWarning)\r\n now = datetime.datetime.now()\r\n\r\n for podcast_file in self.podcast_list:\r\n published, name, link, title = podcast_file\r\n if self.podcast_list != []:\r\n line_file = (published + ';' + title + ';' + name + ';' + link).encode(\"utf-8\") \r\n if line_file in open(self.download_log).read():\r\n pass\r\n else:\r\n title = unicodedata.normalize('NFKD', title).encode('ascii', 
'ignore')\r\n download_folder = os.path.join('downloads', title)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n try:\r\n published = str(parser.parse(published))[:10]\r\n except IOError as error:\r\n print 'Error' + (error) + ': File - ' + str(title)\r\n download_folder = os.path.join(download_folder, published)\r\n if not os.path.exists(download_folder): \r\n os.makedirs(download_folder)\r\n namefile_unicode = link[link.rfind('/')+1:]\r\n namefile_str = unicodedata.normalize('NFKD', \r\n namefile_unicode).encode('ascii', 'ignore')\r\n namefile_str = namefile_str.decode('utf-8', 'ignore').encode(\"utf-8\")\r\n if '.mp3' in namefile_str:\r\n len_name = namefile_str.index('.mp3')\r\n elif '.MP3' in namefile_str:\r\n len_name = namefile_str.index('.MP3')\r\n namefile_str = namefile_str[:len_name + 4]\r\n fileoutput = os.path.join(download_folder, namefile_str)\r\n name = unicodedata.normalize('NFKD', name).encode('ascii', 'ignore')\r\n print str(published) + '; ' + name\r\n ## downlink\r\n download_file(link, fileoutput) \r\n ## tagging\r\n mp3_tagging(fileoutput, podcast_file)\r\n ## write log\r\n write_file(self.download_log, line_file)\r\n end = datetime.datetime.now()\r\n print '\\r' + 'Download Time = ' + str(end-now) + '\\r'\r\n return None", "def download_mix(mtrack):\n\n if os.path.exists(mtrack.mix_path):\n return True\n\n try:\n top_folderid = GDRIVE_FOLDERS[mtrack.dataset_version]\n except KeyError:\n raise IOError(\"Unable to find data in Google Drive for this version.\")\n\n file_list = get_named_child(top_folderid, mtrack.title)\n correct_file = [f for f in file_list if f['title'] == mtrack.track_id]\n\n if len(correct_file) == 0:\n raise IOError(\"Could not find multitrack\")\n else:\n mtrack_file = correct_file[0]\n\n mix_file_list = get_named_child(mtrack_file['id'], 'MIX')\n if len(mix_file_list) > 0:\n mix_file = mix_file_list[0]\n else:\n raise IOError(\"Could not find Mix\")\n\n make_mtrack_basedir(mtrack)\n 
download_file(mix_file['id'], mtrack.mix_path)\n\n DOWNLOADED_FILEPATHS.append(mtrack.mix_path)\n\n return True", "async def download(self, ctx, *, song):\n try:\n with youtube_dl.YoutubeDL(ytdl_download_format_options) as ydl:\n if \"https://www.youtube.com/\" in song:\n download = ydl.extract_info(song, True)\n else:\n infosearched = ydl.extract_info(\n \"ytsearch:\"+song, False)\n download = ydl.extract_info(\n infosearched['entries'][0]['webpage_url'], True)\n filename = ydl.prepare_filename(download)\n embed = discord.Embed(\n title=\"Your download is ready\", description=\"Please wait a moment while the file is beeing uploaded\")\n await ctx.send(embed=embed, delete_after=30)\n await ctx.send(file=discord.File(filename))\n os.remove(filename)\n except (youtube_dl.utils.ExtractorError, youtube_dl.utils.DownloadError):\n embed = discord.Embed(title=\"Song couldn't be downloaded\", description=(\"Song:\"+song))\n await ctx.send(embed=embed)", "def download_raw(mtrack, stemid, rawid):\n raw_track = mtrack.raw_audio[stemid][rawid]\n\n if os.path.exists(raw_track.audio_path):\n return True\n\n try:\n top_folderid = GDRIVE_FOLDERS[mtrack.dataset_version]\n except KeyError:\n raise IOError(\"Unable to find data in Google Drive for this version.\")\n\n file_list = get_named_child(top_folderid, mtrack.title)\n correct_file = [f for f in file_list if f['title'] == mtrack.track_id]\n\n if len(correct_file) == 0:\n raise IOError(\"Could not find multitrack\")\n else:\n mtrack_file = correct_file[0]\n\n raw_file_list = get_named_child(mtrack_file['id'], 'RAW')\n if len(raw_file_list) > 0:\n raw_folder = raw_file_list[0]\n else:\n raise IOError(\"Could not find raws folder\")\n\n raw_file_list2 = get_named_child(\n raw_folder['id'], os.path.basename(raw_track.audio_path)\n )\n if len(raw_file_list2) > 0:\n raw_file = raw_file_list2[0]\n else:\n raise IOError(\"Could not find raw file\")\n\n make_mtrack_basedir(mtrack)\n download_file(raw_file['id'], 
raw_track.audio_path)\n\n DOWNLOADED_FILEPATHS.append(raw_track.audio_path)\n\n return True", "def download_mp3(s, path=None, ref=None):\n\n s.driver.ensure_element_by_class_name('x-action-col-icon').click()\n s.driver.switch_to.frame('result_frame')\n time.sleep(1)\n # Get URL of mp3 file\n src = s.driver.ensure_element_by_id('messagePlayer').get_attribute('src')\n # Selenium --> Requests\n s.transfer_driver_cookies_to_session()\n # Download\n r = s.get(src, stream=True)\n if path == None:\n if ref == None:\n # Get ref number\n soap = BeautifulSoup(s.driver.page_source, 'lxml')\n ref = soap.findAll('div', class_='x-grid-cell-inner')[1].text\n path = '%s.mp3' % ref\n if r.status_code == 200:\n with open(path, 'wb') as f:\n for chunk in r.iter_content(1024*2014):\n f.write(chunk)\n else:\n return 1\n # Requests --> Selenium\n s.transfer_session_cookies_to_driver()\n return s", "def test_download_simfile(self):\n scrape_category.download_simfile(self.simfile, self.dest,\n tidy=False,\n use_logfile=True,\n extract=True,\n link=self.link)\n\n # There should now be three files - a download log, a zip, and\n # an unzipped simfile.\n self.check_saved_files(log=True, unzipped=True, zipped=True)\n\n records = {\"100\": self.simfile}\n updated_records = scrape_category.update_records_from_log(records, self.dest)\n assert len(updated_records) == 1\n assert \"100\" in updated_records\n # The records should be updated to reflect where the simfile\n # was actually saved\n assert updated_records[\"100\"].name == \"foo\"", "async def download_audio(event):\n url = event.pattern_match.group(1)\n rmsg = await event.get_reply_message()\n if not url and rmsg:\n myString = rmsg.text\n url = re.search(\"(?P<url>https?://[^\\s]+)\", myString).group(\"url\")\n if not url:\n return await edit_or_reply(event, \"`What I am Supposed to find? 
Give link`\")\n codevent = await edit_or_reply(event, \"`Preparing to download...`\")\n reply_to_id = await reply_id(event)\n ytdl_data = await ytdl_down(codevent, audio_opts, url)\n if ytdl_data is None:\n return\n await codevent.edit(\n f\"`Preparing to upload song:`\\\n \\n**{ytdl_data['title']}**\\\n \\nby *{ytdl_data['uploader']}*\"\n )\n f = pathlib.Path(f\"{ytdl_data['title']}.mp3\".replace(\"|\", \"_\"))\n codthumb = pathlib.Path(f\"{ytdl_data['title']}.mp3.jpg\".replace(\"|\", \"_\"))\n if not os.path.exists(codthumb):\n codthumb = pathlib.Path(f\"{ytdl_data['title']}.mp3.webp\".replace(\"|\", \"_\"))\n if not os.path.exists(codthumb):\n codthumb = None\n c_time = time.time()\n ul = io.open(f, \"rb\")\n uploaded = await event.client.fast_upload_file(\n file=ul,\n progress_callback=lambda d, t: asyncio.get_event_loop().create_task(\n progress(d, t, codevent, c_time, \"upload\", file_name=f)\n ),\n )\n ul.close()\n attributes, mime_type = await fix_attributes(f, ytdl_data, supports_streaming=True)\n media = types.InputMediaUploadedDocument(\n file=uploaded,\n mime_type=mime_type,\n attributes=attributes,\n thumb=await event.client.upload_file(codthumb) if codthumb else None,\n )\n await event.client.send_file(\n event.chat_id,\n file=media,\n reply_to=reply_to_id,\n caption=ytdl_data[\"title\"],\n supports_streaming=True,\n force_document=False,\n )\n os.remove(f)\n if codthumb:\n os.remove(codthumb)\n await codevent.delete()", "def download_stem(mtrack, stemid):\n stem = mtrack.stems[stemid]\n\n if os.path.exists(stem.audio_path):\n return True\n\n try:\n top_folderid = GDRIVE_FOLDERS[mtrack.dataset_version]\n except KeyError:\n raise IOError(\"Unable to find data in Google Drive for this version.\")\n\n file_list = get_named_child(top_folderid, mtrack.title)\n correct_file = [f for f in file_list if f['title'] == mtrack.track_id]\n\n if len(correct_file) == 0:\n raise IOError(\"Could not find multitrack\")\n else:\n mtrack_file = correct_file[0]\n\n 
stem_file_list = get_named_child(mtrack_file['id'], 'STEMS')\n if len(stem_file_list) > 0:\n stem_folder = stem_file_list[0]\n else:\n raise IOError(\"Could not find stems folder\")\n\n stem_file_list2 = get_named_child(\n stem_folder['id'], os.path.basename(stem.audio_path)\n )\n if len(stem_file_list2) > 0:\n stem_file = stem_file_list2[0]\n else:\n raise IOError(\"Could not find stem file\")\n\n make_mtrack_basedir(mtrack)\n download_file(stem_file['id'], stem.audio_path)\n\n DOWNLOADED_FILEPATHS.append(stem.audio_path)\n\n return True", "def test_skipping_downloaded_file_on_disc(\n requests_mock: rm_Mocker,\n mp3_file1_mock: bytes,\n mp3_file2_mock: bytes,\n tmp_path: Path,\n lep_dl: LepDL,\n) -> None:\n test_downloads: LepFileList = LepFileList()\n file_1 = LepFile(\n filename=\"Test File #1.mp3\",\n primary_url=\"http://traffic.libsyn.com/teacherluke/36-london-video-interviews-pt-1-audio-only.mp3\", # noqa: E501,B950\n )\n file_2 = LepFile(\n filename=\"Test File #2.mp3\",\n primary_url=\"https://traffic.libsyn.com/secure/teacherluke/733._A_Summer_Ramble.mp3\", # noqa: E501,B950\n )\n\n test_downloads.append(file_1)\n test_downloads.append(file_2)\n\n requests_mock.get(\n \"http://traffic.libsyn.com/teacherluke/36-london-video-interviews-pt-1-audio-only.mp3\", # noqa: E501,B950\n content=mp3_file1_mock,\n )\n requests_mock.get(\n \"https://traffic.libsyn.com/secure/teacherluke/733._A_Summer_Ramble.mp3\", # noqa: E501,B950\n content=mp3_file2_mock,\n )\n\n lep_dl.files = test_downloads\n lep_dl.detach_existed_files(tmp_path)\n existing_file_1 = tmp_path / \"Test File #1.mp3\"\n existing_file_1.write_text(\"Here are mp3 1 bytes\")\n lep_dl.download_files(tmp_path)\n expected_file_2 = tmp_path / \"Test File #2.mp3\"\n assert existing_file_1.read_text() == \"Here are mp3 1 bytes\"\n assert expected_file_2.exists()\n assert len(list(tmp_path.iterdir())) == 2\n assert len(lep_dl.existed) == 1", "def download_audio(self, link: str):\n\t\t# Download file to specified 
directory\n\t\toptions = {'outtmpl': 'data/music/%(title)s.%(ext)s'}\n\t\tyt_dl = youtube_dl.YoutubeDL(options)\n\t\tlink_info = yt_dl.extract_info(link, download=True) # Video Download\n\t\ttitle = link_info['title']\n\t\t# Find file in downloaded directory\n\t\tsource = None\n\t\tfor file in os.listdir(\"data/music/\"):\n\t\t\tfile_title, file_ext = file.title().split(\".\")\n\t\t\tif self.caseless_equals(title, file_title):\n\t\t\t\tsource = f\"data/music/{title}.{file_ext.lower()}\"\n\t\t\t\tbreak # End loop\n\t\t# Add audio source to playlist\n\t\tself.playlist.append({\n\t\t\t\"title\": title,\n\t\t\t\"link\": link,\n\t\t\t\"source\": source,\n\t\t\t\"loop\": False\n\t\t})", "async def get_file(self, link, name, md5, session):\n if os.path.exists(name) or md5 in opts.archived_md5:\n self.count += 1\n return\n\n async with session.get(link) as media:\n # Open file initially with .part suffix\n with open(f\"{name}.part\", \"wb\") as f:\n while True:\n chunk = await media.content.read(1024)\n if not chunk:\n break\n f.write(chunk)\n\n # Remove .part suffix once complete\n # After this point file won't get removed if script gets interrupted\n os.rename(f\"{name}.part\", name)\n\n if opts.archive:\n log_hash(md5)\n self.count += 1\n msg(f\"{self.fetch_progress()} {self.board}/{self.dir}/{name}\")", "def detect_netease_music_name(file_path, dist_path, KEEP_SOURCE=True):\n headers = {\n \"User-Agent\": \"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0\"\n }\n url_base = \"http://music.163.com/api/song/detail/?id={}&ids=[{}]\"\n\n if not os.path.exists(dist_path):\n os.mkdir(dist_path)\n\n for file_name in os.listdir(file_path):\n if not file_name.endswith(\".mp3\"):\n continue\n if not len(file_name.split(\"-\")) == 3:\n print(\n \">>>> File %s not in format <song id>-<bite rate>-<random number>.mp3\"\n % (file_name)\n )\n continue\n\n try:\n song_id = file_name.split(\"-\")[0]\n url_target = url_base.format(song_id, song_id)\n resp = 
requests.get(url_target, headers=headers)\n rr = json.loads(resp.text)\n\n tt = eyed3.load(os.path.join(file_path, file_name))\n tt.tag.title = rr[\"songs\"][0][\"name\"].replace(\"\\xa0\", \" \")\n tt.tag.artist = rr[\"songs\"][0][\"artists\"][0][\"name\"]\n tt.tag.album = rr[\"songs\"][0][\"album\"][\"name\"]\n tt.tag.album_artist = rr[\"songs\"][0][\"album\"][\"artists\"][0][\"name\"]\n print(\n \"song_id = %s, tt.tag title = %s, artist = %s, album = %s, album_artist = %s\"\n % (\n song_id,\n tt.tag.title,\n tt.tag.artist,\n tt.tag.album,\n tt.tag.album_artist,\n )\n )\n tt.tag.save()\n except UnicodeEncodeError as e:\n print(\n \">>>> UnicodeEncodeError, try again later: file_name = %s, error = %s\"\n % (file_name, str(e))\n )\n continue\n except:\n print(\">>>> Some other error happens: file_name = %s\" % (file_name))\n continue\n\n dist_name = (\n os.path.join(\n dist_path,\n \"%s - %s\"\n % (tt.tag.artist.replace(\"/\", \" \"), tt.tag.title.replace(\"/\", \" \")),\n )\n + \".mp3\"\n )\n \n if KEEP_SOURCE == True:\n shutil.copyfile(os.path.join(file_path, file_name), dist_name)\n else:\n os.rename(os.path.join(file_path, file_name), dist_name)", "def download_song(song):\n ydl.download([song])\n print(\"Finished downloading and converting: \" + song)", "def _download_metadata(track_id, dataset_version):\n metadata_path = os.path.join(METADATA_PATH, _METADATA_FMT % track_id)\n if os.path.exists(metadata_path):\n return True\n\n try:\n top_folderid = GDRIVE_FOLDERS[dataset_version]\n except KeyError:\n raise IOError(\"Unable to find data in Google Drive for this version.\")\n\n file_list = get_named_child(top_folderid, track_id)\n correct_file = [f for f in file_list if f['title'] == track_id]\n\n if len(correct_file) == 0:\n raise IOError(\"Could not find multitrack\")\n else:\n mtrack_file = correct_file[0]\n\n metadata_file_list = get_named_child(mtrack_file['id'], 'METADATA')\n if len(metadata_file_list) > 0:\n metadata_file = metadata_file_list[0]\n 
else:\n folder_file_list = get_files_in_folder(mtrack_file['id'])\n print(len(folder_file_list))\n for fobject in folder_file_list:\n print(fobject['title'])\n raise IOError(\"Could not find Metadata\")\n\n download_file(metadata_file['id'], metadata_path)\n\n DOWNLOADED_FILEPATHS.append(metadata_path)\n\n return True", "def get_next_song(self):\r\n if self.timestamp:\r\n delta = datetime.datetime.now() - self.timestamp\r\n if delta < timedelta(seconds=3):\r\n self.log.warning(u\"Song '%s' stopped playing after less than 3 seconds for some reason!\" % self.meta)\r\n time.sleep(3)\r\n self.timestamp = datetime.datetime.now()\r\n\r\n song = self.findQueued()\r\n\r\n self.meta = u\"%s - %s\" % (song.artist(), song.title)\r\n self.log.debug(\"Now playing \\\"%s\\\" [ID %s]\" % (song.title, song.id))\r\n self.song = song\r\n\r\n try:\r\n filepath = song.file.path.encode(self.fsenc)\r\n except:\r\n try:\r\n filepath = song.file.path.encode(self.sysenc)\r\n except:\r\n filepath = song.file.path\r\n self.log.debug(\"Returning path %s\" % filepath)\r\n return filepath", "def fetch_song_data(url):\r\n response = requests.get(url)\r\n return response.text", "def download_song(url, filename):\n page = requests.get(url, headers=HEADERS)\n if page.status_code == 200: # OK\n with open(filename, 'w') as outf:\n outf.write(page.text)\n else:\n print(f'download failed with status code {page.status_code}!')", "def _get_file(self, path: str) -> Tuple[str, bytes]:\n self._trace(\"fetching: %s\" % path)\n meta, resp = self._connection.files_download(path)\n return (meta.rev, resp.content)", "def test_download_specific_episode(self):\n episode = self._get_episode()\n torrent_filename = self.fetcher.download_specific_episode(episode)\n self.assertEqual(torrent_filename, FILENAME_2)", "def download(self):\n #the link has some meta data in it that we need to get a hold of so we cant use metaData.getLink()\n data = None\n\n for link in self.metaData.jsonObj['links']:\n if link.get('rel') == 
\"content\":\n data = link\n\n assert data is not None\n\n response = self._adapter.getRequest(data['href'], self._baseHeader)\n return {\"filename\": data['title'], \"mime\": data['type'], \"binary\": response['Body'] }", "def download(track_id, ext):\n\n if ext != 'mp3':\n return Response('', status=404)\n\n track = models.Track.query.get(track_id)\n if track is None:\n abort(404)\n\n track_file = open(track.get_path(), 'r')\n filename_header = (\n 'Content-Disposition', 'attachment; filename=\"%s.mp3\"' % track.title\n )\n\n return Response(response=track_file.read(), mimetype='audio/mpeg',\n headers=[filename_header])", "async def async_parse_m3u_url(self, playlist):\n try:\n websession = async_get_clientsession(self.hass)\n async with async_timeout.timeout(10):\n response = await websession.get(playlist)\n\n except (asyncio.TimeoutError, aiohttp.ClientError) as error:\n _LOGGER.warning(\n \"For: %s unable to get the M3U playlist: %s\", self._name, playlist\n )\n return playlist\n\n if response.status == HTTPStatus.OK:\n data = await response.text()\n _LOGGER.debug(\"For: %s M3U playlist: %s contents: %s\", self._name, playlist, data)\n\n lines = [line.strip(\"\\n\\r\") for line in data.split(\"\\n\") if line.strip(\"\\n\\r\") != \"\"]\n if len(lines) > 0:\n _LOGGER.debug(\"For: %s M3U playlist: %s lines: %s\", self._name, playlist, lines)\n urls = [u for u in lines if u.startswith('http')]\n _LOGGER.debug(\"For: %s M3U playlist: %s urls: %s\", self._name, playlist, urls)\n if len(urls) > 0:\n return urls[0]\n else:\n _LOGGER.error(\"For: %s M3U playlist: %s No valid http URL in the playlist!!!\", self._name, playlist)\n self._nometa = True\n else:\n _LOGGER.error(\"For: %s M3U playlist: %s No content to parse!!!\", self._name, playlist)\n\n else:\n _LOGGER.error(\n \"For: %s (%s) Get failed, response code: %s Full message: %s\",\n self._name,\n self._host,\n response.status,\n response,\n )\n\n return playlist", "def produce_music_start(self) -> str:\n try:\n 
self.folder_create(self.folder_config)\n value_path = os.path.join(self.folder_config, name_sound)\n if not (os.path.exists(value_path) and os.path.isfile(value_path)):\n audio_get = pafy.new(url=url_sound)\n best_audio = audio_get.getbestaudio()\n best_audio.download(filepath=value_path)\n return value_path\n except Exception as e:\n a = TelegramManager()\n a.proceed_message_values(f'We faced problem with the getting audio. Mistake: {e}')\n return ''", "def getDownload(self, html, episode_number):\n soup = BeautifulSoup(html, \"html.parser\")\n download = soup.find_all('source')\n if download:\n self.downloads[\"Episode %s.mp4\" % str(episode_number)] = download[0]['src']\n return\n\n print(\"[!] Download link not found for episode %s\" % str(episode_number))", "def __maybeDownload():\n if not os.path.isdir(Download.DATA_ROOT): # 若 data 目录不存在,创建 data 目录\n os.mkdir(Download.DATA_ROOT)\n file_path = os.path.join(Download.DATA_ROOT, Download.FILE_NAME)\n\n if os.path.exists(file_path): # 若已存在该文件\n statinfo = os.stat(file_path)\n if statinfo.st_size == Download.FILE_SIZE: # 若该文件正确,直接返回 file_path\n print('Found and verified %s' % file_path)\n return file_path\n else: # 否则,删除文件重新下载\n os.remove(file_path)\n\n download_url = Download.URL + Download.FILE_NAME\n print('Downloading %s ...' % download_url)\n filename, _ = urlretrieve(download_url, file_path) # 下载数据\n print('Finish downloading')\n\n statinfo = os.stat(filename)\n if statinfo.st_size == Download.FILE_SIZE: # 校验数据是否正确下载\n print('Found and verified %s' % filename)\n else:\n print(statinfo.st_size)\n raise Exception('Failed to verify ' + filename + '. 
Can you get to it with a browser ?')\n return filename", "def test_downloading_mocked_mp3_files(\n requests_mock: rm_Mocker,\n mp3_file1_mock: bytes,\n mp3_file2_mock: bytes,\n tmp_path: Path,\n lep_dl: LepDL,\n) -> None:\n test_downloads: LepFileList = LepFileList()\n file_1 = LepFile(\n filename=\"Test File #1.mp3\",\n primary_url=\"https://traffic.libsyn.com/secure/teacherluke/733._A_Summer_Ramble.mp3\", # noqa: E501,B950\n )\n file_2 = LepFile(\n filename=\"Test File #2.mp3\",\n primary_url=\"https://audioboom.com/posts/5678762-episode-169-luke-back-on-zep-part-4.mp3\", # noqa: E501,B950\n )\n test_downloads.append(file_1)\n test_downloads.append(file_2)\n\n requests_mock.get(\n \"https://traffic.libsyn.com/secure/teacherluke/733._A_Summer_Ramble.mp3\", # noqa: E501,B950\n content=mp3_file1_mock,\n )\n requests_mock.get(\n \"https://audioboom.com/posts/5678762-episode-169-luke-back-on-zep-part-4.mp3\", # noqa: E501,B950\n content=mp3_file2_mock,\n )\n\n lep_dl.non_existed = test_downloads\n lep_dl.download_files(tmp_path)\n expected_file_1 = tmp_path / \"Test File #1.mp3\"\n expected_file_2 = tmp_path / \"Test File #2.mp3\"\n assert expected_file_1.exists()\n assert 21460 < expected_file_1.stat().st_size < 22000\n assert expected_file_2.exists()\n assert 18300 < expected_file_2.stat().st_size < 18350\n assert len(lep_dl.downloaded) == 2", "def update_mp3_metadata(self, mp3_file):\n if isinstance(mp3_file, str):\n mp3_file = mp3_utility.Mp3File(file_path=mp3_file, load_tags_from_file=True)\n remote_name = self.get_remote_name(mp3_file.file_path)\n archive_item_file_details = self.item_files_dict.get(remote_name, None)\n mp3_metadata = mp3_file.metadata\n if archive_item_file_details is None:\n logging.warning(\"The file does not exist! 
Skipping.\")\n else:\n remote_tag_update_needed = (archive_item_file_details.get(\"artist\", \"\") != mp3_metadata.artist) or (\n archive_item_file_details.get(\"creator\", \"\") != mp3_metadata.artist) or (\n archive_item_file_details.get(\"title\", \"\") != mp3_metadata.title) or (\n archive_item_file_details.get(\"album\", \"\") != mp3_metadata.album) or (\n archive_item_file_details.get(\"album_artist\",\n \"\") != mp3_metadata.album_artist)\n if remote_tag_update_needed:\n logging.info(\"***Updating %s in archive item.\" % remote_name)\n logging.info(\n internetarchive.modify_metadata(\n self.archive_id,\n metadata=dict(title=mp3_metadata.title, album=mp3_metadata.album,\n album_artist=mp3_metadata.album_artist,\n artist=mp3_metadata.artist, creator=mp3_metadata.artist),\n target=os.path.join(\"files\", remote_name)))", "def _get_file_helper(self):\n page = self.course.moodle.fetch(\n self._download_url % self.id,\n None\n )\n # The resource URL should magically 303 across to the actual file\n if page.history and page.history[0].status_code == 303:\n return page, page.content\n\n # If it doesn't 303 to the actual file then there might be a download\n # link to try\n bs = bs4.BeautifulSoup(page.text, 'lxml')\n\n div = bs.find('div', class_='resourceworkaround')\n\n if div: # it's a link to the resource\n link = div.find('a').href\n\n page = self.course.moodle.fetch(\n link,\n None\n )\n return page, page.content\n\n # Perhaps it's an embedded object\n obj = bs.find('object', id='resourceobject')\n if obj:\n link = obj['data']\n\n page = self.course.moodle.fetch(\n link,\n None\n )\n return page, page.content\n\n raise ValueError(\"No idea how to get that resource\")" ]
[ "0.6019038", "0.5953762", "0.5950835", "0.5851799", "0.5833112", "0.582813", "0.57368785", "0.57094854", "0.56998867", "0.56914955", "0.5675499", "0.5610916", "0.56106955", "0.56062835", "0.5562782", "0.5531623", "0.552138", "0.550489", "0.54986244", "0.54729104", "0.5463579", "0.544893", "0.5406085", "0.54015034", "0.5355998", "0.5344535", "0.5338295", "0.52911335", "0.5233772", "0.5229373" ]
0.6188046
0
The empty string is not an IPv6 address literal.
def test_empty(self): self.assertFalse(isIPv6Address(""))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def clean_ipv6_address(\n ip_str, unpack_ipv4=False, error_message=_(\"This is not a valid IPv6 address.\")\n):\n try:\n addr = ipaddress.IPv6Address(int(ipaddress.IPv6Address(ip_str)))\n except ValueError:\n raise ValidationError(error_message, code=\"invalid\")\n\n if unpack_ipv4 and addr.ipv4_mapped:\n return str(addr.ipv4_mapped)\n elif addr.ipv4_mapped:\n return \"::ffff:%s\" % str(addr.ipv4_mapped)\n\n return str(addr)", "def test_ipv6_validation_failure():\n with pytest.raises(socket.error):\n is_ipv6('2001::0234:C1ab::A0:aabc:003F')", "def is_valid_ipv6_address(address, allow_brackets = False):\n\n if allow_brackets:\n if address.startswith('[') and address.endswith(']'):\n address = address[1:-1]\n\n if address.count('.') == 3:\n # Likely an ipv4-mapped portion. Check that its vaild, then replace with a\n # filler.\n\n ipv4_start = address.rfind(':', 0, address.find('.')) + 1\n ipv4_end = address.find(':', ipv4_start + 1)\n\n if ipv4_end == -1:\n ipv4_end = None # don't crop the last character\n\n if not is_valid_ipv4_address(address[ipv4_start:ipv4_end]):\n return False\n\n addr_comp = [address[:ipv4_start - 1] if ipv4_start != 0 else None, 'ff:ff', address[ipv4_end + 1:] if ipv4_end else None]\n address = ':'.join(filter(None, addr_comp))\n\n # addresses are made up of eight colon separated groups of four hex digits\n # with leading zeros being optional\n # https://en.wikipedia.org/wiki/IPv6#Address_format\n\n colon_count = address.count(':')\n\n if colon_count > 7:\n return False # too many groups\n elif colon_count != 7 and '::' not in address:\n return False # not enough groups and none are collapsed\n elif address.count('::') > 1 or ':::' in address:\n return False # multiple groupings of zeros can't be collapsed\n\n for entry in address.split(':'):\n if not re.match('^[0-9a-fA-f]{0,4}$', entry):\n return False\n\n return True", "def test_invalidWithScopeID(self):\n self.assertFalse(isIPv6Address(\"%eth0\"))\n 
self.assertFalse(isIPv6Address(\":%eth0\"))\n self.assertFalse(isIPv6Address(\"hello%eth0\"))", "def test_ipv6_validation_success():\n assert is_ipv6('2001:db8::ff00:42:8329')", "def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address\")", "def SupportsIPv6(self) -> bool:", "def is_host_ip6(value):\n try:\n return bool(ipaddress.IPv6Address(value))\n\n except:\n pass", "def test_unicodeAndBytes(self):\n self.assertTrue(isIPv6Address(b\"fe80::2%1\"))\n self.assertTrue(isIPv6Address(u\"fe80::2%1\"))\n self.assertFalse(isIPv6Address(u\"\\u4321\"))\n self.assertFalse(isIPv6Address(u\"hello%eth0\"))\n self.assertFalse(isIPv6Address(b\"hello%eth0\"))", "def is_valid_ipv6_address(ip_str):\n try:\n ipaddress.IPv6Address(ip_str)\n except ValueError:\n return False\n return True", "def OSSupportsIPv6(self) -> bool:", "def is_valid_ipv6_address(address):\n try:\n socket.inet_pton(socket.AF_INET6, address)\n except (socket.error, TypeError):\n return False\n return True", "def testIPv6noscheme(self):\n if self.needScheme: return\n \n self.assertEqual([\"[2001:a68:104:1337:250:daff:fe72:871c]/toimia\"], grab('foo [2001:a68:104:1337:250:daff:fe72:871c]/toimia', self.needScheme))", "def is_valid_ipv6_address(address):\n try:\n socket.inet_pton(socket.AF_INET6, address)\n except socket.error: # not a valid address\n return False\n return True", "def test_ipv6_in_net(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344/24\")\n assert test_ip.in_network(\"2001:0d00::/24\")\n assert test_ip.in_network(\"2001:0d00::/29\")", "def is_IPv6Address(ipv6address):\n\n return bool(re.match(re_ipv6, ipv6address))", "def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"use `ipv6_addresses` attribute instead\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: use `ipv6_addresses` attribute instead\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")", "def validateIP(ip):\n # 
type: (str)->None\n try:\n socket.inet_aton(ip)\n except socket.error:\n socket.inet_pton(socket.AF_INET6, ip)", "def testIPv6(self):\n self.assertEqual([\"http://[2001:a68:104:1337:250:daff:fe72:871c]/toimia\"], grab('foo http://[2001:a68:104:1337:250:daff:fe72:871c]/toimia', self.needScheme))", "def isIpv4Addr(string):\n return (True)", "def is_valid_ipv6_address(ip_str):\r\n # We need to have at least one ':'.\r\n if ':' not in ip_str:\r\n return False\r\n\r\n # We can only have one '::' shortener.\r\n if ip_str.count('::') > 1:\r\n return False\r\n\r\n # '::' should be encompassed by start, digits or end.\r\n if ':::' in ip_str:\r\n return False\r\n\r\n # A single colon can neither start nor end an address.\r\n if ((ip_str.startswith(':') and not ip_str.startswith('::')) or\r\n (ip_str.endswith(':') and not ip_str.endswith('::'))):\r\n return False\r\n\r\n # We can never have more than 7 ':' (1::2:3:4:5:6:7:8 is invalid)\r\n if ip_str.count(':') > 7:\r\n return False\r\n\r\n # If we have no concatenation, we need to have 8 fields with 7 ':'.\r\n if '::' not in ip_str and ip_str.count(':') != 7:\r\n # We might have an IPv4 mapped address.\r\n if ip_str.count('.') != 3:\r\n return False\r\n\r\n ip_str = _explode_shorthand_ip_string(ip_str)\r\n\r\n # Now that we have that all squared away, let's check that each of the\r\n # hextets are between 0x0 and 0xFFFF.\r\n for hextet in ip_str.split(':'):\r\n if hextet.count('.') == 3:\r\n # If we have an IPv4 mapped address, the IPv4 portion has to\r\n # be at the end of the IPv6 portion.\r\n if not ip_str.split(':')[-1] == hextet:\r\n return False\r\n if not is_valid_ipv4_address(hextet):\r\n return False\r\n else:\r\n try:\r\n # a value error here means that we got a bad hextet,\r\n # something like 0xzzzz\r\n if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF:\r\n return False\r\n except ValueError:\r\n return False\r\n return True", "def _is_shorthand_ip(ip_str):\r\n if ip_str.count('::') == 1:\r\n return True\r\n if 
filter(lambda x: len(x) < 4, ip_str.split(':')):\r\n return True\r\n return False", "def check_ipv4_ipv6_fqdn(val):\n\n try:\n val = u'{0}'.format(val)\n ip = ip_network(val, strict=False)\n return ip.version\n except ValueError:\n return 0", "def test_ipv6_from_binary(self):\n ip1 = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n ip1_2 = ip_address.IPAddress(ip1.bytes, binary=True)\n assert ip1 == ip1_2", "def is_net_ip6(value):\n for test in [lambda x: ipaddress.IPv6Network(x)._prefixlen != 128,\n lambda x: ipaddress.IPv6Interface(x)._prefixlen != 128]:\n try:\n return bool(test(value))\n\n except:\n pass\n\n return False", "def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []", "def ip_f(x: Text) -> Tuple[Optional[Text], Optional[Text]]:\n try:\n addrv6 = ipaddress.IPv6Address(x)\n return \"ipv6\", str(addrv6.exploded)\n except ipaddress.AddressValueError:\n try:\n ipaddress.IPv4Address(x)\n return \"ipv4\", x\n except ipaddress.AddressValueError:\n pass\n\n return None, None", "def ipv6_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"ipv6_addresses\")", "def is_valid_ip_address(address):\n return Convert.is_valid_ipv6_address(\n address) or 
Convert.is_valid_ipv4_address(address)", "def is_ipv6(addr):\n try:\n socket.inet_pton(socket.AF_INET6, addr)\n return True\n except socket.error:\n return False" ]
[ "0.69366324", "0.69032264", "0.6863372", "0.6833882", "0.6812875", "0.67084074", "0.66962737", "0.6655872", "0.66131544", "0.65501314", "0.6547397", "0.6505707", "0.64604706", "0.6450955", "0.62647307", "0.6258979", "0.62298465", "0.6160988", "0.61486316", "0.6139477", "0.61365783", "0.6081804", "0.6080557", "0.60774237", "0.60773546", "0.6067941", "0.6046365", "0.6035668", "0.60287505", "0.6014996" ]
0.81282455
0
An otherwise valid IPv6 address literal may also include a C{"%"} followed by an arbitrary scope identifier.
def test_scopeID(self): self.assertTrue(isIPv6Address("fe80::1%eth0")) self.assertTrue(isIPv6Address("fe80::2%1")) self.assertTrue(isIPv6Address("fe80::3%en2"))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_invalidWithScopeID(self):\n self.assertFalse(isIPv6Address(\"%eth0\"))\n self.assertFalse(isIPv6Address(\":%eth0\"))\n self.assertFalse(isIPv6Address(\"hello%eth0\"))", "def format_url_address(address):\n try:\n addr = netaddr.IPAddress(address)\n if addr.version == constants.IPV6_FAMILY:\n return \"[%s]\" % address\n else:\n return str(address)\n except netaddr.AddrFormatError:\n return address", "def expand_ipv6_address(address):\n\n if not is_valid_ipv6_address(address):\n raise ValueError(\"'%s' isn't a valid IPv6 address\" % address)\n\n # expand ipv4-mapped portions of addresses\n if address.count('.') == 3:\n ipv4_start = address.rfind(':', 0, address.find('.')) + 1\n ipv4_end = address.find(':', ipv4_start + 1)\n\n if ipv4_end == -1:\n ipv4_end = None # don't crop the last character\n\n # Converts ipv4 address to its hex ipv6 representation. For instance...\n #\n # '5.9.158.75' => '0509:9e4b'\n\n ipv4_bin = _get_address_binary(address[ipv4_start:ipv4_end])\n groupings = [ipv4_bin[16 * i:16 * (i + 1)] for i in range(2)]\n ipv6_snippet = ':'.join(['%04x' % int(group, 2) for group in groupings])\n\n addr_comp = [address[:ipv4_start - 1] if ipv4_start != 0 else None, ipv6_snippet, address[ipv4_end + 1:] if ipv4_end else None]\n address = ':'.join(filter(None, addr_comp))\n\n # expands collapsed groupings, there can only be a single '::' in a valid\n # address\n if '::' in address:\n missing_groups = 7 - address.count(':')\n address = address.replace('::', '::' + ':' * missing_groups)\n\n # inserts missing zeros\n for index in range(8):\n start = index * 5\n end = address.index(':', start) if index != 7 else len(address)\n missing_zeros = 4 - (end - start)\n\n if missing_zeros > 0:\n address = address[:start] + '0' * missing_zeros + address[start:]\n\n return address", "def is_valid_ipv6_address(address, allow_brackets = False):\n\n if allow_brackets:\n if address.startswith('[') and address.endswith(']'):\n address = address[1:-1]\n\n if 
address.count('.') == 3:\n # Likely an ipv4-mapped portion. Check that its vaild, then replace with a\n # filler.\n\n ipv4_start = address.rfind(':', 0, address.find('.')) + 1\n ipv4_end = address.find(':', ipv4_start + 1)\n\n if ipv4_end == -1:\n ipv4_end = None # don't crop the last character\n\n if not is_valid_ipv4_address(address[ipv4_start:ipv4_end]):\n return False\n\n addr_comp = [address[:ipv4_start - 1] if ipv4_start != 0 else None, 'ff:ff', address[ipv4_end + 1:] if ipv4_end else None]\n address = ':'.join(filter(None, addr_comp))\n\n # addresses are made up of eight colon separated groups of four hex digits\n # with leading zeros being optional\n # https://en.wikipedia.org/wiki/IPv6#Address_format\n\n colon_count = address.count(':')\n\n if colon_count > 7:\n return False # too many groups\n elif colon_count != 7 and '::' not in address:\n return False # not enough groups and none are collapsed\n elif address.count('::') > 1 or ':::' in address:\n return False # multiple groupings of zeros can't be collapsed\n\n for entry in address.split(':'):\n if not re.match('^[0-9a-fA-f]{0,4}$', entry):\n return False\n\n return True", "def format_ipv6(value, mask):\n value_ipv6 = \":\".join(re.findall('..', \"{:032x}\".format(value)))\n if mask is None:\n return value_ipv6\n value_mask = \":\".join(re.findall('..', \"{:032x}\".format(mask)))\n return \"{}/{}\".format(value_ipv6, value_mask)", "def test_ipv6_in_range(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n \n assert test_ip.in_range(\"2000:0db8:85a3:08d3:1319:8a2e:0370:7344\",\"2002:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n assert test_ip.in_range(\"2001:0db8:85a3:07d3:1319:8a2e:0370:7344\",\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n assert test_ip.in_range(\"::ffff:1.1.1.1\",\"2501:0db8:85a3:08d3:1319:8a2e:0370:7344\")", "def safe_ip_format(ip):\r\n try:\r\n if netaddr.IPAddress(ip).version == 6:\r\n return '[%s]' % ip\r\n except (TypeError, netaddr.AddrFormatError): 
# hostname\r\n pass\r\n # it's IPv4 or hostname\r\n return ip", "def safe_addr(ip_addr):\n return '.'.join(ip_addr.split('.')[:2] + ['xxx', 'xxx'])", "def test_ipv6_in_net(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344/24\")\n assert test_ip.in_network(\"2001:0d00::/24\")\n assert test_ip.in_network(\"2001:0d00::/29\")", "def check_ipv4_ipv6_fqdn(val):\n\n try:\n val = u'{0}'.format(val)\n ip = ip_network(val, strict=False)\n return ip.version\n except ValueError:\n return 0", "def defangIPaddr(address):\n address_as_list = list(address)\n length_of_address = len(address_as_list)\n for i in range(length_of_address):\n if address_as_list[i] == \".\":\n address_as_list[i] = \"[.]\"\n return \"\".join(address_as_list)", "def clean_ipv6_address(\n ip_str, unpack_ipv4=False, error_message=_(\"This is not a valid IPv6 address.\")\n):\n try:\n addr = ipaddress.IPv6Address(int(ipaddress.IPv6Address(ip_str)))\n except ValueError:\n raise ValidationError(error_message, code=\"invalid\")\n\n if unpack_ipv4 and addr.ipv4_mapped:\n return str(addr.ipv4_mapped)\n elif addr.ipv4_mapped:\n return \"::ffff:%s\" % str(addr.ipv4_mapped)\n\n return str(addr)", "def get_host_string(addr: AddressTupleVXType) -> str:\n if len(addr) >= 3:\n addr = cast(AddressTupleV6Type, addr)\n if addr[3]:\n return \"{}%{}\".format(addr[0], addr[3])\n return addr[0]", "def ip_f(x: Text) -> Tuple[Optional[Text], Optional[Text]]:\n try:\n addrv6 = ipaddress.IPv6Address(x)\n return \"ipv6\", str(addrv6.exploded)\n except ipaddress.AddressValueError:\n try:\n ipaddress.IPv4Address(x)\n return \"ipv4\", x\n except ipaddress.AddressValueError:\n pass\n\n return None, None", "def format_single_address(address: Address | str) -> str:\n address = coerce_address(address)\n name = address.display_name\n if not name:\n return address.addr_spec\n\n if not needs_qp_encode(name):\n if specials_regex.search(name):\n # simple quoting works here, since we disallow\n # backslash escaping 
double quotes.\n name = f'\"{name}\"'\n return f'{name} <{address.addr_spec}>'\n\n name = qp_encode_display_name(name)\n return f'{name} <{address.addr_spec}>'", "def ip_address(addr):\n parts = addr.split('.')\n if len(parts) != 4:\n raise TypeError('{} does not match an IP address pattern'.format(addr))\n for part in parts:\n try:\n num = int(part)\n if num < 0 or num > 255:\n raise TypeError('{} does not match an IP address pattern'.format(addr))\n except ValueError:\n raise TypeError('{} does not match an IP address pattern'.format(addr))\n return addr", "def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []", "def _is_shorthand_ip(ip_str):\r\n if ip_str.count('::') == 1:\r\n return True\r\n if filter(lambda x: len(x) < 4, ip_str.split(':')):\r\n return True\r\n return False", "def overlay_ip(ip):\n return \"192.168.{}.{}\".format( *ip.split(\".\")[2:])", "def test_unicodeAndBytes(self):\n self.assertTrue(isIPv6Address(b\"fe80::2%1\"))\n self.assertTrue(isIPv6Address(u\"fe80::2%1\"))\n self.assertFalse(isIPv6Address(u\"\\u4321\"))\n self.assertFalse(isIPv6Address(u\"hello%eth0\"))\n self.assertFalse(isIPv6Address(b\"hello%eth0\"))", "def forge_contract(value) -> bytes:\n parts = value.split('%')\n address, entrypoint = 
(parts[0], parts[1]) if len(parts) == 2 else (parts[0], 'default')\n res = forge_address(address)\n if entrypoint != 'default':\n res += entrypoint.encode()\n return res", "def test_ipv6_validation_failure():\n with pytest.raises(socket.error):\n is_ipv6('2001::0234:C1ab::A0:aabc:003F')", "def _make_addr_resolve(self, addr: 'str | bytes', htype: 'int') -> 'bytes':\n _addr = addr.encode() if isinstance(addr, str) else addr\n\n if htype == Enum_Hardware.Ethernet:\n if PAT_MAC_ADDR.fullmatch(_addr) is not None:\n return _addr.replace(b':', b'').replace(b'-', b'')\n raise ProtocolError(f'Invalid MAC address: {addr!r}')\n return _addr", "def ipwrap(address: Any) -> str:\n try:\n if not isinstance(address, int):\n ipaddress.IPv6Address(address)\n return f\"[{address}]\"\n except ValueError:\n pass\n\n return str(address)", "def __init__(self, address, netmask=None):\n\n if netmask:\n ip = Ipv6Address(address)\n address = \"%s/%s\" % (ip,netmask)\n\n google.ipaddr.IPv6Network.__init__(self, address, strict=False)", "def ip6_cidr_range(ingress, debug=False):\n if debug:\n print('ip6_cidr_range ' + str(ingress) + lineno())\n print('type: ' + str(type(ingress)) + lineno())\n if hasattr(ingress, '__dict__'):\n print('vars: ' + str(vars(ingress)) + lineno())\n\n suffix = \"/128\";\n\n if type(ingress) == type(dict()):\n\n if debug:\n print('ingress is a dict: ' + lineno())\n\n if 'CidrIp' in ingress:\n\n if debug:\n print('CiderIp in ingress '+lineno())\n\n if type(ingress['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(ingress['CidrIp']) + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress['CidrIp']:\n return True\n\n elif ingress['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(ingress['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress['CidrIp']:\n return True\n\n elif ingress['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n if sys.version_info[0] < 3 and type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif hasattr(ingress, 'cidrIpv6'):\n\n if type(ingress.cidrIpv6) == type(str()):\n\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress.cidrIpv6) == type(dict()):\n\n for item in ingress.cidrIp:\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6[item]:\n return True\n\n elif ingress.cidrIpv6[item].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif type(ingress.cidrIpv6) == type(str()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n else:\n print('not sure what this is')\n print('need to fix')\n sys.exit(1)\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n for item in ingress:\n if 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n return True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress.cidrIpv6) == type(dict()):\n\n for item in ingress.cidrIp:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6[item]:\n return True\n\n elif ingress.cidrIpv6[item].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif type(ingress.cidrIpv6) == type(str()):\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif sys.version_info[0] < 3 and type(ingress.cidrIpv6) == type(unicode()):\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return False\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n else:\n print('not sure what this is')\n print('need to fix')\n sys.exit(1)\n\n else:\n if debug:\n print('ip is: ' + str(ingress.cidrIpv6) + lineno())\n print('type: ' + str(type(ingress.cidrIpv6)) + lineno())\n\n if type(ingress.cidrIpv6) == type(list()):\n\n has_invalid_cidr = False\n\n for item in ingress.cidrIpv6:\n\n if debug:\n print('list item: ' + str(item) + lineno())\n\n if type(item) == type(dict()):\n\n for item2 in item:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item[item2]:\n return True\n\n elif item2 == 'Ref':\n return True\n\n elif item[item2].endswith(suffix):\n if debug:\n print('ip ends with /32' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /32' + lineno())\n return False\n\n elif 'CidrIp' in item:\n if type(item['CidrIp']) == type(str()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n has_invalid_cidr = True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n has_invalid_cidr = False\n\n if sys.version_info[0] < 3:\n if type(item['CidrIp']) == type(unicode()):\n\n if debug:\n print('ip is: ' + str(item['CidrIp']) + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in item['CidrIp']:\n has_invalid_cidr = True\n\n elif item['CidrIp'].endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n has_invalid_cidr = False\n\n return has_invalid_cidr\n\n else:\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress.cidrIpv6:\n return True\n\n elif ingress.cidrIpv6.endswith(suffix):\n if debug:\n print('ip ends with /128' + lineno())\n return True\n\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n elif type(ingress) == type(str()):\n if debug:\n print('is a str ' + lineno())\n\n # only care about literals. if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress:\n return True\n\n elif ingress.endswith('/128'):\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n elif sys.version_info[0] < 3 and type(ingress) == type(unicode()):\n if debug:\n print('is a str ' + lineno())\n\n # only care about literals. 
if a Hash/Ref not going to chase it down\n # given likely a Parameter with external val\n if 'Ref' in ingress:\n return True\n\n elif ingress.endswith('/128'):\n return True\n else:\n if debug:\n print('ip does not end with /128' + lineno())\n return False\n\n return False", "def test_unreserved_percentencoding():\n assert (normalize_url(\"http://www.example.com/%7Eusername/\") ==\n \"http://www.example.com/~username\")\n assert (normalize_url('http://example.com/foo%23bar') ==\n 'http://example.com/foo%23bar')\n assert (normalize_url('http://example.com/foo%2fbar') ==\n 'http://example.com/foo%2Fbar')\n assert (normalize_url('http://example.com/foo%3fbar') ==\n 'http://example.com/foo%3Fbar')", "def test_ipv6_validation_success():\n assert is_ipv6('2001:db8::ff00:42:8329')", "def isIpv4AddrWithNetmask(string):\n return (True)", "def formataddr( pair, charset=None ):\n name, address = pair\n name = name and name.strip()\n address = address and address.strip()\n\n if not name:\n return address\n\n if _is8bitstring( name ):\n header = Header( '\"%s\"' % name, charset )\n header.append( ' <%s>' % address, '8bit' )\n return header\n\n quotes = ''\n if specialsre.search( name ):\n quotes = '\"'\n name = escapesre.sub( r'\\\\\\g<0>', name )\n\n return '%s%s%s <%s>' % ( quotes, name, quotes, address )" ]
[ "0.6570045", "0.6097962", "0.5951029", "0.5886211", "0.54803467", "0.53652436", "0.5291592", "0.52783734", "0.52346325", "0.5228517", "0.5217647", "0.51812303", "0.51507074", "0.50619113", "0.5045487", "0.5031636", "0.5029411", "0.49807203", "0.49727854", "0.4952022", "0.49495032", "0.49492", "0.49215186", "0.491975", "0.48967367", "0.48931077", "0.48916948", "0.48796898", "0.48754492", "0.48717207" ]
0.6768701
0
An otherwise invalid IPv6 address literal is still invalid with a trailing scope identifier.
def test_invalidWithScopeID(self): self.assertFalse(isIPv6Address("%eth0")) self.assertFalse(isIPv6Address(":%eth0")) self.assertFalse(isIPv6Address("hello%eth0"))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_scopeID(self):\n self.assertTrue(isIPv6Address(\"fe80::1%eth0\"))\n self.assertTrue(isIPv6Address(\"fe80::2%1\"))\n self.assertTrue(isIPv6Address(\"fe80::3%en2\"))", "def is_valid_ipv6_address(address, allow_brackets = False):\n\n if allow_brackets:\n if address.startswith('[') and address.endswith(']'):\n address = address[1:-1]\n\n if address.count('.') == 3:\n # Likely an ipv4-mapped portion. Check that its vaild, then replace with a\n # filler.\n\n ipv4_start = address.rfind(':', 0, address.find('.')) + 1\n ipv4_end = address.find(':', ipv4_start + 1)\n\n if ipv4_end == -1:\n ipv4_end = None # don't crop the last character\n\n if not is_valid_ipv4_address(address[ipv4_start:ipv4_end]):\n return False\n\n addr_comp = [address[:ipv4_start - 1] if ipv4_start != 0 else None, 'ff:ff', address[ipv4_end + 1:] if ipv4_end else None]\n address = ':'.join(filter(None, addr_comp))\n\n # addresses are made up of eight colon separated groups of four hex digits\n # with leading zeros being optional\n # https://en.wikipedia.org/wiki/IPv6#Address_format\n\n colon_count = address.count(':')\n\n if colon_count > 7:\n return False # too many groups\n elif colon_count != 7 and '::' not in address:\n return False # not enough groups and none are collapsed\n elif address.count('::') > 1 or ':::' in address:\n return False # multiple groupings of zeros can't be collapsed\n\n for entry in address.split(':'):\n if not re.match('^[0-9a-fA-f]{0,4}$', entry):\n return False\n\n return True", "def clean_ipv6_address(\n ip_str, unpack_ipv4=False, error_message=_(\"This is not a valid IPv6 address.\")\n):\n try:\n addr = ipaddress.IPv6Address(int(ipaddress.IPv6Address(ip_str)))\n except ValueError:\n raise ValidationError(error_message, code=\"invalid\")\n\n if unpack_ipv4 and addr.ipv4_mapped:\n return str(addr.ipv4_mapped)\n elif addr.ipv4_mapped:\n return \"::ffff:%s\" % str(addr.ipv4_mapped)\n\n return str(addr)", "def expand_ipv6_address(address):\n\n if not 
is_valid_ipv6_address(address):\n raise ValueError(\"'%s' isn't a valid IPv6 address\" % address)\n\n # expand ipv4-mapped portions of addresses\n if address.count('.') == 3:\n ipv4_start = address.rfind(':', 0, address.find('.')) + 1\n ipv4_end = address.find(':', ipv4_start + 1)\n\n if ipv4_end == -1:\n ipv4_end = None # don't crop the last character\n\n # Converts ipv4 address to its hex ipv6 representation. For instance...\n #\n # '5.9.158.75' => '0509:9e4b'\n\n ipv4_bin = _get_address_binary(address[ipv4_start:ipv4_end])\n groupings = [ipv4_bin[16 * i:16 * (i + 1)] for i in range(2)]\n ipv6_snippet = ':'.join(['%04x' % int(group, 2) for group in groupings])\n\n addr_comp = [address[:ipv4_start - 1] if ipv4_start != 0 else None, ipv6_snippet, address[ipv4_end + 1:] if ipv4_end else None]\n address = ':'.join(filter(None, addr_comp))\n\n # expands collapsed groupings, there can only be a single '::' in a valid\n # address\n if '::' in address:\n missing_groups = 7 - address.count(':')\n address = address.replace('::', '::' + ':' * missing_groups)\n\n # inserts missing zeros\n for index in range(8):\n start = index * 5\n end = address.index(':', start) if index != 7 else len(address)\n missing_zeros = 4 - (end - start)\n\n if missing_zeros > 0:\n address = address[:start] + '0' * missing_zeros + address[start:]\n\n return address", "def test_ipv6_validation_failure():\n with pytest.raises(socket.error):\n is_ipv6('2001::0234:C1ab::A0:aabc:003F')", "def testSPFInvalidIPv6Range(self):\n record = \"v=spf1 ip6:1200:0000:AB00:1234:0000:2552:7777:1313/130 ~all\"\n domain = \"surftown.dk\"\n self.assertRaises(checkdmarc.SPFSyntaxError,\n checkdmarc.parse_spf_record, record, domain)", "def testSPFInvalidIPv6inIPv4(self):\n spf_record = \"v=spf1 ip4:1200:0000:AB00:1234:0000:2552:7777:1313 ~all\"\n domain = \"surftown.dk\"\n self.assertRaises(checkdmarc.SPFSyntaxError,\n checkdmarc.parse_spf_record, spf_record, domain)", "def test_ipv6_validation_success():\n assert 
is_ipv6('2001:db8::ff00:42:8329')", "def testSPFInvalidIPv6(self):\n spf_record = \"v=spf1 ip6:1200:0000:AB00:1234:O000:2552:7777:1313 ~all\"\n domain = \"surftown.dk\"\n self.assertRaises(checkdmarc.SPFSyntaxError,\n checkdmarc.parse_spf_record, spf_record, domain)", "def is_valid_ipv6_address(address):\n try:\n socket.inet_pton(socket.AF_INET6, address)\n except (socket.error, TypeError):\n return False\n return True", "def testSPFInvalidIPv4inIPv6(self):\n spf_record = \"v=spf1 ip6:78.46.96.236 ~all\"\n domain = \"surftown.dk\"\n self.assertRaises(checkdmarc.SPFSyntaxError,\n checkdmarc.parse_spf_record, spf_record, domain)", "def check_ipv4_ipv6_fqdn(val):\n\n try:\n val = u'{0}'.format(val)\n ip = ip_network(val, strict=False)\n return ip.version\n except ValueError:\n return 0", "def is_valid_ipv6_address(address):\n try:\n socket.inet_pton(socket.AF_INET6, address)\n except socket.error: # not a valid address\n return False\n return True", "def format_url_address(address):\n try:\n addr = netaddr.IPAddress(address)\n if addr.version == constants.IPV6_FAMILY:\n return \"[%s]\" % address\n else:\n return str(address)\n except netaddr.AddrFormatError:\n return address", "def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address\")", "def is_valid_ipv6_address(ip_str):\n try:\n ipaddress.IPv6Address(ip_str)\n except ValueError:\n return False\n return True", "def test_ip4_cidr_syntax_internal_v6(self):\n \n test_ip = ip_address.IPAddress(\"192.168.0.1/24\")\n \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff, 192, 168, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1/16\") \n assert test_ip.addr == [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]\n \n test_ip = 
ip_address.IPAddress(\"127.0.0.1/8\")\n assert test_ip.subnet == [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0]\n \n test_ip = ip_address.IPAddress(\"127.0.0.1\")\n assert test_ip.subnet == []", "def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"use `ipv6_addresses` attribute instead\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: use `ipv6_addresses` attribute instead\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")", "def test_empty(self):\n self.assertFalse(isIPv6Address(\"\"))", "def SupportsIPv6(self) -> bool:", "def test_ipv6_in_range(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n \n assert test_ip.in_range(\"2000:0db8:85a3:08d3:1319:8a2e:0370:7344\",\"2002:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n assert test_ip.in_range(\"2001:0db8:85a3:07d3:1319:8a2e:0370:7344\",\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344\")\n assert test_ip.in_range(\"::ffff:1.1.1.1\",\"2501:0db8:85a3:08d3:1319:8a2e:0370:7344\")", "def ipv6_address_space(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_address_space\")", "def test_ipv6_in_net(self):\n test_ip = ip_address.IPAddress(\"2001:0db8:85a3:08d3:1319:8a2e:0370:7344/24\")\n assert test_ip.in_network(\"2001:0d00::/24\")\n assert test_ip.in_network(\"2001:0d00::/29\")", "def OSSupportsIPv6(self) -> bool:", "def is_valid_ipv6_address(ip_str):\r\n # We need to have at least one ':'.\r\n if ':' not in ip_str:\r\n return False\r\n\r\n # We can only have one '::' shortener.\r\n if ip_str.count('::') > 1:\r\n return False\r\n\r\n # '::' should be encompassed by start, digits or end.\r\n if ':::' in ip_str:\r\n return False\r\n\r\n # A single colon can neither start nor end an address.\r\n if ((ip_str.startswith(':') and not ip_str.startswith('::')) or\r\n (ip_str.endswith(':') and not ip_str.endswith('::'))):\r\n return False\r\n\r\n # We can never have more than 7 ':' 
(1::2:3:4:5:6:7:8 is invalid)\r\n if ip_str.count(':') > 7:\r\n return False\r\n\r\n # If we have no concatenation, we need to have 8 fields with 7 ':'.\r\n if '::' not in ip_str and ip_str.count(':') != 7:\r\n # We might have an IPv4 mapped address.\r\n if ip_str.count('.') != 3:\r\n return False\r\n\r\n ip_str = _explode_shorthand_ip_string(ip_str)\r\n\r\n # Now that we have that all squared away, let's check that each of the\r\n # hextets are between 0x0 and 0xFFFF.\r\n for hextet in ip_str.split(':'):\r\n if hextet.count('.') == 3:\r\n # If we have an IPv4 mapped address, the IPv4 portion has to\r\n # be at the end of the IPv6 portion.\r\n if not ip_str.split(':')[-1] == hextet:\r\n return False\r\n if not is_valid_ipv4_address(hextet):\r\n return False\r\n else:\r\n try:\r\n # a value error here means that we got a bad hextet,\r\n # something like 0xzzzz\r\n if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF:\r\n return False\r\n except ValueError:\r\n return False\r\n return True", "def ipv6_to_ipv4(ipv6_str):\n return '.'.join([str(b) for b in ipv6_str[12:]])", "def testIPv6noscheme(self):\n if self.needScheme: return\n \n self.assertEqual([\"[2001:a68:104:1337:250:daff:fe72:871c]/toimia\"], grab('foo [2001:a68:104:1337:250:daff:fe72:871c]/toimia', self.needScheme))", "def is_host_ip6(value):\n try:\n return bool(ipaddress.IPv6Address(value))\n\n except:\n pass", "def ipv6_address(self) -> Optional[pulumi.Input[str]]:\n warnings.warn(\"\"\"The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\", DeprecationWarning)\n pulumi.log.warn(\"\"\"ipv6_address is deprecated: The IPv6 address assigned to the instance. (Deprecated) This property was applicable only to First Generation instances.\"\"\")\n\n return pulumi.get(self, \"ipv6_address\")", "def is_IPv6Address(ipv6address):\n\n return bool(re.match(re_ipv6, ipv6address))" ]
[ "0.664798", "0.6541076", "0.64991224", "0.62932837", "0.6253037", "0.6219661", "0.5915969", "0.589761", "0.5897295", "0.58603305", "0.58561504", "0.5848769", "0.5847096", "0.5829653", "0.58218837", "0.57730556", "0.57442063", "0.57431483", "0.5640292", "0.562629", "0.55638087", "0.5544049", "0.5513738", "0.5492245", "0.541015", "0.54081905", "0.5404107", "0.5388201", "0.5381794", "0.5366924" ]
0.72068405
0
Map (P_fn, b) over basis_elt. Generic adem basis elements alternate [b_0, P_1, b_1, ..., P_n, b_n]. Apply P_fn to P_i and if b_i is a 1 replace it with b, otherwise drop it.
def adem_basis_elt_generic_map(*, P_fn, b, basis_elt): Ps = [P_fn(P) for P in basis_elt[1::2]] bs = [b if epsilon else None for epsilon in basis_elt[ ::2]] result = [None] * len(basis_elt) result[1::2] = Ps result[ ::2] = bs return [x for x in result if x is not None]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def adem_basis_elt_2_map(*, Sq_fn, basis_elt):\r\n return [Sq_fn(Sq) for Sq in basis_elt]", "def map(self, function=lambda item: item):\n for i, row in enumerate(self):\n for j, item in enumerate(row):\n row[j] = function(item)", "def apply(self,i,x):\n #applies the ith map to the point x\n y = self.A[i,:,:] @ x + self.b[i,:]\n return y", "def map(self, function=lambda value: value):\n for j, value in enumerate(self):\n self[j] = function(value)", "def applymap_nb(a, map_func_nb, *args):\n out = np.full_like(a, np.nan, dtype=np.float_)\n\n for col in range(out.shape[1]):\n idxs = np.flatnonzero(~np.isnan(a[:, col]))\n for i in idxs:\n out[i, col] = map_func_nb(i, col, a[i, col], *args)\n return out", "def map(self, fn, inv_fn):\r\n\t\treturn MapProjectedList(self, [fn], [inv_fn])", "def _apply_basis_state(self, state, wires):\n # translate to wire labels used by device\n device_wires = self.map_wires(wires)\n\n # length of basis state parameter\n n_basis_state = len(state)\n\n if not set(state.tolist()).issubset({0, 1}):\n raise ValueError(\"BasisState parameter must consist of 0 or 1 integers.\")\n\n if n_basis_state != len(device_wires):\n raise ValueError(\"BasisState parameter and wires must be of equal length.\")\n\n # get computational basis state number\n basis_states = 2 ** (self.num_wires - 1 - np.array(device_wires))\n basis_states = qml.math.convert_like(basis_states, state)\n num = int(qml.math.dot(state, basis_states))\n\n self._state = self._create_basis_state(num)", "def apply_pressure_boundary_from_function(self, \n boundary_marker, \n p_function):\n self.mfd.apply_dirichlet_from_function(boundary_marker, \n p_function)", "def _map_B(self, obs_seq):\n B_map = np.ones((self.n_states, len(obs_seq)))\n\n for j in range(self.n_states):\n for t, obs in enumerate(obs_seq):\n for i, symbol in enumerate(obs):\n if symbol == self.MISSING or (symbol is np.nan or symbol != symbol):\n # if the symbol is missing, use the maximum likelihood symbol for that 
state\n temp_symbol = np.argmax(\n self.B[i][j]\n )\n B_map[j][t] *= self.B[i][j][temp_symbol]\n else:\n B_map[j][t] *= self.B[i][j][symbol]\n return B_map", "def Power_in_the_Bucket(self,PIB_Func,a,b):\r\n \r\n Temp1 = integrate.dblquad(PIB_Func,0,2*np.pi,lambda x:0,lambda x:b)[0]\r\n Temp2 = integrate.dblquad(PIB_Func,0,2*np.pi,lambda x:0,lambda x:a)[0]\r\n PIB = Temp1/Temp2\r\n \r\n return PIB", "def convert(self, function=pointwise_mi):\n self.normalise()\n feat_prob = Counter()\n for feat_set in self.itervalues():\n for feat in feat_set:\n feat_prob[feat] += feat_set[feat]\n \n for feat_set in self.itervalues():\n code_prob = sum(feat_set.values())\n for feat in feat_set:\n feat_set[feat] = function(code_prob, feat_prob[feat], feat_set[feat])\n return self", "def apply_fn(self,fn):\r\n \r\n self.check_Data()\r\n for split,data_ in self.processed_data.items():\r\n x = data_['x']\r\n x = np.array([fn(xi) for xi in x])\r\n data_['x'] = x", "def local_basis_transform(basis1, basis0):\n\n U_local = np.conj(basis1).T @ basis0\n return(U_local)\n\n \"\"\"what bases? 
For each qubit sig_x sig_y sig_z\"\"\"", "def eval_basis(self, maps):\n if self.eval_method == ['grad']:\n val = nm.tensordot(self.bfref, maps.inv_jac, axes=(-1, 0))\n return val\n\n elif self.eval_method == ['val']:\n return self.bfref\n\n elif self.eval_method == ['div']:\n val = nm.tensordot(self.bfref, maps.inv_jac, axes=(-1, 0))\n val = nm.atleast_3d(nm.einsum('ijkk', val))\n return val\n\n elif self.eval_method == ['grad', 'sym', 'Man']:\n val = nm.tensordot(self.bfref, maps.inv_jac, axes=(-1, 0))\n from sfepy.terms.terms_general import proceed_methods\n val = proceed_methods(val, self.eval_method[1:])\n return val\n\n else:\n msg = \"Improper method '%s' for evaluation of basis functions\" \\\n % (self.eval_method)\n raise NotImplementedError(msg)", "def _map_fn(self):\n raise NotImplementedError", "def initBathy(self, func):\n # This way the bathy is only order 2\n for i in range(self.nElements):\n self.bathyCoord[2 * i] = Elements._all[i].coord[0]\n self.bathyCoord[2 * i + 1] = Elements._all[i].coord[1]\n self.bathy[2 * i] = func(self.bathyCoord[2 * i])\n self.bathy[2 * i + 1] = func(self.bathyCoord[2 * i + 1])", "def _compose_transforms(basis_transforms, source_basis, source_dag):\n example_gates = _get_example_gates(source_dag)\n mapped_instrs = {}\n\n for gate_name, gate_num_qubits in source_basis:\n # Need to grab a gate instance to find num_qubits and num_params.\n # Can be removed following https://github.com/Qiskit/qiskit-terra/pull/3947 .\n example_gate = example_gates[gate_name, gate_num_qubits]\n num_params = len(example_gate.params)\n\n placeholder_params = ParameterVector(gate_name, num_params)\n placeholder_gate = Gate(gate_name, gate_num_qubits, list(placeholder_params))\n placeholder_gate.params = list(placeholder_params)\n\n dag = DAGCircuit()\n qr = QuantumRegister(gate_num_qubits)\n dag.add_qreg(qr)\n dag.apply_operation_back(placeholder_gate, qr[:], [])\n mapped_instrs[gate_name, gate_num_qubits] = placeholder_params, dag\n\n for 
gate_name, gate_num_qubits, equiv_params, equiv in basis_transforms:\n logger.debug(\n \"Composing transform step: %s/%s %s =>\\n%s\",\n gate_name,\n gate_num_qubits,\n equiv_params,\n equiv,\n )\n\n for mapped_instr_name, (dag_params, dag) in mapped_instrs.items():\n doomed_nodes = [\n node\n for node in dag.op_nodes()\n if (node.op.name, node.op.num_qubits) == (gate_name, gate_num_qubits)\n ]\n\n if doomed_nodes and logger.isEnabledFor(logging.DEBUG):\n\n logger.debug(\n \"Updating transform for mapped instr %s %s from \\n%s\",\n mapped_instr_name,\n dag_params,\n dag_to_circuit(dag, copy_operations=False),\n )\n\n for node in doomed_nodes:\n\n replacement = equiv.assign_parameters(\n dict(zip_longest(equiv_params, node.op.params))\n )\n\n replacement_dag = circuit_to_dag(replacement)\n\n dag.substitute_node_with_dag(node, replacement_dag)\n\n if doomed_nodes and logger.isEnabledFor(logging.DEBUG):\n\n logger.debug(\n \"Updated transform for mapped instr %s %s to\\n%s\",\n mapped_instr_name,\n dag_params,\n dag_to_circuit(dag, copy_operations=False),\n )\n\n return mapped_instrs", "def apply_to(self, b):\n raise NotImplementedError(\"base class called\")", "def applyToEach(L,f):\n for i in range(len(L)):\n L[i] = f(L[i])", "def affine_map(self, W, b):\n self.base_vertices = np.dot(W, self.base_vertices) + b\n self.base_vectors = np.dot(W, self.base_vectors)", "def map2(f, A, B):\n return tuple(map(lambda ra, rb: tuple(map(f, ra, rb)),\n A, B))", "def _apply_individual_nbody1_accumulate_python(\n coeff: 'Nparray', ocoeff: 'Nparray', icoeff: 'Nparray',\n amap: 'Nparray', btarget: 'Nparray', bsource: 'Nparray',\n bparity: 'Nparray') -> None:\n for sourcea, targeta, paritya in amap:\n ocoeff[targeta, btarget] += coeff * paritya * numpy.multiply(\n icoeff[sourcea, bsource], bparity)", "def set_basis(self, maps, methods):\n self.eval_method = methods\n\n def get_grad(maps, shape):\n bfref0 = eval_base(maps.qp_coor, diff=True).swapaxes(1, 2)\n if shape == (1,): # scalar 
variable\n bfref = bfref0\n elif len(shape) == 1: # vector variable\n vec_shape = nm.array(bfref0.shape + shape)\n vec_shape[1] *= shape[0]\n bfref = nm.zeros(vec_shape)\n for ii in nm.arange(shape[0]):\n slc = slice(ii*bfref0.shape[1], (ii+1)*bfref0.shape[1])\n bfref[:, slc, ii] = bfref0\n else: # higher-order tensors variable\n msg = \"Evaluation of basis has not been implemented \\\n for higher-order tensors yet.\"\n raise NotImplementedError(msg)\n return bfref\n\n def get_val(maps, shape):\n bfref0 = eval_base(maps.qp_coor, diff=False).swapaxes(1, 2)\n\n if self.shape == (1,): # scalar variable\n bfref = bfref0\n elif len(shape) == 1:\n vec_shape = nm.array(bfref0.shape)\n vec_shape[1:3] *= shape[0]\n bfref = nm.zeros(vec_shape)\n for ii in nm.arange(shape[0]):\n slc = slice(ii*bfref0.shape[1], (ii+1)*bfref0.shape[1])\n bfref[:, slc] = bfref0\n else: # higher-order tensors variable\n msg = \"Evaluation of basis has not been implemented \\\n for higher-order tensors yet.\"\n raise NotImplementedError(msg)\n return bfref\n\n eval_base = self.interp.poly_spaces['v'].eval_base\n if self.eval_method[0] == 'val':\n bfref = get_val(maps, self.shape)\n\n elif self.eval_method[0] == 'grad':\n bfref = get_grad(maps, self.shape)\n\n elif self.eval_method[0] == 'div':\n bfref = get_grad(maps, self.shape)\n\n else:\n raise NotImplementedError(\"The method '%s' is not implemented\" \\\n % (self.eval_method))\n\n self.bfref = bfref\n self.n_basis = self.bfref.shape[1]", "def apply_along_1_nb(a, apply_func_nb, *args):\n out = np.empty_like(a, dtype=np.float_)\n for i in range(a.shape[0]):\n out[i, :] = apply_func_nb(i, a[i, :], *args)\n return out", "def map(self, function):\n pass", "def simple_map_2(f, l):\n # Same as above without comprehension:\n mapped_l = []\n for item in l:\n mapped_l.append( f(item) ) # the extra blanks are just for readability\n return mapped_l", "def basis(T, dimensions, t):\n # Undo change of basis from realizer, and then transform into window\n B 
= canonical_basis(dimensions, t)\n return B.dot(T)", "def map(self, func):\n return _(map(func, self._))", "def map(self, fn, *iterables, **kwargs):\n fn = self._prepare_fn(fn)\n return self._self.map(fn, *iterables, **kwargs)", "def basis(n, *, algebra):\r\n if algebra.generic:\r\n return basis_generic(n, p=algebra.p)\r\n else:\r\n return basis_2(n)" ]
[ "0.7101953", "0.5635103", "0.52208865", "0.52161103", "0.5214447", "0.51587725", "0.51167256", "0.5001927", "0.49330238", "0.49207336", "0.49103978", "0.49085516", "0.48984176", "0.4893959", "0.48832047", "0.4866596", "0.48661175", "0.48558724", "0.48434997", "0.48409623", "0.48355237", "0.48317382", "0.4811751", "0.48088357", "0.4791663", "0.4745311", "0.47328755", "0.47327656", "0.47291598", "0.47231346" ]
0.794436
0
Map Sq_fn over basis_elt We don't actually use this since it seems silly.
def adem_basis_elt_2_map(*, Sq_fn, basis_elt): return [Sq_fn(Sq) for Sq in basis_elt]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def adem_basis_elt_generic_map(*, P_fn, b, basis_elt):\r\n Ps = [P_fn(P) for P in basis_elt[1::2]]\r\n bs = [b if epsilon else None for epsilon in basis_elt[ ::2]]\r\n result = [None] * len(basis_elt)\r\n result[1::2] = Ps\r\n result[ ::2] = bs\r\n return [x for x in result if x is not None]", "def q_mapping(obs,ctr,scn,nq):\n \n # Calculate quantile locations to be used in the next step\n q_intrvl = 100/float(nq); qtl_locs = np.arange(0,100+q_intrvl,q_intrvl) \n\n # Calculate quantiles\n q_obs = np.percentile(obs, list(qtl_locs), axis=0)\n q_ctr = np.percentile(ctr, list(qtl_locs), axis=0) \n \n if(len(obs.shape)==1):\n # Project the data using the correction function \n return interp_extrap(scn,q_ctr,q_obs)\n \n if(len(obs.shape)==2):\n # Project the data using the correction function, separately for each location \n out = np.full(scn.shape,np.nan)\n for i in range(out.shape[1]):\n out[:,i] = interp_extrap(scn[:,i],q_ctr[:,i],q_obs[:,i])\n\n return out", "def eval_basis(self, maps):\n if self.eval_method == ['grad']:\n val = nm.tensordot(self.bfref, maps.inv_jac, axes=(-1, 0))\n return val\n\n elif self.eval_method == ['val']:\n return self.bfref\n\n elif self.eval_method == ['div']:\n val = nm.tensordot(self.bfref, maps.inv_jac, axes=(-1, 0))\n val = nm.atleast_3d(nm.einsum('ijkk', val))\n return val\n\n elif self.eval_method == ['grad', 'sym', 'Man']:\n val = nm.tensordot(self.bfref, maps.inv_jac, axes=(-1, 0))\n from sfepy.terms.terms_general import proceed_methods\n val = proceed_methods(val, self.eval_method[1:])\n return val\n\n else:\n msg = \"Improper method '%s' for evaluation of basis functions\" \\\n % (self.eval_method)\n raise NotImplementedError(msg)", "def basis_fns(n=0):\n return lambda x: np.sum(x ** (n+1), axis=1)", "def new_basis(abc, lattice):\n return np.dot(abc.T, lattice.inv_matrix.T)", "def f(self, (k,t), (J,q,dq), **params):\n f = 0.*q\n return f", "def _F_to_m_on_basis(self, la):\n return 
self._weyl.from_reduced_word(Partition(la).from_kbounded_to_reduced_word(self.k)).stanley_symmetric_function()", "def __call__(self, xi, p=None):\n\n # print('basis function is called {0}, {1}'.format(xi, p))\n\n return self.__basis(xi, self.p if p is None else p)", "def local_basis_transform(basis1, basis0):\n\n U_local = np.conj(basis1).T @ basis0\n return(U_local)\n\n \"\"\"what bases? For each qubit sig_x sig_y sig_z\"\"\"", "def apply_fn(self,fn):\r\n \r\n self.check_Data()\r\n for split,data_ in self.processed_data.items():\r\n x = data_['x']\r\n x = np.array([fn(xi) for xi in x])\r\n data_['x'] = x", "def apply_eqn(eqn, x):\n return eqn[0] * x + eqn[1]", "def map(self, func):\n return _(map(func, self._))", "def map(self, function=lambda item: item):\n for i, row in enumerate(self):\n for j, item in enumerate(row):\n row[j] = function(item)", "def _eta_sfr_scaling(self,x,q):\n i = self.enum[q]\n A = self.scaling_params['A'][i]\n b = self.scaling_params['b'][i]\n return A*x**b", "def apply_beam(self, Q, R, resolution=True, interpolation=0):\n return [(xs.apply_beam(Q, Ri, resolution, interpolation) if xs else None)\n for xs, Ri in zip(self.xs, R)]", "def basis(T, dimensions, t):\n # Undo change of basis from realizer, and then transform into window\n B = canonical_basis(dimensions, t)\n return B.dot(T)", "def hadamard_function(self, qubits):\n\n for qubit in qubits:\n H | qubit", "def q2xyz_func(q): \n if use_scale:\n q = scale_up(q)\n t = calc_T(q) \n return t[0], t[1], t[2]", "def transform(fn):\n def _(vec, dt):\n return np.einsum(\n 'ji,i,ki,k...->j...',\n evecs, fn(evals, dt), evecs, vec, optimize=True)\n\n return _", "def _map_fn(self):\n raise NotImplementedError", "def ComputeBasis(self, p_float=..., p_float=..., p_float=..., *args, **kwargs):\n ...", "def form(func, dist_list, init_search_point, alg):\n \n def SLSQP(func, dist_list, init_search_point):\n \n dim = len(dist_list)\n current_beta = 0\n new_beta = 1\n sig = np.empty((1, dim))\n mu = 
np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n def f_l(x_l):\n return(func([x_l[i,:]*sig[0,i] + mu[0,i] for i in range(0, dim)]))\n \n while abs(current_beta-new_beta) > 0.001:\n current_search_point = new_search_point\n current_beta = new_beta\n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n mu[0,i], sig[0, i] = Rosenblatt_Transform(dist_list[i][0], current_search_point[0,i])\n else:\n mu[0,i], sig[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n dist_fun = lambda u: np.linalg.norm(u) \n \n alg = 'SLSQP'\n \n H = lambda u: f_l(u)\n cons = ({'type': 'eq', 'fun': lambda u: -(H(u.reshape(-1,1)))})\n \n result = scipy.optimize.minimize(dist_fun, x0 = current_search_point, constraints = cons, method=alg)\n \n new_beta = result.fun\n u = np.array(result.x).reshape((1,dim))\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = mu[0,i] + u[0,i]*sig[0,i]\n \n beta_value = new_beta \n p_f = sst.norm.cdf(-beta_value)\n iterations = result.nit\n u = result.x\n x = u[:]*sig[0,:] + mu[0,:]\n print(x)\n grad_val = scipy.optimize.approx_fprime(x, func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(sig[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*sig[0,i]/np.sqrt(sum1) \n \n print('------------------------')\n print('First-Order Reliability Analysis')\n print('Algorithm: slsqp solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_value, p_f))\n print('------------------------')\n \n return(beta_value, p_f, x, u, mu, sig, cosines) \n \n def HL_R(func, dist_list, init_search_point):\n \n iterations = 0\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n new_search_point = np.array(init_search_point).reshape((1, dim))\n \n while abs(cur_beta - 
new_beta) > 0.001:\n cur_beta = new_beta\n cur_cosines = np.zeros((1, dim))\n new_cosines = np.ones((1, dim))\n \n while max((abs(cur_cosines - new_cosines))[0]) > 0.005:\n \n cur_cosines = new_cosines\n \n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0, i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0, i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n \n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.00000001)\n grad_val = grad_val.reshape((1, dim))\n \n sum1 = np.sum((grad_val[0,:]**2)*(global_std_arr[0,:]**2))\n cosines = np.empty((1, dim))\n \n for i in range(0, dim):\n cosines[0,i] = grad_val[0,i]*global_std_arr[0,i]/np.sqrt(sum1)\n \n new_cosines = cosines\n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = global_mean_arr[0,i] - new_cosines[0,i]*global_std_arr[0,i]*cur_beta\n \n iterations = iterations + 1\n \n \n B = Symbol('B')\n coordinates = []\n for i in range(0, dim):\n coordinates.append(global_mean_arr[0, i] - new_cosines[0,i]*global_std_arr[0, i]*B)\n new_beta = float(solve(func(coordinates), B)[0])\n \n cosines = new_cosines \n beta_value = new_beta\n p_f = sst.norm.cdf(-new_beta)\n x = new_search_point\n u = (x[0,:] - global_mean_arr[0,:])/global_std_arr\n \n print('-------------------------')\n print('First-Order Reliability Analysis')\n print('Algorithm: HL-R solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_value, p_f))\n print('-------------------------')\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines)\n \n def HL_RF(func, dist_list, init_search_point):\n\n cur_beta = 3\n new_beta = 0\n dim = len(dist_list)\n\n new_search_point = np.array(init_search_point).reshape((1, dim))\n iterations = 0\n while abs(cur_beta - new_beta) > 
0.001 and abs(func(new_search_point[0])) > 0.001:\n global_mean_arr = np.empty((1, dim))\n global_std_arr = np.empty((1, dim))\n cur_beta = new_beta\n cur_search_point = new_search_point\n \n for i in range(0, dim):\n if dist_list[i][1] != 'norm':\n global_mean_arr[0,i], global_std_arr[0, i] = Rosenblatt_Transform(dist_list[i][0], cur_search_point[0,i])\n else:\n global_mean_arr[0,i], global_std_arr[0, i] = dist_list[i][0].mean(), dist_list[i][0].std()\n \n f_val = func(cur_search_point[0])\n \n x_ast = np.empty((1, dim))\n for i in range(0, dim):\n x_ast[0,i] =(cur_search_point[0,i] - global_mean_arr[0,i])/global_std_arr[0,i]\n\n grad_val = scipy.optimize.approx_fprime(cur_search_point[0], func, 0.000001)\n grad_val = grad_val.reshape((1, dim)) \n \n grad_val_ast = np.empty(grad_val.shape)\n for i in range(0, dim):\n grad_val_ast[0,i] = grad_val[0,i]*global_std_arr[0,i]\n \n t1 = 1/np.sum(grad_val_ast[0,:]**2)\n\n t2 = sum(grad_val_ast[0,:]*x_ast[0,:]) - f_val\n \n t3 = t1*t2\n \n new_x_ast = np.empty(x_ast.shape)\n for i in range(0, dim):\n new_x_ast[0,i] = t3*grad_val_ast[0,i]\n u = new_x_ast\n new_beta = np.linalg.norm(new_x_ast)\n \n new_search_point = np.empty((1, dim))\n for i in range(0, dim):\n new_search_point[0,i] = new_x_ast[0,i]*global_std_arr[0,i] + global_mean_arr[0,i]\n iterations = iterations + 1\n \n grad_val_ast_sum = sum(grad_val_ast[0,:]**2)\n cosines = grad_val_ast/(grad_val_ast_sum**0.5)\n beta_value = new_beta\n x = new_search_point\n p_f = sst.norm.cdf(-beta_value)\n \n print('-------------------------')\n print('First-Order Reliability Analysis')\n print('Algorithm: HL-RF solver')\n print('Iterations: {}\\nReliability index = {}\\nProbability of failure = {}'.format(iterations, beta_value, p_f))\n print('-------------------------')\n \n return(beta_value, p_f, x, u, global_mean_arr, global_std_arr, cosines)\n \n if alg == 'slsqp':\n return(SLSQP(func, dist_list, init_search_point))\n elif alg == 'HL-R':\n return(HL_R(func, dist_list, 
init_search_point))\n elif alg == 'HL-RF':\n return(HL_RF(func, dist_list, init_search_point))", "def apply(self,i,x):\n #applies the ith map to the point x\n y = self.A[i,:,:] @ x + self.b[i,:]\n return y", "def qq_to_qs_function(func, q_1, q_2):\n\n return Qs([func(q, r) for q, r in zip(q_1.qs, q_2.qs)], qs_type=q_1.qs_type, rows=q_1.rows, columns=q_1.columns)", "def __getPolynomial(self) -> 'func':\n return lambda x: sum(self.pts[i]*base(x)\n for i, base in enumerate(self.basis))", "def map(self, function):\n pass", "def create_query_vector(ix, term_dict, bow):\n\n\tqfv = list()\n\tfor idx, tf in bow:\n\t\t# get term from dict index\n\t\tterm = ix[idx]\n\t\t# filter out terms not contained in self.term_dict\n\t\tif term not in term_dict:\n\t\t\tcontinue\n\t\t# append term w/ tf to tfv\n\t\tqfv.append((term, tf))\n\treturn scale_to_L1_norm(defaultdict(float, qfv))", "def invert_function(self, qubits):\n\n for qubit in qubits:\n X | qubit", "def map(self, fn, inv_fn):\r\n\t\treturn MapProjectedList(self, [fn], [inv_fn])", "def lambda_fun_mat(icimeq_funk, x):\n # input must be 1d numpy.ndarray\n assert isinstance(x, np.ndarray) and x.ndim == 1\n\n # result from the lambdified function with array input\n abo = icimeq_funk(x)\n\n # for each row in the array\n for i,a in enumerate(abo):\n # fix the term not properly broadcast in the lambdified funtion\n a_fix = np.broadcast_arrays(*a)\n abo[i] = a_fix\n\n # cleaning to work with a proper array (no array of arrays of arrays !)\n abo_fix = np.array(abo.tolist())\n\n # roll the axis to obtain an array of matrix, and not a matrix of arrays\n abo_fix = np.moveaxis(abo_fix, -1, 0)\n\n return abo_fix" ]
[ "0.62987316", "0.5471408", "0.53439796", "0.53395426", "0.5315071", "0.5250455", "0.5165612", "0.5135809", "0.51355493", "0.51285654", "0.5109317", "0.51041394", "0.51037663", "0.5101545", "0.50495", "0.50141466", "0.49532974", "0.49482656", "0.49316648", "0.4930328", "0.4926972", "0.48856184", "0.48563355", "0.4825227", "0.48234487", "0.48208702", "0.48198295", "0.4801498", "0.47975183", "0.47961307" ]
0.82150215
0
Multiply monomials m1 and m2 and write the result in the Adem basis for p = 2.
def product_2(m1, m2): return make_mono_admissible_2(list(m1) + list(m2))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Multiply(M1,M2):\r\n M3=[]\r\n w=0\r\n while w<len(M2[0]):\r\n tap=[]\r\n t=0\r\n while t<len(M2):\r\n tap.append(M2[t][w])\r\n t=t+1\r\n M3.append(tap)\r\n w=w+1\r\n M=[]\r\n # Multiplying matrices\r\n k=0\r\n sums=0\r\n while k<len(M1):\r\n j=0\r\n mpy=[]\r\n while j<len(M3):\r\n p=0\r\n sums=0\r\n while p<len(M3[j]):\r\n temp = (M1[k][p])*(M3[j][p])\r\n sums=sums+temp\r\n p=p+1\r\n mpy.append(sums)\r\n j=j+1\r\n M.append(mpy)\r\n k=k+1\r\n return M", "def print_m(seq1, seq2, m):\n seq1 = '-' + seq1; seq2 = '-' + seq2\n print()\n print(' '.join(['%3s' % i for i in ' '+seq2]))\n for i, p in enumerate(seq1):\n line = [p] + [m[i][j] for j in range(len(seq2))]\n print(' '.join(['%3s' % i for i in line]))\n print()\n return", "def matrix_mult(m1, m2):\n pass", "def product_on_basis(self, g1, g2):\n return self.monomial(g1 * g2)", "def product(m1, m2, *, algebra):\r\n if algebra.generic:\r\n return product_generic(m1, m2, algebra.p)\r\n else:\r\n return product_2(m1, m2)", "def two_boson_amplitude(matrix, input_mal, output_mal):\n sub = matrix[np.ix_(input_mal, output_mal)]\n out = sub[0, 0] * sub[1, 1] + sub[0, 1] * sub[1, 0]\n mu_factor = _compute_mu_factor(\n _mal_to_mol(input_mal), _mal_to_mol(output_mal))\n return out / mu_factor", "def mult(m1, m2):\n assert np.shape(m1) == (2, 3)\n assert np.shape(m2) == (2, 3)\n\n m1_temp = np.vstack((m1, [0, 0, 1]))\n m2_temp = np.vstack((m2, [0, 0, 1]))\n result = m1_temp * m2_temp\n\n return result[:2, :]", "def product_on_basis(self, t1, t2):\n return tensor( (module.monomial(x1)*module.monomial(x2) for (module, x1, x2) in zip(self._sets, t1, t2)) ) #.", "def classical(m1,m2):\n \n n = m1.shape\n result = np.zeros(n, dtype = int)\n\n for i in range(n[0]):\n for j in range(n[0]):\n for k in range(n[0]):\n result[i][j] += m1[i][k] * m2[k][j]\n return result", "def matrix_mult(m1, m2):\n output = []\n for rowIndex, row in enumerate(m1): #go through rows in m1\n new_row = []\n for columnIndex in range(len(m2[0])): #go 
through indices for each column of m2\n sum = 0\n for index3 in range(len(row)):\n product = m1[rowIndex][index3] * m2[index3][columnIndex]\n sum += product\n new_row.append(sum)\n output.append(new_row)\n return output\n \n \n #output = []\n #first for loop corresponds to the rows of my output matrix and loops through the rows of m1 (enumerate)\n #create an empty new row\n # second for loop, loops through columns of m2\n # create sum variable, initialize it with zero\n # third for loop, multiplies the index of the row in m1 times the index of the column in m2\n # add sum to product and assign this to the sum variable\n # append sum to new row\n # append new row to output\n # return output", "def distribute_monom(mul):\n args = mul.args\n if len(args) == 2:\n from .matadd import MatAdd\n if args[0].is_MatAdd and args[1].is_Rational:\n return MatAdd(*[MatMul(mat, args[1]).doit() for mat in args[0].args])\n if args[1].is_MatAdd and args[0].is_Rational:\n return MatAdd(*[MatMul(args[0], mat).doit() for mat in args[1].args])\n return mul", "def multiply(n1, n2):\n return n1 * n2", "def multiply(n1, n2):\r\n type_1 = check_type(n1)\r\n type_2 = check_type(n2)\r\n if type_2 == 1 and type_1 == 1:\r\n # both imaginary numbers\r\n return str(int(''.join(list(n1)[:-1])) * int(''.join(list(n2)[:-1])) * -1)\r\n if type_2 == 0 and type_1 == 0:\r\n return str(int(n1) * int(n2))\r\n if type_1 == 1 and type_2 == 0:\r\n return str(int(n2) * int(''.join(list(n1)[:-1]))) + \"i\"\r\n if type_2 == 1 and type_1 == 0:\r\n return str(int(n1) * int(''.join(list(n2)[:-1]))) + \"i\"", "def multiplicand_2(p):\n m2 = cddr(p) # (m2 m3 ...)\n rests = cdr(m2) # (m3...)\n if isNull(rests):\n return car(m2)\n else:\n restp = convertToPythonList(cdr(rests))\n return make_product_2(car(m2), car(rests), *restp)", "def matrix_multiply(m1, m2):\n\n\tproduct = numpy.matmul(m1, m2)\n\tif type(product) == numpy.int64:\n\t\treturn float(product)\n\telse:\n\t\tresult = list(product)\n\t\treturn result", "def 
enc_add(pub, m1, m2):\n add_result = m1 * m2 % pub.n_sq\n return add_result", "def a_to_P(M1, M2, a):\n mu = c.GGG * (M1 + M2) * c.Msun_to_g\n n = np.sqrt(mu/(a**3 * c.Rsun_to_cm**3))\n return 2.0*np.pi / n / c.day_to_sec", "def divide(m1,m2):\n \n if ((m1.shape[0] % 2 == 0) or (m1.shape[0] == 1)):\n n = m1.shape[0]\n else:\n n = m1.shape[0] + 1\n result = np.zeros((n, n), dtype = int)\n \n if (n == 1):\n result[0][0] = m1[0][0] * m2[0][0]\n else:\n new = n//2\n \n a11, a12, a21, a22 = m1[:new, :new], m1[new:, :new], m1[:new, new:], m1[new:, new:]\n b11, b12, b21, b22 = m2[:new, :new], m2[new:, :new], m2[:new, new:], m2[new:, new:]\n \n result[:new, :new] = divide(a11,b11) + divide(a12,b21)\n result[new:, :new] = divide(a11,b12) + divide(a12,b22)\n result[:new, new:] = divide(a21,b11) + divide(a22,b21)\n result[new:, new:] = divide(a21,b12) + divide(a22,b22)\n \n return result", "def multiply_nums(n1, n2):\n\n result = n1 * n2\n return result", "def ndom_multiply (a, b):\r\n x=ndom_to_decimal(a)\r\n y=ndom_to_decimal(b)\r\n multi=x*y\r\n mab=decimal_to_ndom(multi)\r\n return mab", "def strassen(m1, m2):\n \n if ((m1.shape[0] % 2 == 0) or (m1.shape[0] == 1)):\n n = m1.shape[0] \n else:\n n = m1.shape[0] + 1\n result = np.zeros((n, n), dtype = int)\n \n if (n == 1):\n result[0][0] = m1[0][0] * m2[0][0]\n else:\n new = n//2\n \n a11, a12, a21, a22 = m1[:new, :new], m1[new:, :new], m1[:new, new:], m1[new:, new:]\n b11, b12, b21, b22 = m2[:new, :new], m2[new:, :new], m2[:new, new:], m2[new:, new:]\n \n p1 = strassen(a11, b12 - b22)\n p2 = strassen(a11 + a12, b22)\n p3 = strassen(a21 + a22, b11)\n p4 = strassen(a22, b21 - b11)\n p5 = strassen(a11 + a22, b11 + b22)\n p6 = strassen(a12 - a22, b21 + b22)\n p7 = strassen(a11 - a21, b11 + b12)\n \n result[:new, :new] = p5 + p4 - p2 + p6\n result[new:, :new] = p1 + p2\n result[:new, new:] = p3 + p4 \n result[new:, new:] = p5 + p1 - p3 - p7\n \n return result", "def product_generic(m1, m2, p):\r\n if m1[-1] == m2[0] == 1:\r\n 
return {}\r\n else:\r\n return make_mono_admissible_generic(m1[:-1] + (m1[-1] + m2[0],) + m2[1:], p)", "def multiply(self, num1, num2):\n if num1 == '0' or num2 == '0':\n return '0'\n\n num = [0] * (len(num1) + len(num2))\n for i, x2 in enumerate(reversed(num2)):\n for j, x1 in enumerate(reversed(num1)):\n num[i+j] += int(x1) * int(x2)\n num[i+j+1] += num[i+j] / 10\n num[i+j] %= 10\n for i in xrange(len(num)-1):\n num[i+1] += num[i] / 10\n num[i] %= 10\n while len(num) > 1 and not num[-1]:\n num.pop()\n return ''.join(map(str, num[::-1]))", "def multiplication(numb1, numb2):\r\n return f\"Your result: {numb1*numb2}\"", "def mul(self, a, b):\n return a * b", "def mu(o1, o2):\n return o1*o2", "def mult(p1, p2):\r\n p = 0\r\n while p2:\r\n if p2 & 0b1:\r\n p ^= p1\r\n p1 <<= 1\r\n if p1 & 0b10000:\r\n p1 ^= 0b11\r\n p2 >>= 1\r\n return p & 0b1111", "def matrix_mult(M, vector1, vector2):\n out = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out", "def matrix_mult(M, vector1, vector2):\n out = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out", "def multiply(self, a, b):\n return a * b" ]
[ "0.63990766", "0.6220588", "0.61849815", "0.61759233", "0.61419266", "0.6095974", "0.6068614", "0.6066465", "0.60507303", "0.60430276", "0.60147953", "0.5993608", "0.5992707", "0.5939356", "0.59325916", "0.59205264", "0.591044", "0.59056205", "0.5866907", "0.5812846", "0.57922745", "0.57766014", "0.5767914", "0.5745329", "0.5740128", "0.5728698", "0.5707305", "0.5694044", "0.5694044", "0.5680343" ]
0.6638194
0
Multiply monomials m1 and m2 and write the result in the Adem basis. This is a dispatch to product_2 and product_generic.
def product(m1, m2, *, algebra): if algebra.generic: return product_generic(m1, m2, algebra.p) else: return product_2(m1, m2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def product_2(m1, m2):\r\n return make_mono_admissible_2(list(m1) + list(m2))", "def matrix_mult(m1, m2):\n pass", "def Multiply(M1,M2):\r\n M3=[]\r\n w=0\r\n while w<len(M2[0]):\r\n tap=[]\r\n t=0\r\n while t<len(M2):\r\n tap.append(M2[t][w])\r\n t=t+1\r\n M3.append(tap)\r\n w=w+1\r\n M=[]\r\n # Multiplying matrices\r\n k=0\r\n sums=0\r\n while k<len(M1):\r\n j=0\r\n mpy=[]\r\n while j<len(M3):\r\n p=0\r\n sums=0\r\n while p<len(M3[j]):\r\n temp = (M1[k][p])*(M3[j][p])\r\n sums=sums+temp\r\n p=p+1\r\n mpy.append(sums)\r\n j=j+1\r\n M.append(mpy)\r\n k=k+1\r\n return M", "def mult(m1, m2):\n assert np.shape(m1) == (2, 3)\n assert np.shape(m2) == (2, 3)\n\n m1_temp = np.vstack((m1, [0, 0, 1]))\n m2_temp = np.vstack((m2, [0, 0, 1]))\n result = m1_temp * m2_temp\n\n return result[:2, :]", "def product_on_basis(self, g1, g2):\n return self.monomial(g1 * g2)", "def matrix_multiply(m1, m2):\n\n\tproduct = numpy.matmul(m1, m2)\n\tif type(product) == numpy.int64:\n\t\treturn float(product)\n\telse:\n\t\tresult = list(product)\n\t\treturn result", "def product_on_basis(self, t1, t2):\n return tensor( (module.monomial(x1)*module.monomial(x2) for (module, x1, x2) in zip(self._sets, t1, t2)) ) #.", "def mul(self, a, b):\n return a * b", "def matrix_mult(m1, m2):\n output = []\n for rowIndex, row in enumerate(m1): #go through rows in m1\n new_row = []\n for columnIndex in range(len(m2[0])): #go through indices for each column of m2\n sum = 0\n for index3 in range(len(row)):\n product = m1[rowIndex][index3] * m2[index3][columnIndex]\n sum += product\n new_row.append(sum)\n output.append(new_row)\n return output\n \n \n #output = []\n #first for loop corresponds to the rows of my output matrix and loops through the rows of m1 (enumerate)\n #create an empty new row\n # second for loop, loops through columns of m2\n # create sum variable, initialize it with zero\n # third for loop, multiplies the index of the row in m1 times the index of the column in m2\n # add sum to product 
and assign this to the sum variable\n # append sum to new row\n # append new row to output\n # return output", "def __mul__(self, other):\r\n return self.prod(other)", "def multiply(n1, n2):\r\n type_1 = check_type(n1)\r\n type_2 = check_type(n2)\r\n if type_2 == 1 and type_1 == 1:\r\n # both imaginary numbers\r\n return str(int(''.join(list(n1)[:-1])) * int(''.join(list(n2)[:-1])) * -1)\r\n if type_2 == 0 and type_1 == 0:\r\n return str(int(n1) * int(n2))\r\n if type_1 == 1 and type_2 == 0:\r\n return str(int(n2) * int(''.join(list(n1)[:-1]))) + \"i\"\r\n if type_2 == 1 and type_1 == 0:\r\n return str(int(n1) * int(''.join(list(n2)[:-1]))) + \"i\"", "def multiply(n1, n2):\n return n1 * n2", "def multiply(self, a, b):\n return a * b", "def classical(m1,m2):\n \n n = m1.shape\n result = np.zeros(n, dtype = int)\n\n for i in range(n[0]):\n for j in range(n[0]):\n for k in range(n[0]):\n result[i][j] += m1[i][k] * m2[k][j]\n return result", "def matmul(a, b):\n raise NotImplementedError", "def distribute_monom(mul):\n args = mul.args\n if len(args) == 2:\n from .matadd import MatAdd\n if args[0].is_MatAdd and args[1].is_Rational:\n return MatAdd(*[MatMul(mat, args[1]).doit() for mat in args[0].args])\n if args[1].is_MatAdd and args[0].is_Rational:\n return MatAdd(*[MatMul(args[0], mat).doit() for mat in args[1].args])\n return mul", "def multiplication(self, a, b):\n if not check_arguments(a, b): # check if arguments are numbers\n self.last_result = a * b", "def matrix_mult(M, vector1, vector2):\n out = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out", "def matrix_mult(M, vector1, vector2):\n out = None\n ### YOUR CODE HERE\n pass\n ### END YOUR CODE\n\n return out", "def mul(self,mat1,mat2):\n if(isinstance(mat2,int)==True):\n result = [[mat1[i][j] * mat2 for j in range(len(mat1[0]))] for i in range(len(mat1))]\n self.out = result\n return self.out\n elif(len(mat1[0])==len(mat2)):\n result = [[sum(a*b for a,b in zip(i,j)) for j in zip(*mat2)] for 
i in mat1]\n self.out = result\n return self.out", "def multiply(self, other):\n from divisi2 import operators\n return operators.multiply(self, other)", "def product_generic(m1, m2, p):\r\n if m1[-1] == m2[0] == 1:\r\n return {}\r\n else:\r\n return make_mono_admissible_generic(m1[:-1] + (m1[-1] + m2[0],) + m2[1:], p)", "def matrixMult( self, matrix0, matrix1 ):\r\n result = {}\r\n keys = sorted( set( matrix0.keys() ) )\r\n count = range( len( matrix0.keys() ) )\r\n \r\n for key in keys:\r\n result[ key ] = []\r\n for i in count:\r\n sum = 0\r\n for j in count:\r\n sum += matrix0[ key ][j] * matrix1[ keys[j] ][i]\r\n result[ key ].insert( i, sum )\r\n \r\n return result", "def _prod_vectorized(M1, M2):\n sh1 = M1.shape\n sh2 = M2.shape\n assert len(sh1) >= 2\n assert len(sh2) >= 2\n assert sh1[-1] == sh2[-2]\n\n ndim1 = len(sh1)\n t1_index = list(xrange(ndim1-2)) + [ndim1-1, ndim1-2]\n return np.sum(np.transpose(M1, t1_index)[..., np.newaxis] *\n M2[..., np.newaxis, :], -3)", "def __mul__(self, other):\n if self.n != other.m:\n raise TypeError(\"Illegal dimensions for mul operator\")\n tmp = [[0 for _ in xrange(self.n)] for _ in xrange(other.m)]\n for i in xrange(self.n):\n for j in xrange(other.m):\n for k in xrange(other.n):\n tmp[i][j] += self.values[i][k] * other.values[k][j]\n res = []\n for i in tmp:\n res += i\n return simplematrix(self.n, other.m, res)", "def multiplicand_2(p):\n m2 = cddr(p) # (m2 m3 ...)\n rests = cdr(m2) # (m3...)\n if isNull(rests):\n return car(m2)\n else:\n restp = convertToPythonList(cdr(rests))\n return make_product_2(car(m2), car(rests), *restp)", "def multiply(self, num1, num2):\n if num1 == '0' or num2 == '0':\n return '0'\n\n num = [0] * (len(num1) + len(num2))\n for i, x2 in enumerate(reversed(num2)):\n for j, x1 in enumerate(reversed(num1)):\n num[i+j] += int(x1) * int(x2)\n num[i+j+1] += num[i+j] / 10\n num[i+j] %= 10\n for i in xrange(len(num)-1):\n num[i+1] += num[i] / 10\n num[i] %= 10\n while len(num) > 1 and not 
num[-1]:\n num.pop()\n return ''.join(map(str, num[::-1]))", "def matrix_multiply(self, Am, Bm):\r\n # Section 1: Ensure A & B dimensions are correct for multiplication\r\n rowsA = len(Am)\r\n colsA = len(Am[0])\r\n rowsB = len(Bm)\r\n colsB = len(Bm[0])\r\n if colsA != rowsB:\r\n raise ArithmeticError(\r\n 'Number of A columns must equal number of B rows.')\r\n \r\n # Section 2: Store matrix multiplication in a new matrix\r\n C = self.zeros_matrix(rowsA, colsB)\r\n for i in range(rowsA):\r\n for j in range(colsB):\r\n total = 0\r\n for ii in range(colsA):\r\n total += Am[i][ii] * Bm[ii][j]\r\n C[i][j] = total\r\n \r\n return C", "def multiply_reducer(self, key, values):\n product = 0\n # for huge rows/columns having both of the row/col vectors as a dict helps in reducing\n # time complexity to join both vectors(which now becomes O(k) from O(k^2));\n m1_vector = {}\n m2_vector = {}\n for vector in values:\n if vector[0] == 'm1':\n m1_vector[vector[1]] = vector[2]\n else:\n m2_vector[vector[1]] = vector[2]\n\n for k in xrange(int(self.options.m1_colcount)):\n product += (m1_vector.get(k, 0.0) * m2_vector.get(k, 0.0))\n yield key, product", "def multiply(num1, num2):\n product = num1 * num2\n return product" ]
[ "0.7025518", "0.6690894", "0.66277486", "0.6581569", "0.6568176", "0.6566743", "0.6516602", "0.6422083", "0.63616985", "0.632454", "0.6263163", "0.6245209", "0.6222676", "0.62163186", "0.6145855", "0.61392516", "0.60905725", "0.60873526", "0.60873526", "0.6052474", "0.6048148", "0.6033771", "0.60224843", "0.6022249", "0.60067654", "0.5993575", "0.5988973", "0.59839636", "0.59669703", "0.59668815" ]
0.6928319
1
Get the basis for the n dimensional part of the Steenrod algebra. Build basis recursively. last = last term. last is >= bound, and we will append (last,) to the end of elements from basis (n last, bound=2 last). This means that 2 last <= n last, or 3 last <= n.
def basis_2(n, *, bound=1): if n == 0: return ((),) result = [(n,)] for last in range(bound, 1 + n // 3): for vec in basis_2(n - last, bound=2 * last): result.append(vec + (last,)) return tuple(result)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def basis_generic(n, *, p, bound=1):\r\n if n == 0:\r\n return ((0,),) # \r\n if n == 1:\r\n return ((1,),)\r\n result = []\r\n \r\n # append P^{last} beta^{epsilon}\r\n for epsilon in [0,1]:\r\n # Without this lower bound edge case we lose the element (0, 1, 1) in degree 5.\r\n # I don't have a good explanation for what it means yet.\r\n lower_bound = bound + epsilon if bound > 1 else 1\r\n for last in range(lower_bound, 1 + (n // (2*(p - 1)))):\r\n remaining_degree = n - 2*(p-1)*last - epsilon\r\n basis_in_remaining_degree = basis_generic(remaining_degree, p=p, bound=p * last)\r\n for vec in basis_in_remaining_degree:\r\n result.append(vec + (last, epsilon))\r\n return tuple(result)", "def basis(n, *, algebra):\r\n if algebra.generic:\r\n return basis_generic(n, p=algebra.p)\r\n else:\r\n return basis_2(n)", "def _build_basis_slow(weight, n_bits):\n _warnings.warn((\"You're using a slow version of the basis-building code used by the disturbance calculations\"\n \" - compile pyGSTi's C extensions to make this go faster.\"))\n n_w = n_parameters_per_matrix(weight, n_bits)\n n_a = n_matrices_per_weight(weight, n_bits)\n dim = 2**n_bits\n\n my_basis = []\n my_constraints = []\n # All sets of qubits of given weight on n_bits\n pairs = list(_itertools.combinations(_np.arange(n_bits), weight))\n\n for ind in range(n_w * n_a):\n v = unit_vector(ind, n_w * n_a)\n vs = _np.reshape(v, (n_a, n_w))\n ctm = sum((swell_slow(transition_matrix(v, 2**weight), pair, n_bits)\n for v, pair in zip(vs, pairs))) - n_a * _np.eye(dim)\n my_basis += [ctm]\n my_constraints += [-_np.diag(ctm)]\n\n return my_basis, _np.array(my_constraints, dtype='int').T", "def basis_function(t, i, knots):\n out = 0.\n t_this = knots[i]\n t_next = knots[i+1]\n t_precog = knots[i+degree]\n t_horizon = knots[i+degree+1]\n\n top = (t-t_this)\n bottom = (t_precog-t_this)\n\n if bottom != 0:\n out = top/bottom * basis_factory(degree-1)(t, i, knots)\n\n top = (t_horizon-t)\n bottom = (t_horizon-t_next)\n if 
bottom != 0:\n out += top/bottom * basis_factory(degree-1)(t, i+1, knots)\n\n return out", "def _build_basis_fast(weight, n_bits):\n n_w = n_parameters_per_matrix(weight, n_bits)\n n_a = n_matrices_per_weight(weight, n_bits)\n dim = 2**n_bits\n\n my_basis = []\n my_constraints = []\n # All sets of qubits of given weight on n_bits\n pairs = list(_itertools.combinations(_np.arange(n_bits), weight))\n\n for ind in range(n_w * n_a):\n v = unit_vector(ind, n_w * n_a)\n vs = _np.reshape(v, (n_a, n_w))\n ctm = sum((swell(transition_matrix(v, 2**weight), pair, n_bits)\n for v, pair in zip(vs, pairs)))\n ctm -= n_a * _np.eye(dim)\n my_basis += [ctm]\n my_constraints += [-_np.diag(ctm)]\n\n return my_basis, _np.array(my_constraints, dtype='int').T", "def basis_factory(degree):\n if degree == 0:\n @memoize\n def basis_function(t, i, knots):\n \"\"\"The basis function for degree = 0 as per eq. 7\"\"\"\n t_this = knots[i]\n t_next = knots[i+1]\n out = 1. if (t>=t_this and t<t_next) else 0.\n return out\n\n else:\n @memoize\n def basis_function(t, i, knots):\n \"\"\"The basis function for degree > 0 as per eq. 8\"\"\"\n out = 0.\n t_this = knots[i]\n t_next = knots[i+1]\n t_precog = knots[i+degree]\n t_horizon = knots[i+degree+1]\n\n top = (t-t_this)\n bottom = (t_precog-t_this)\n\n if bottom != 0:\n out = top/bottom * basis_factory(degree-1)(t, i, knots)\n\n top = (t_horizon-t)\n bottom = (t_horizon-t_next)\n if bottom != 0:\n out += top/bottom * basis_factory(degree-1)(t, i+1, knots)\n\n return out\n\n basis_function.lower = None if degree == 0 else basis_factory(degree-1)\n basis_function.degree = degree\n return basis_function", "def get_basis(n):\n\treturn ' '.join('e{}'.format(i) for i in range(n))", "def all_basis_vectors(n: int) -> list:\n assert n >= 0, \"n must be > 0\"\n basis_1dim = ['0', '1']\n\n if n == 0:\n return []\n if n == 1:\n return basis_1dim\n else:\n current_basis = basis_1dim\n for i in range(1, n):\n # can be made more efficient (e.g. 
by current_basis, current basis until we reach sqrt(n))\n current_basis = outer_subspace_product(basis_1dim, current_basis)\n\n return current_basis", "def bspline_basis(c, n, degree):\n # Create knot vector and a range of samples on the curve\n kv = np.array([0] * degree + [i for i in range(c - degree + 1)] +\n [c - degree] * degree, dtype='int') # knot vector\n u = np.linspace(0, c - degree, n) # samples range\n\n # Cox - DeBoor recursive function to calculate basis\n @memo\n def coxDeBoor(k, d):\n # Test for end conditions\n if (d == 0):\n return ((u - kv[k] >= 0) & (u - kv[k + 1] < 0)).astype(int)\n\n denom1 = kv[k + d] - kv[k]\n term1 = 0\n if denom1 > 0:\n term1 = ((u - kv[k]) / denom1) * coxDeBoor(k, d - 1)\n\n denom2 = kv[k + d + 1] - kv[k + 1]\n term2 = 0\n if denom2 > 0:\n term2 = ((-(u - kv[k + d + 1]) / denom2) * coxDeBoor(k + 1, d - 1))\n\n return term1 + term2\n\n # Compute basis for each point\n b = np.column_stack([coxDeBoor(k, degree) for k in range(c)])\n b[n - 1][-1] = 1\n\n return b", "def get_nd_basis(n=5):\n assert n >= 0\n t = np.linspace(0, np.pi, n + 1)[:-1]\n xs = np.cos(t)\n ys = np.sin(t)\n return [(x, y) for x, y in zip(xs, ys)]", "def generate_basis(n, dim):\n planes = [np.random.randn(dim) for i in range(n)]\n return [p / np.linalg.norm(p) for p in planes]", "def dual_vector_valued_bernstein_basis_fn_simplex(mu, r, i, vertices, n):\n if n == 1:\n assert i == 0\n return dual_bernstein_basis_fn_simplex(mu, r, vertices)\n assert i >= 0\n assert i < n\n\n qs = dual_bernstein_basis_fn_simplex(mu, r, vertices)\n\n def q(p):\n assert p.target_dimension() == n\n return qs(p[i])\n\n return q", "def basis(T, dimensions, t):\n # Undo change of basis from realizer, and then transform into window\n B = canonical_basis(dimensions, t)\n return B.dot(T)", "def get_basis(self, t):\n return np.vstack([self._Phi[:, t], self._PhiD[:, t]]), np.vstack([self._PhiD[:, t], self._PhiDD[:, t]])", "def basis_fns(n=0):\n return lambda x: np.sum(x ** (n+1), 
axis=1)", "def __basis(self, xi, p, deriv_order=0):\n\n if p == 0:\n return self.__basis0(xi)\n else:\n # basis_p_minus_1 = self.__basis(xi, p - 1)\n deriv_order_minus_1 = max(deriv_order - 1, 0)\n\n if p > self.p: # Assume one order higher\n basis_p_minus_1 = self.__call__(xi)\n elif deriv_order_minus_1 == 0:\n basis_p_minus_1 = self.__basis(xi, p - 1)\n else:\n basis_p_minus_1 = self.d(xi, p - 1, deriv_order_minus_1)\n\n first_term_numerator = xi - self.knot_vector[:-p]\n first_term_denominator = self.knot_vector[p:] - self.knot_vector[:-p]\n\n second_term_numerator = self.knot_vector[(p + 1):] - xi\n second_term_denominator = (self.knot_vector[(p + 1):] -\n self.knot_vector[1:-p])\n\n # Change numerator in last recursion if derivatives are desired\n # if deriv_order > 0 and p == self.p:\n if deriv_order > 0:\n first_term_numerator = p\n second_term_numerator = -p\n\n # Disable divide by zero error because we check for it\n with np.errstate(divide='ignore', invalid='ignore'):\n first_term = np.where(first_term_denominator != 0.0,\n (first_term_numerator /\n first_term_denominator), 0.0)\n second_term = np.where(second_term_denominator != 0.0,\n (second_term_numerator /\n second_term_denominator), 0.0)\n\n # Each return trim off the last basis function value\n # that is why the number the element decrease as order increases\n tmp = (first_term[:-1] * basis_p_minus_1[:-1] +\n second_term * basis_p_minus_1[1:])\n\n return tmp", "def __getBasis(self) -> 'matrix':\n return array(list(BernBasis(self.domain, self.n, idx) for idx in range(self.n + 1)))", "def reduce_basis(blst):\n if blst == []: # blst represents scalar\n blst_coef = [S.One]\n blst_expand = [[]]\n return blst_coef, blst_expand\n blst_expand = [blst]\n blst_coef = [S.One]\n blst_flg = [False]\n # reduce untill all blst revise flgs are True\n while not reduce(operator.and_, blst_flg):\n for i in range(len(blst_flg)):\n if not blst_flg[i]: # keep revising if revise flg is False\n tmp = 
MV.reduce_basis_loop(blst_expand[i])\n if isinstance(tmp, bool):\n blst_flg[i] = tmp # revision of blst_expand[i] complete\n elif len(tmp) == 3: # blst_expand[i] contracted\n blst_coef[i] = tmp[0] * blst_coef[i]\n blst_expand[i] = tmp[1]\n blst_flg[i] = tmp[2]\n else: # blst_expand[i] revised\n blst_coef[i] = -blst_coef[i]\n # if revision force one more pass in case revision\n # causes repeated index previous to revised pair of\n # indexes\n blst_flg[i] = False\n blst_expand[i] = tmp[3]\n blst_coef.append(-blst_coef[i] * tmp[0])\n blst_expand.append(tmp[1])\n blst_flg.append(tmp[2])\n new_blst_coef = []\n new_blst_expand = []\n for (coef, expand) in zip(blst_coef, blst_expand):\n if expand in new_blst_expand:\n i = new_blst_expand.index(expand)\n new_blst_coef[i] += coef\n else:\n new_blst_expand.append(expand)\n new_blst_coef.append(coef)\n return new_blst_coef, new_blst_expand", "def _radial_basis(self):\n errexp = 10\n cutbasis = self.rcut + self.sigma*np.sqrt(2.*errexp*np.log(10.))\n spacebasis = cutbasis/self.nmax\n rbasis = np.zeros(self.nmax)\n rbasis[0] = 1.\n for i in range(1, self.nmax):\n rbasis[i] = rbasis[i-1] + spacebasis\n return rbasis", "def basis_function(t, i, knots):\n t_this = knots[i]\n t_next = knots[i+1]\n out = 1. 
if (t>=t_this and t<t_next) else 0.\n return out", "def bernstein_basis_fn_simplex(nu, r, vertices):\n try:\n n = len(nu)\n except TypeError:\n n = 1\n nu = (nu,)\n dim = get_dimension(r, n)\n coeff = np.zeros(dim, dtype=int)\n i = multiindex.get_index(nu, r)\n coeff[i] = 1\n return PolynomialBernsteinSimplex(coeff, vertices, r)", "def calcBasis2(xpts,basis_size,R):\n out = np.zeros((len(xpts),basis_size))\n for n in range(1,basis_size+1):\n out[:,n-1] = n*spherical_jn(0,n*np.pi*xpts/R)\n # Alturnatively\n #out[:,n-1] = (R/xpts)*np.sin(n*np.pi*xpts/R)\n return out", "def bernstein_poly(i, n, t):\n\n return comb(n, i) * ( t**(n-i) ) * (1 - t)**i", "def bernstein_poly(i, n, t):\n\n return comb(n, i) * ( t**(n-i) ) * (1 - t)**i", "def build(n, registry, points, is_vertical):\n\n # Error case\n if n == 0:\n raise ValueError('Points should not have zero length.')\n\n # Base case\n if n == 1:\n return registry\n\n # Find the half spaces\n l, g = pns(n, points, is_vertical)\n\n # Update registry\n registry = update(update(registry, l, 0), g, 1)\n\n # Recursive step\n registry = build(n >> 1, registry, l, not is_vertical)\n return build(n >> 1, registry, g, not is_vertical)", "def build_basis(self):\n if self.debug:\n print('sps_basis: rebuilding basis')\n # Setup the internal component basis arrays\n inwave = self.ssp.wavelengths\n nbasis = len(np.atleast_1d(self.params['mass']))\n self.nbasis = nbasis\n # nbasis = ( len(np.atleast_1d(self.params['zmet'])) *\n # len(np.atleast_1d(self.params['tage'])) )\n self.basis_spec = np.zeros([nbasis, len(inwave)])\n self.basis_mass = np.zeros(nbasis)\n\n i = 0\n tesc = self.params['dust_tesc']\n dust1, dust2 = self.params['dust1'], self.params['dust2']\n for j, zmet in enumerate(self.params['zmet']):\n for k, tage in enumerate(self.params['tage']):\n # get the intrinsic spectrum at this metallicity and age\n if self.safe:\n # do it using compsp\n if self.ssp._zcontinuous > 0:\n self.ssp.params['logzsol'] = zmet\n else:\n 
self.ssp.params['zmet'] = zmet\n w, spec = self.ssp.get_spectrum(tage=tage, peraa=True)\n mass = self.ssp.stellar_mass\n else:\n # do it by hand. Faster but dangerous\n spec, mass, lbol = self.ssp.ztinterp(zmet, tage, peraa=True)\n self.basis_spec[i, :] = spec\n self.basis_mass[i] = mass\n i += 1\n self.basis_dirty = False", "def basis_function(self, basis_function_index, polynomial_order):\n if polynomial_order == 0: # base case\n return self.basis_null(basis_function_index)\n else:\n # recursion formula from Hughes et. al. 2004, p. 4140\n first_num = self.xi - self.knot_vector[basis_function_index]\n first_den = self.knot_vector[basis_function_index + polynomial_order] - self.knot_vector[basis_function_index]\n first_basis = self.basis_function(basis_function_index, polynomial_order - 1)\n\n second_num = self.knot_vector[basis_function_index + polynomial_order + 1] - self.xi\n second_den = self.knot_vector[basis_function_index + polynomial_order + 1] - self.knot_vector[basis_function_index + 1]\n second_basis = self.basis_function(basis_function_index + 1, polynomial_order - 1)\n\n with np.errstate(divide = 'ignore', invalid = 'ignore'): # ignore divide by zero errors, the np.where calls bypass them\n first_term = np.where(np.not_equal(first_den, 0), first_num * first_basis / first_den, 0)\n second_term = np.where(np.not_equal(second_den, 0), (second_num * second_basis / second_den), 0)\n\n return first_term + second_term", "def GetMonomialBasis(dimension, degree):\n monomial_basis = []\n for deg in range(degree,0,-1):\n monomials_of_current_degree = GetMonomialsOfFixedDegree(dimension,deg)\n monomial_basis += monomials_of_current_degree\n # Now, let's append the \"constant\" monomial of zero powers\n monomial_basis += [dimension*[0]]\n return monomial_basis", "def bsplineBasis(n, k,deg):\n knots = np.r_[np.zeros(deg),np.linspace(0,n-1,k),(n-1) * np.ones(deg)]\n x = np.arange(n)\n m = len(knots) - deg - 1\n v = np.zeros((m, len(x)))\n d = np.eye(m, len(knots))\n 
for i in range(m):\n v[i] = spl.splev(x, (knots, d[i], deg))\n return v.T", "def site_basis(self,n,manifold_num):\n basis_matrix = np.zeros((len(self.energies[manifold_num]),len(self.energies[manifold_num])))\n basis_matrix[n,n] = 1\n return basis_matrix" ]
[ "0.6352027", "0.62040514", "0.61184764", "0.6047273", "0.5897896", "0.5890074", "0.5885658", "0.57801056", "0.5759169", "0.5620241", "0.5595064", "0.5581936", "0.5580318", "0.55353695", "0.54639876", "0.54491794", "0.54146963", "0.5369703", "0.5367151", "0.5311752", "0.52955806", "0.52866584", "0.52751905", "0.52751905", "0.5254535", "0.5252616", "0.5228859", "0.5213687", "0.5208352", "0.51913273" ]
0.63450444
1
Adem basis in dimension `n`. Dispatch to basis_2 and basis_generic.
def basis(n, *, algebra): if algebra.generic: return basis_generic(n, p=algebra.p) else: return basis_2(n)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def generate_basis(n, dim):\n planes = [np.random.randn(dim) for i in range(n)]\n return [p / np.linalg.norm(p) for p in planes]", "def basis_fns(n=0):\n return lambda x: np.sum(x ** (n+1), axis=1)", "def __init__(self, dimension, n):\n self.dimension = dimension\n self.n = n\n self.basis = None", "def site_basis(self,n,manifold_num):\n basis_matrix = np.zeros((len(self.energies[manifold_num]),len(self.energies[manifold_num])))\n basis_matrix[n,n] = 1\n return basis_matrix", "def all_basis_vectors(n: int) -> list:\n assert n >= 0, \"n must be > 0\"\n basis_1dim = ['0', '1']\n\n if n == 0:\n return []\n if n == 1:\n return basis_1dim\n else:\n current_basis = basis_1dim\n for i in range(1, n):\n # can be made more efficient (e.g. by current_basis, current basis until we reach sqrt(n))\n current_basis = outer_subspace_product(basis_1dim, current_basis)\n\n return current_basis", "def get_basis(n):\n\treturn ' '.join('e{}'.format(i) for i in range(n))", "def get_nd_basis(n=5):\n assert n >= 0\n t = np.linspace(0, np.pi, n + 1)[:-1]\n xs = np.cos(t)\n ys = np.sin(t)\n return [(x, y) for x, y in zip(xs, ys)]", "def decomposition_into_s_n_irreducibles(self, n):\r\n w5 = partitions_list(n)\r\n M5 = form_matrix_yt(w5)\r\n card = math.factorial(n)\r\n vec_dic = {}\r\n for k in range(self.dimension()+1):\r\n D = {}\r\n uu = []\r\n vv = []\r\n p = k \r\n A = self.matrix_simmetric_representate(p)\r\n if (p >0 and (p <= self.dimension())):\r\n null = nullspace(A)\r\n w3 = []\r\n for i in range(len(null[0])):\r\n w = []\r\n for j in range(len(null)):\r\n w.append(null[j][i])\r\n w3.append(w) \r\n null = w3\r\n M = np.matrix(w3, dtype= np.float64).transpose()\r\n Mi = np.linalg.pinv(M)\r\n else:\r\n if (p == 0):\r\n M = A\r\n null = []\r\n for i in range(A.shape[0]):\r\n aux = []\r\n for j in range(A.shape[1]):\r\n aux.append(M[i,j])\r\n null.append(aux)\r\n M = np.matrix(null, dtype=np.float64)\r\n Mi = M\r\n p = k + 1\r\n if (p>0 and (p <= self.dimension())):\r\n 
A1=self.matrix_simmetric_representate(p)\r\n col = columnspace(A1)\r\n w4 = []\r\n for i in range(len(col[0])):\r\n w = []\r\n for j in range(len(col)):\r\n w.append(col[j][i])\r\n w4.append(w)\r\n col = w4\r\n M1 = np.matrix(w4, dtype=np.float64).transpose()\r\n Mii = np.linalg.pinv(M1)\r\n for h in w5:\r\n p = k \r\n if (p >0 and (p <= self.dimension())):\r\n if (all(elem == 0 for elem in null[0])):\r\n l1 = 0\r\n else:\r\n he = self.basis_group_oriented_p_chains(p) \r\n on1 = np.ones(len(list(he.dic.keys())), dtype=np.float64) \r\n v = P_chains([],[])\r\n v = P_chains(list(he.dic.keys()),on1)\r\n v1 = permutation_in_simplex_test(v, make_permutation(h))\r\n D1={}\r\n c1 = 0\r\n for i in list(v1.dic.keys()):\r\n c2 = 1\r\n for j in list(he.dic.keys()):\r\n if (i == j):\r\n if (v1.dic[i] == he.dic[j]):\r\n D1[c1] = c2\r\n else:\r\n D1[c1] = -c2\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n rr = M.shape[0]\r\n cc = M.shape[1]\r\n Ma = np.zeros([rr,cc],dtype=np.float64)\r\n for i in range(rr):\r\n Ma[i,:] = (M[(abs(D1[i])-1),:]*(np.sign(D1[i])))\r\n l1 = 0\r\n for j in range(cc):\r\n l1 = np.dot(Mi[j,:],Ma[:,j])[0,0] + l1\r\n else:\r\n if (p == 0):\r\n he = self.basis_group_oriented_p_chains(p) \r\n on1 = np.ones(len(list(he.dic.keys())), dtype=np.float64) \r\n v = P_chains([],[])\r\n v = P_chains(list(he.dic.keys()),on1)\r\n v1 = permutation_in_simplex_test(v, make_permutation(h))\r\n D1={}\r\n c1 = 0\r\n for i in list(v1.dic.keys()):\r\n c2 = 1\r\n for j in list(he.dic.keys()):\r\n if (i == j):\r\n if (v1.dic[i] == he.dic[j]):\r\n D1[c1] = c2\r\n else:\r\n D1[c1] = -c2\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n rr = M.shape[0]\r\n cc = M.shape[1]\r\n Ma = np.zeros([rr,cc],dtype=np.float64)\r\n for i in range(rr):\r\n Ma[i,:] = (M[(abs(D1[i])-1),:]*(np.sign(D1[i])))\r\n l1 = 0\r\n for j in range(cc):\r\n l1 = np.dot(Mi[j,:],Ma[:,j])[0,0] + l1\r\n else:\r\n l1 = 0\r\n p = k + 1\r\n if (p>0 and (p <= self.dimension())):\r\n hi = self.basis_group_oriented_p_chains(p-1) \r\n on1i = 
np.ones(len(list(hi.dic.keys())), dtype=np.float64) \r\n vi = P_chains([],[])\r\n vi = P_chains(list(hi.dic.keys()),on1i)\r\n v1i = permutation_in_simplex_test(vi, make_permutation(h))\r\n D1i={}\r\n c1 = 0\r\n for i in list(v1i.dic.keys()):\r\n c2 = 1\r\n for j in list(hi.dic.keys()):\r\n if (i == j):\r\n if (v1i.dic[i] == hi.dic[j]):\r\n D1i[c1] = c2\r\n else:\r\n D1i[c1] = -c2\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n rr = M1.shape[0]\r\n cc = M1.shape[1]\r\n Mai = np.zeros([rr,cc],dtype=np.float64)\r\n for i in range(rr):\r\n Mai[i,:] = (M1[(abs(D1i[i])-1),:]*(np.sign(D1i[i])))\r\n l2 = 0\r\n for j in range(cc):\r\n l2 = np.dot(Mii[j,:],Mai[:,j])[0,0] + l2\r\n else:\r\n l2 = 0\r\n uu.append(l1-l2) \r\n vv.append(size_conjugacy_class(h,n))\r\n for i in range(M5.shape[0]):\r\n Ip = 0\r\n for j in range(M5.shape[1]):\r\n Ip = Ip + M5[i,j]*uu[j]*vv[j]\r\n Ip = Ip/card\r\n D[tuple(w5[i])] = abs(round(Ip))\r\n '''Note that I am using round, only because the results obtained are \r\n not esthetics'''\r\n vec_dic[k] = D\r\n return vec_dic", "def _relax_matrix(self, n=1):\n\n for i in range(n):\n self.level.mid.reshape(-1)[:] = self.R_w.dot(self.level.mid.reshape(-1)) \\\n + self.omega * self.level.rhs / self.D", "def HyperbolicSpace(n):\n if n == 2:\n return HyperbolicPlane()\n raise NotImplementedError(\"currently only implemented in dimension 2\")", "def get_A(n):\n # Create a matrix B\n Bdiag = -4 * np.eye(n)\n Bupper = np.diag([1] * (n - 1), 1)\n Blower = np.diag([1] * (n - 1), -1)\n B = Bdiag + Bupper + Blower\n\n # Creat a list [B,B,B,...,B] with n Bs\n blst = [B] * n\n\n # Unpack and rearrange list of Bs into diagonal of matrix A\n A = sp.linalg.block_diag(*blst)\n\n # Upper diagonal array offset by n: we've got (n-1) I blocks\n # each containing n ones\n Dupper = np.diag(np.ones(n * (n - 1)), n)\n\n # Lower diagonal array offset by -n\n Dlower = np.diag(np.ones(n * (n - 1)), -n)\n A += Dupper + Dlower\n return A", "def canonical_basis(dimensions, t):\n t = 
np.atleast_1d(t)\n B = np.asarray([readout(dimensions, r) for r in t])\n return B", "def orthonormal_1(dim_n=5):\n pb = []\n for i in range(0,dim_n-1):\n pb.append([1.0/(i+1)]*(i+1) + [-1] + [0]*(dim_n-i-2))\n m = matrix(RDF,pb)\n new_m = []\n for i in range(0,dim_n-1):\n new_m.append([RDF(100000*q/norm(m[i])).ceil()/100000 for q in m[i]])\n return matrix(QQ,new_m)", "def basis(T, dimensions, t):\n # Undo change of basis from realizer, and then transform into window\n B = canonical_basis(dimensions, t)\n return B.dot(T)", "def __init__(self, n):\n self.n = n\n self.w = np.zeros(self.n)\n self.z = np.zeros(self.n)\n self.F = 0\n self.M = 0", "def efft(a, n=None, axis=-1, norm=None):\n \n axes = np.arange(0, len(a.shape))\n \n if n == None:\n n = a.shape[axis] \n \n k = np.arange(0, n)\n \n m = np.arange(0, n) / n\n \n if axis == -1:\n A = np.matmul(a, np.cos(-2 * np.pi * np.outer(m, k)))\n else:\n A = np.matmul(a.transpose(), np.cos(-2 * np.pi * np.outer(m, k))).transpose()\n \n return A", "def _build_basis_slow(weight, n_bits):\n _warnings.warn((\"You're using a slow version of the basis-building code used by the disturbance calculations\"\n \" - compile pyGSTi's C extensions to make this go faster.\"))\n n_w = n_parameters_per_matrix(weight, n_bits)\n n_a = n_matrices_per_weight(weight, n_bits)\n dim = 2**n_bits\n\n my_basis = []\n my_constraints = []\n # All sets of qubits of given weight on n_bits\n pairs = list(_itertools.combinations(_np.arange(n_bits), weight))\n\n for ind in range(n_w * n_a):\n v = unit_vector(ind, n_w * n_a)\n vs = _np.reshape(v, (n_a, n_w))\n ctm = sum((swell_slow(transition_matrix(v, 2**weight), pair, n_bits)\n for v, pair in zip(vs, pairs))) - n_a * _np.eye(dim)\n my_basis += [ctm]\n my_constraints += [-_np.diag(ctm)]\n\n return my_basis, _np.array(my_constraints, dtype='int').T", "def ar_basis(max_m=20, *args, **kwargs):\n return np.eye(max_m)", "def get_A3(n):\n # Create a matrix B\n Bdiag = -60 * np.eye(n)\n Bupper1 = np.diag([16] * 
(n - 1), 1)\n Bupper2 = np.diag([-1] * (n - 2), 2)\n Blower1 = np.diag([16] * (n - 1), -1)\n Blower2 = np.diag([-1] * (n - 2), -2)\n B = Bdiag + Bupper1 + Blower1 + Bupper2 + Blower2\n\n # Creat a list [B,B,B,...,B] with n Bs\n blst = [B] * n\n\n # Unpack and rearrange list of Bs into diagonal of matrix A\n A = sp.linalg.block_diag(*blst)\n\n # Upper diagonal array offset by n: we've got (n-1) I blocks\n # each containing n ones\n Dupper1 = np.diag(16*np.ones(n * (n - 1)), n)\n Dupper2 = np.diag(-1*np.ones(n * (n - 2)), 2*n)\n\n # Lower diagonal array offset by -n\n Dlower1 = np.diag(16*np.ones(n * (n - 1)), -n)\n Dlower2 = np.diag(-1*np.ones(n * (n - 2)), -2*n)\n A += Dupper1 + Dlower1 + Dupper2 + Dlower2\n\n # Print the A matrix\n # print A.astype(int) \n return A", "def U_QFT(n):\n dim = 2**n # Hilbert space dimensionality\n Gate= [[np.exp(2 * np.pi * 1j * x * y / dim) for x in range(dim)] for y in range(dim)]\n Gate = np.array(Gate)/np.sqrt(dim)\n return Gate", "def iterated_diagonal(self, n=1):\n def sign(p):\n \"\"\"Counts the number of pairs appearing in reversed order.\n\n \"\"\"\n to_count = filter(lambda x: x[0] > x[1], combinations(p, 2))\n sign_exp = sum(1 for _ in to_count) % 2\n return (-1)**sign_exp\n\n def elementary_summand(fixed, i):\n \"\"\"Models as a function the element 0,...,0,2,1,...,1 appearing\n as one of the summands of the iterated diagonal of an interval.\n\n \"\"\"\n if i < fixed:\n return 0\n elif i == fixed:\n return 2\n else:\n return 1\n\n if self.degree is None:\n raise TypeError(f'only for homogeneous elements')\n\n if self.arity != 1:\n raise TypeError(f'only for arity 1 elements')\n\n answer = self.zero()\n for k, v in self.items():\n cube = k[0]\n intervals = cube.intervals\n base = [i for idx, i in enumerate(cube) if idx not in intervals]\n for p in product(range(n + 1), repeat=self.degree):\n multibase = [list(base) for _ in range(n + 1)]\n for idx, fixed in enumerate(p):\n at = intervals[idx]\n for i, new_base in 
enumerate(multibase):\n to_insert = elementary_summand(fixed, i)\n new_base.insert(at, to_insert)\n new_k = tuple(Cube(x) for x in multibase)\n answer += answer.create({new_k: v * sign(p)})\n return answer", "def decomposition_into_s_n_irreducibles_chain_sp(self, n):\r\n w5 = partitions_list(n)\r\n M5 = form_matrix_yt(w5)\r\n card = math.factorial(n)\r\n vec_dic = {}\r\n for k in range(self.dimension()+1):\r\n D = {}\r\n uu = []\r\n vv = []\r\n he = self.basis_group_oriented_p_chains(k) \r\n for h in w5:\r\n v1 = P_chains([],[])\r\n v1 = permutation_in_simplex_test(he, make_permutation(h))\r\n rr = len(list(he.dic.keys()))\r\n Ma = np.zeros([rr,rr],dtype=np.float64)\r\n c1 = 0\r\n for i in list(he.dic.keys()):\r\n c2 = 0\r\n for j in list(v1.dic.keys()):\r\n if (i == j):\r\n Ma[c1,c2] = v1.dic[i]\r\n c2 = c2 + 1\r\n c1 = c1 + 1\r\n Ma = np.matrix(Ma, dtype='float64')\r\n uu.append(np.trace(Ma)) \r\n vv.append(size_conjugacy_class(h,n))\r\n for i in range(M5.shape[0]):\r\n Ip = 0\r\n for j in range(M5.shape[1]):\r\n Ip = Ip + M5[i,j]*uu[j]*vv[j]\r\n Ip = Ip/card\r\n D[tuple(w5[i])]=Ip\r\n vec_dic[k] = D\r\n return vec_dic", "def gen_plan_params(self, n):\r\n n = self.gen_input_check(n)\r\n # generate samples of semi-major axis\r\n ar = self.arange.to(\"AU\").value\r\n # check if constrainOrbits == True for eccentricity\r\n if self.constrainOrbits:\r\n # restrict semi-major axis limits\r\n arcon = np.array(\r\n [ar[0] / (1.0 - self.erange[0]), ar[1] / (1.0 + self.erange[0])]\r\n )\r\n a = (\r\n np.exp(\r\n np.random.uniform(\r\n low=np.log(arcon[0]), high=np.log(arcon[1]), size=n\r\n )\r\n )\r\n * u.AU\r\n )\r\n tmpa = a.to(\"AU\").value\r\n # upper limit for eccentricity given sma\r\n elim = np.zeros(len(a))\r\n amean = np.mean(ar)\r\n elim[tmpa <= amean] = 1.0 - ar[0] / tmpa[tmpa <= amean]\r\n elim[tmpa > amean] = ar[1] / tmpa[tmpa > amean] - 1.0\r\n elim[elim > self.erange[1]] = self.erange[1]\r\n elim[elim < self.erange[0]] = self.erange[0]\r\n\r\n # uniform 
distribution\r\n e = np.random.uniform(low=self.erange[0], high=elim, size=n)\r\n else:\r\n a = (\r\n np.exp(np.random.uniform(low=np.log(ar[0]), high=np.log(ar[1]), size=n))\r\n * u.AU\r\n )\r\n e = np.random.uniform(low=self.erange[0], high=self.erange[1], size=n)\r\n\r\n # generate geometric albedo\r\n pr = self.prange\r\n p = np.random.uniform(low=pr[0], high=pr[1], size=n)\r\n # generate planetary radius\r\n Rpr = self.Rprange.to(\"earthRad\").value\r\n Rp = (\r\n np.exp(np.random.uniform(low=np.log(Rpr[0]), high=np.log(Rpr[1]), size=n))\r\n * u.earthRad\r\n )\r\n\r\n return a, e, p, Rp", "def __getBasis(self) -> 'matrix':\n return array(list(BernBasis(self.domain, self.n, idx) for idx in range(self.n + 1)))", "def build_basis(self):\n if self.debug:\n print('sps_basis: rebuilding basis')\n # Setup the internal component basis arrays\n inwave = self.ssp.wavelengths\n nbasis = len(np.atleast_1d(self.params['mass']))\n self.nbasis = nbasis\n # nbasis = ( len(np.atleast_1d(self.params['zmet'])) *\n # len(np.atleast_1d(self.params['tage'])) )\n self.basis_spec = np.zeros([nbasis, len(inwave)])\n self.basis_mass = np.zeros(nbasis)\n\n i = 0\n tesc = self.params['dust_tesc']\n dust1, dust2 = self.params['dust1'], self.params['dust2']\n for j, zmet in enumerate(self.params['zmet']):\n for k, tage in enumerate(self.params['tage']):\n # get the intrinsic spectrum at this metallicity and age\n if self.safe:\n # do it using compsp\n if self.ssp._zcontinuous > 0:\n self.ssp.params['logzsol'] = zmet\n else:\n self.ssp.params['zmet'] = zmet\n w, spec = self.ssp.get_spectrum(tage=tage, peraa=True)\n mass = self.ssp.stellar_mass\n else:\n # do it by hand. 
Faster but dangerous\n spec, mass, lbol = self.ssp.ztinterp(zmet, tage, peraa=True)\n self.basis_spec[i, :] = spec\n self.basis_mass[i] = mass\n i += 1\n self.basis_dirty = False", "def calcBasis2(xpts,basis_size,R):\n out = np.zeros((len(xpts),basis_size))\n for n in range(1,basis_size+1):\n out[:,n-1] = n*spherical_jn(0,n*np.pi*xpts/R)\n # Alturnatively\n #out[:,n-1] = (R/xpts)*np.sin(n*np.pi*xpts/R)\n return out", "def _build_basis_fast(weight, n_bits):\n n_w = n_parameters_per_matrix(weight, n_bits)\n n_a = n_matrices_per_weight(weight, n_bits)\n dim = 2**n_bits\n\n my_basis = []\n my_constraints = []\n # All sets of qubits of given weight on n_bits\n pairs = list(_itertools.combinations(_np.arange(n_bits), weight))\n\n for ind in range(n_w * n_a):\n v = unit_vector(ind, n_w * n_a)\n vs = _np.reshape(v, (n_a, n_w))\n ctm = sum((swell(transition_matrix(v, 2**weight), pair, n_bits)\n for v, pair in zip(vs, pairs)))\n ctm -= n_a * _np.eye(dim)\n my_basis += [ctm]\n my_constraints += [-_np.diag(ctm)]\n\n return my_basis, _np.array(my_constraints, dtype='int').T", "def matrix_chain_dynamic(dimensions, n):\n\n m = [[-1 for _ in range(n)] for _ in range(n)]\n s = [[0 for _ in range(n)] for _ in range(n)]\n\n # multiplying matrix by itself\n for i in range(1, n):\n m[i][i] = 0\n\n for length in range(2, n):\n for i in range(1, n - length + 1):\n j = i + length - 1\n for k in range(i, j):\n cost = m[i][k] + m[k + 1][j] + dimensions[i - 1] * dimensions[k] * dimensions[j]\n if cost > m[i][j]:\n m[i][j] = cost\n # index if splitting\n s[i][j] = k\n return m, s", "def relax(self,n=1):\n # print(\"putin\", self.level.rhs.reshape(-1)[:])\n # print(\"getout\", self.solver(self.level.rhs.reshape(-1)))\n for i in range(n):\n self.level.mid[:] += (self.solver(self.level.rhs.reshape(-1)) -\n self.solver(self.stencil.eval_convolve(\n self.level.evaluable_view(self.stencil)).reshape(-1))).reshape(self.level.mid.shape)", "def restriction(n, I, axis=0):\n k = len(I)\n times1d = lambda 
x: x[I]\n trans1d = lambda x: jnp.zeros((n,), dtype=x.dtype).at[I].set(x)\n\n def times(x):\n if x.ndim == 1:\n return times1d(x)\n if x.ndim == 2:\n if axis == 0:\n # we apply column wise\n return x[I, :]\n # we apply row-wise\n return x[:, I]\n # general case\n return jnp.apply_along_axis(times1d, axis, x)\n\n def trans(x):\n if x.ndim == 1:\n return trans1d(x)\n # general case\n return jnp.apply_along_axis(trans1d, axis, x)\n\n return Operator(times=times, trans=trans, shape=(k,n))" ]
[ "0.65958965", "0.6492095", "0.6303911", "0.6270258", "0.61579895", "0.5821242", "0.5714319", "0.5677949", "0.5546939", "0.5542931", "0.5516291", "0.54665136", "0.5442311", "0.5409004", "0.53772366", "0.5372782", "0.53276885", "0.5309369", "0.5297436", "0.52918196", "0.5289455", "0.5234776", "0.5224768", "0.5215785", "0.52056974", "0.5174654", "0.5166053", "0.5164073", "0.51520705", "0.5143955" ]
0.7017382
0
Function that takes a number of nodes and returns a dictionary corresponding to a complete directed graph with the specified number of nodes
def make_complete_graph(num_nodes): digraph = {} if num_nodes < 1: return digraph else: edges = set(range(num_nodes)) for node in range(num_nodes): digraph[node] = edges.difference(set([node])) return digraph
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_complete_graph(num_nodes):\n\tif num_nodes <= 0:\n\t\treturn {}\n\tdict_graph = {}\n\tfor node in range(num_nodes):\n\t\tnode_set = set()\n\t\tfor neighbor in range(num_nodes):\n\t\t\tif node != neighbor:\n\t\t\t\tnode_set.add(neighbor)\n\t\tdict_graph[node] = node_set\n\n\treturn dict_graph", "def make_complete_graph(num_nodes):\r\n if num_nodes < 1:\r\n return dict()\r\n else:\r\n new_dict = dict()\r\n for node in range(num_nodes):\r\n other_nodes = range(num_nodes)\r\n other_nodes.pop(node)\r\n new_dict[node]=set(other_nodes)\r\n return new_dict", "def make_complete_graph(num_nodes):\n\tif num_nodes <= 0:\n\t\treturn {}\n\telse:\n\t\tdict_graph = {}\n\t\tfor node in range(num_nodes):\n\t\t\tnode_set = set()\n\t\t\tfor neighbor in range(num_nodes):\n\t\t\t\tif node != neighbor:\n\t\t\t\t\tnode_set.add(neighbor)\n\t\t\tdict_graph[node] = node_set\n\n\treturn dict_graph", "def make_complete_graph(num_nodes):\r\n if num_nodes <= 0:\r\n return dict()\r\n else:\r\n all_nodes_list = [node for node in range(num_nodes)]\r\n tmp_graph = dict()\r\n for node in range(num_nodes):\r\n adjacent_nodes_list = all_nodes_list[:]\r\n adjacent_nodes_list.remove(node)\r\n tmp_graph.update({node: set(adjacent_nodes_list)})\r\n return tmp_graph", "def make_complete_graph(num_nodes):\n complete_digraph = {}\n if num_nodes > 0 and type(num_nodes) == int:\n neighbors = set([idx for idx in range(num_nodes)])\n for idx in range(num_nodes):\n complete_digraph[idx] = neighbors.copy() #creates adjacency set\n complete_digraph[idx].remove(idx) # pop out self-loop \n return complete_digraph", "def make_complete_graph (num_nodes) :\n graph = dict ()\n if (num_nodes < 1) :\n return graph\n\n for node_ind in range (num_nodes) :\n # create a set containing nodes adjacent to node node_ind\n # node node_ind of the complete graph will have edges to all other nodes except itself\n adj_nodes = range (num_nodes) # list containing numbers from 0 - num_nodes-1\n adj_nodes.remove(node_ind)\n 
graph[node_ind] = set(adj_nodes)\n\n return graph", "def make_complete_graph(num_nodes):\n #initialize empty graph\n complete_graph = {}\n #consider each vertex\n for vertex in range(num_nodes):\n #add vertex with list of neighbours\n complete_graph[vertex] = list(set([j for j in range(num_nodes) if j != vertex]))\n return complete_graph", "def make_complete_graph(num_nodes):\n # initialize empty graph\n complete_graph = {}\n # consider each vertex\n for vertex in range(num_nodes):\n # add vertex with list of neighbours\n complete_graph[vertex] = set([j for j in range(num_nodes) if j != vertex])\n return complete_graph", "def make_complete_graph(num_nodes):\r\n result = {}\r\n for idx in range(0,num_nodes):\r\n result[idx] = set([])\r\n for jdx in range(0,num_nodes):\r\n if (idx!=jdx):\r\n result[idx].add(jdx)\r\n return result", "def make_complete_graph(num_nodes):\n graph = {}\n if num_nodes > 0:\n for dummy_i in range(num_nodes):\n edges = []\n for dummy_j in range(num_nodes):\n if dummy_i != dummy_j:\n edges.append(dummy_j)\n graph[dummy_i] = set(edges)\n \n return graph", "def make_complete_graph(num_nodes):\n xgraph = {} #Create a Blank Dict\n if num_nodes - 1 < 0: # checks to see if the num_nodes is less then 0 (negative number) if it is return empty graph (dict). Could probably combine the If statments for negative nodes and 1 node together\n return xgraph\n if num_nodes - 1 == 0: # If the number of nodes is 1 or returns a one node dict because there are no edges to compute\n xgraph[0] = set([]) # creates a dict that represents a single node graph as per the requirement\n return xgraph # the empty Graph\n else:\n for base_node in range(num_nodes): # This portion starts the build phase. for each node it will compute the theretical maximum amount of edges\n xlist = set([]) # defines an empty list. We first build a list for each node and the append to a dict. 
This list is erased with each iteration\n #print base_node # testing - REMOVE\n for edge_node in range(num_nodes):\n #print edge_node # testing - REMOVE\n if edge_node != base_node: #No Looping is allowed for this project. Therefor we check to insure the we are not counting a self node connection (edge_node NOT equal base_node)\n xlist.add(edge_node) # Populating list that will be added to dict\n\n xgraph[base_node] = xlist # Appending created list to the dict\n\n return xgraph # returning populated dict", "def make_complete_graph(num_nodes):\n xgraph = {} #Create a Blank Dict\n if num_nodes - 1 < 0: # checks to see if the num_nodes is less then 0 (negative number) if it is return empty graph (dict). Could probably combine the If statments for negative nodes and 1 node together\n return xgraph\n if num_nodes - 1 == 0: # If the number of nodes is 1 or returns a one node dict because there are no edges to compute\n xgraph[0] = set([]) # creates a dict that represents a single node graph as per the requirement\n return xgraph # the empty Graph\n else:\n for base_node in range(num_nodes): # This portion starts the build phase. for each node it will compute the theretical maximum amount of edges\n xlist = set([]) # defines an empty list. We first build a list for each node and the append to a dict. This list is erased with each iteration\n #print base_node # testing - REMOVE\n for edge_node in range(num_nodes):\n #print edge_node # testing - REMOVE\n if edge_node != base_node: #No Looping is allowed for this project. 
Therefor we check to insure the we are not counting a self node connection (edge_node NOT equal base_node)\n xlist.add(edge_node) # Populating list that will be added to dict\n\n xgraph[base_node] = xlist # Appending created list to the dict\n\n return xgraph # returning populated dict", "def make_complete_graph(num_nodes):\n graph = {}\n for dummy_node in range(num_nodes):\n graph[dummy_node] = set([dummy_x for dummy_x in range(num_nodes)])\n graph[dummy_node].remove(dummy_node)\n return graph", "def make_complete_graph(num_nodes):\n graph = {}\n for dummy_node in range(num_nodes):\n graph[dummy_node] = set([dummy_x for dummy_x in range(num_nodes)])\n graph[dummy_node].remove(dummy_node)\n return graph", "def get_graph_dictionary(self):\n nodes = {}\n n = 0\n for node in self.__nodes:\n nodes[n] = tuple(node.get_data())\n n += 1\n\n edges = set()\n for edge in self.__edges:\n new_edge = (edge.get_node_a().get_id(), edge.get_node_b().get_id())\n edges.add(new_edge)\n\n graph_dict = {}\n graph_dict[\"nodes\"] = nodes\n graph_dict[\"edges\"] = edges\n\n return graph_dict", "def generate_network_graph(network):\n num_nodes = len(network)\n network_graph = {}\n\n for y_node in range(num_nodes):\n neighbors = {}\n for x_node in range(num_nodes):\n if network[y_node][x_node] is not 0:\n neighbors[x_node] = network[y_node][x_node]\n network_graph[y_node] = neighbors\n\n return network_graph", "def construct_null_graph(num_nodes):\n # return the graph represented using dictionary format\n return dict({node: dict({}) for node in range(num_nodes)})", "def make_er_graph(num_nodes, probability):\n graph = {}\n for node in range(0, num_nodes):\n graph[node] = set([])\n\n for node in range(0, num_nodes - 1):\n for potential_neighbor in range(node + 1, num_nodes):\n if random.random() < probability:\n graph[node].add(potential_neighbor)\n graph[potential_neighbor].add(node)\n\n return graph", "def graph():\n\n graph = {'A': ['B', 'C'],\n 'B': ['C', 'D'],\n 'C': ['D'],\n 'D': 
['C'],\n 'E': ['F'],\n 'F': ['C']}\n\n def generate_edges(graph):\n \"\"\" Convert the dict representation of a graph into a list one\n - https://www.geeksforgeeks.org/generate-graph-using-dictionary-python/\n \"\"\"\n edges = []\n\n # for each node in graph\n for node in graph:\n\n # for each neighbour node of a single node\n for neighbour in graph[node]:\n # if edge exists then append\n edges.append((node, neighbour))\n return edges\n\n a = generate_edges(graph=graph)\n print(a)", "def make_synthetic_undirected_graph(num_nodes, num_exist):\n graph = {}\n edges = 0\n graph = make_complete_graph(num_exist) #creating a complete directed graph on m nodes\n dpa_graph = UPATrial(num_exist)\n for dummy_node in range(num_exist, num_nodes):\n node_neighbors = dpa_graph.run_trial(num_exist)\n graph[dummy_node] = set(node_neighbors)\n for dummy_node_pair in node_neighbors:\n graph[dummy_node_pair] = graph.get(dummy_node_pair,set([]))\n graph[dummy_node_pair].add(dummy_node)\n edges += len(graph[dummy_node])\n\n print \"number of edges are \", edges/2\n return graph", "def create_graph_network(start_node, connections):\n graph = nx.Graph()\n graph.add_node(start_node)\n print(connections.index)\n graph.add_nodes_from(connections.index)\n edge_list = list(zip(itertools.repeat(start_node), connections.index))\n print(\"edge list is \", edge_list)\n graph.add_edges_from(edge_list)\n for i in graph.edges():\n graph[i[0]][i[1]]['weight'] = connections.loc[i[1]]['count']\n # graph[i[0]][i[1]]['proposal_number'] = connections.loc[i[1]]['proposal_number']\n # graph[i[0]][i[1]]['institution'] = connections.loc[i[1]]['institution']\n # graph[i[0]][i[1]]['proposal_title'] = connections.loc[i[1]]['proposal_title']\n # graph[i[0]][i[1]]['project_status'] = connections.loc[i[1]]['project_status']\n\n # Adding random position data to the graph.\n # pos = nx.spring_layout(graph, k=1)\n pos = nx.circular_layout(graph)\n nx.set_node_attributes(graph, 'pos', pos)\n return graph", "def 
er(num_nodes,p) :\n complete_graph = {}\n if (num_nodes <= 0):\n return complete_graph\n for node_index in range(num_nodes) :\n # Loop through all possible nodes and edges, adding edges to set\n complete_graph[node_index] = set([])\n for edge_index in range(num_nodes) :\n if (node_index != edge_index) :\n val = random.random()\n if (val < p):\n complete_graph[node_index].add(edge_index)\n return complete_graph", "def make_random_graph(num_nodes, prob):\n #initialize empty graph\n random_graph = {}\n #consider each vertex\n for i in range(num_nodes):\n random_graph[i] = []\n\n for vertex in range(num_nodes):\n for neighbour in range(vertex+1, num_nodes):\n random_number = random.random()\n if random_number < prob:\n random_graph[vertex] += [neighbour]\n random_graph[neighbour] += [vertex] \n #add vertex with list of out_ neighbours\n\n return random_graph", "def _build_nodes_dict(self, graph):\n nodes_dict = {}\n for node, data in graph.nodes_iter(data=True):\n nodes_dict.update({node: data['label']})\n return nodes_dict", "def build_graph():\n file = open(\"../data/data.json\", \"r\")\n data = json.load(file)\n node_dict = {}\n for id in data:\n node_dict[id] = Node(data[id][\"name\"], data[id][\"product\"], data[id][\"production_volume\"])\n for id in data:\n current_node = node_dict[id]\n for costumer_id in data[id][\"costumers\"]:\n current_node.costumers.append(node_dict[str(costumer_id)])\n current_node.out_edge_capacity_drop[node_dict[str(costumer_id)].name] = 0\n for supplier_id in data[id][\"suppliers\"]:\n current_node.suppliers.append(node_dict[str(supplier_id)])\n current_node.in_edge_capacity_drop[node_dict[str(supplier_id)].name] = 0\n return node_dict", "def __node_rep(self):\n node_list_dict = {}\n for (i, beam) in enumerate(self.beams):\n if str(beam['n1']) not in node_list_dict.keys():\n node_list_dict[str(beam['n1'])] = 1\n else:\n node_list_dict[str(beam['n1'])] += 1\n if str(beam['n2']) not in node_list_dict.keys():\n 
node_list_dict[str(beam['n2'])] = 1\n else:\n node_list_dict[str(beam['n2'])] += 1\n return node_list_dict", "def linkweights(self, nnodes):\n link_graph = zeros(nnodes)\n for node_index, weight in self.weights.items():\n link_graph[node_index] = weight\n return link_graph", "def get_node_slices(num_nodes: Dict[str, int]) -> Dict[str, Tuple[int, int]]:\n node_slices: Dict[NodeType, Tuple[int, int]] = {}\n cumsum = 0\n for node_type, N in num_nodes.items():\n node_slices[node_type] = (cumsum, cumsum + N)\n cumsum += N\n return node_slices", "def build_node_graph(self):\n G = pgv.AGraph(strict=False, directed=True)\n temp_dict = defaultdict(int) #key - from_to_ip, val - counter\n\n for i, ip in enumerate(self.node_graph_dict.keys()):\n G.add_node(ip, shape='rect', label='%d' % i)\n logger.info(\"All nodes added\")\n\n for ip, droplist in self.node_graph_dict.iteritems():\n for gnid, dropids in droplist:\n for did in dropids:\n tip = self.gnid_ip_dict[self.oid_gnid_dict[did]]\n k = '{0}_{1}'.format(ip, tip)\n temp_dict[k] += 1\n\n for k, v in temp_dict.iteritems():\n ks = k.split('_')\n G.add_edge(ks[0], ks[1], weight=v)\n\n return G", "def get_subgraphs(nodes):\n subs = collections.defaultdict(dict)\n for name, node in nodes.items():\n subs[node['sg']][name] = node\n return subs" ]
[ "0.8063795", "0.80589145", "0.80342263", "0.7942149", "0.78811663", "0.7867539", "0.7648591", "0.7568491", "0.75270027", "0.747973", "0.7331694", "0.7331694", "0.72387207", "0.72387207", "0.6977525", "0.69037884", "0.68886876", "0.6683224", "0.6630526", "0.6614204", "0.6563346", "0.65266347", "0.6507746", "0.64941263", "0.6493423", "0.64800227", "0.64558315", "0.6454442", "0.64536875", "0.6437285" ]
0.8065373
0
return json User objects
def user_ret(): user_list = [] all_objs = storage.all("User") for obj in all_objs.values(): user_list.append(obj.to_dict()) return jsonify(user_list)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def userJSON():\n user = session.query(User).all()\n result = []\n\n for i in user:\n result += [i.serialize]\n\n return jsonify(Users=result)", "def json(self):\n result = {}\n for user in self.users:\n result[user.user_id] = user.json\n return result", "def get_users():\n return jsonify([\n users.to_dict()\n for users in models.storage.all('User').values()\n ])", "def get_user():\n users = User.query.all()\n result = usersSchema.dump(users)\n return jsonify(result)", "def get_all_user():\n user = UserModel.objects()\n return jsonify(user), 200", "def get_users():\n selection = []\n try:\n selection = [{'id':usr.id, 'username':usr.username, 'email':usr.email} \n for usr in User.query.all()]\n except:\n selection = {'error':True}\n return json.dumps(selection)", "def list_users():\n return jsonify(user=\"joe\")", "def get(self):\n users = User.query.all()\n usersJSON = []\n for u in users:\n usersJSON.append({'id': u.id, 'admin': u.admin})\n return {'users': usersJSON}", "def get_users():\n users = storage.all('User')\n users_list = []\n for user in users.values():\n users_list.append(user.to_dict())\n return jsonify(users_list), 200", "def get_users():\n users = models.User.query.all()\n friends_json = []\n for u in users:\n user = {\n 'id': u.id,\n 'name': u.name,\n 'email': u.email,\n 'regID': u.regid,\n 'photo': u.photo\n }\n friends_json.append(user)\n return jsonify({'users': friends_json}), 200", "def get(self):\n users = User.query.all()\n usersJSON = []\n for u in users:\n usersJSON.append({'id':u.id, 'admin':u.admin})\n return { 'users' : usersJSON }", "def get_users():\n users = User.query # no need to order\n users_data = [user.to_dict() for user in users.all()]\n return jsonify(users=users_data)", "def all_Users():\n new_dict = []\n for usr in storage.all('User').values():\n new_dict.append(usr.to_dict())\n return jsonify(new_dict)", "def view_users(self):\n con = dbcon()\n cur = con.cursor()\n cur.execute(\"SELECT * FROM my_users\")\n res = 
cur.fetchall()\n user_list=[]\n for user in res:\n user_det = {\n 'user_id':user[0],\n 'username':user[1],\n 'password':user[2],\n 'confirmpass':user[3],\n 'addres':user[4],\n 'role':user[5]\n }\n user_list.append(user_det)\n return jsonify({'Users': user_list}), 200", "def json(self):\n return {\n 'id': self.id,\n 'email': self.email,\n 'username': self.username\n }", "def get_users():\n users = User.query.all()\n users_schema = UserSchema()\n result = users_schema.dump(users, many=True)\n return jsonify({'users': result.data})", "def users():\n if request.method == 'POST':\n json = request.get_json()\n if json is None:\n return abort(make_response('Not a JSON', 400))\n if 'email' not in json:\n return abort(make_response('Missing email', 400))\n if 'password' not in json:\n return abort(make_response('Missing password', 400))\n user = User(**json)\n user.save()\n return jsonify(user.to_dict()), 201\n\n return jsonify([user.to_dict() for user in storage.all('User').values()])", "def get(self, user_id):\n return jsonify(User.objects(user_id__exact=user_id))", "def get_json(self):\n url = 'http://lkd.to/api/' + self.user\n response = requests.get(url)\n return response.json()", "def get_users(request):\n\n users_list = User.objects.all().values(\n 'id', 'username', 'first_name', 'last_name'\n )\n\n return HttpResponse(json.dumps(\n {'users': list(users_list)}\n ))", "def display_users():\n users = storage.all(\"User\").values()\n users_list = []\n for obj in users:\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def get_all_users():\n users = []\n for mv in storage.all(\"User\").values():\n users.append(mv.to_dict())\n return jsonify(users)", "def show_users():\n users_list = []\n all_users = storage.all('User')\n for obj in all_users.values():\n users_list.append(obj.to_dict())\n return jsonify(users_list)", "def user_list():\n users = User.objects.all()\n return {\"users\": users}", "def users_no_id_get():\n all_users = []\n for user in 
storage.all(\"User\").values():\n all_users.append(user.to_dict())\n return jsonify(all_users)", "def get_users():\n users = User.query.order_by(User.id).all()\n users = {user.id: user.username for user in users}\n\n response = jsonify({\"success\": True, \"users\": users})\n\n return response", "def list_users():\n return json_response(\n status=200,\n response_data={\n \"success\": True,\n \"data\": {\n \"users\": [user.serialize() for user in User.all()]\n }\n }\n )", "def to_json(self):\n json_user = {'url': url_for('api.user_detail', id=self.id)}\n return json_user", "def all_users(request):\r\n user = User()\r\n return HttpResponse(json.dumps(user.parseFile()))", "def show_users(self):\n\n u = User(self.settings)\n users_list = u.find_users()\n\n # transform the results in a \"jsonifiable\"-form\n json_results = []\n for user in users_list:\n json_results.append(user.to_json())\n\n # return\n return json_results" ]
[ "0.82542247", "0.8115099", "0.7857184", "0.78480744", "0.7821726", "0.77648944", "0.7681941", "0.7616825", "0.7609725", "0.76026094", "0.760245", "0.7591317", "0.75394744", "0.75233406", "0.74818206", "0.7444616", "0.74334586", "0.7428471", "0.7423055", "0.7418192", "0.7416906", "0.7386716", "0.7334782", "0.7322796", "0.7315593", "0.73127437", "0.72658587", "0.72415674", "0.7240207", "0.72287124" ]
0.81571275
1
Creates a grade dict from an assignment form.
def form_to_grade_assignment(form): grade_id = "{student}-{assignment}-{course}".format(**form) grade = { "_id": grade_id, "student": form["student"], "assignment": form["assignment"], "course": form["course"], } if form["filename"]: grade["filename"] = form["filename"] scores = { int(k[5:]): float(v) for k, v in form.items() if k.startswith("score") } scores = sorted(scores.items()) grade["scores"] = [v for _, v in scores] return grade
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def form_to_grade_row(form):\n course = form[\"course\"]\n student = form[\"student\"]\n assignment, _, _ = form[\"assignment\"].partition(\"[\")\n rowdata = json.loads(form[\"rowdata\"])\n grade = {\"student\": student, \"assignment\": assignment, \"course\": course}\n grade[\"_id\"] = \"{student}-{assignment}-{course}\".format(**grade)\n scores = {}\n for k, v in rowdata.items():\n if not k.startswith(assignment):\n continue\n _, _, n = k.partition(\"[\")\n n, _, _ = n.partition(\"]\")\n scores[int(n)] = v\n scores = sorted(scores.items())\n grade[\"scores\"] = [v for _, v in scores]\n return grade", "def what_is_the_grade(self):\n\t\treturn_dict = {\n\t\t\t'section_title': self.title, \n\t\t\t'section_weight': self.weight,\n\t\t\t'grade_value' : self.current_grade_value,\n\t\t\t'comment_text' : self.current_comment_text,\n\t\t\t'default_comments_text' : self.current_default_comment_text,\n\t\t\t'example_comments_text' : self.current_example_comment_text,\n\t\t\t'is_complete': self.is_complete\n\t\t}\n\n\t\treturn return_dict", "def parseAssignments(assignments):\n\treturn dict([(lead, trail) for lead, trail in\n\t\t[litPair.split(\":\") for litPair in assignments.split()]])", "def make_gradebook(roster, grades, sub_info):\n gradebook = []\n for student in roster.keys():\n s = {}\n # fill student file with evaluation grades\n for day, score in zip(sub_info.keys(), grades):\n s[str(day)] = score[student]\n s['total'] = sum(s.values())\n s['username'] = student\n gradebook.append(s)\n return gradebook", "def get_grade_entries(user, assignments_map, students_map):\n grade_entries = GradeEntry.query(ancestor=get_parent_key(user)).fetch()\n for grade_entry in grade_entries:\n grade_entry.assignment = assignments_map[grade_entry.assignment_key]\n grade_entry.student = students_map[grade_entry.student_key]\n return grade_entries", "def grade_to_gpa(grade):\n\n letter_grade = \"\"\n gpa = 0.0\n\n if type(grade) is str:\n accepted_values = [\"A+\", \"A\", \"A-\", 
\"B+\", \"B\", \"B-\", \"FZ\"]\n\n # check that the grade is one of the accepted values\n if grade in accepted_values:\n\n # assign grade to letter_grade\n letter_grade = grade\n\n #If grade input is a string, but not an accepted value, raise a ValueError\n else:\n raise ValueError(\"Incorrect value. Grade must be an accepted letter grade.\")\n\n elif type(grade) is int:\n\n # check that grade is in the accepted range 0 to 100\n if 0 <= grade <= 100:\n\n # convert the numeric grade to a letter grade\n mark_to_letter = grade\n\n # assign the value to letter_grade\n # hint: letter_grade = mark_to_letter(grade)\n if mark_to_letter >= 90:\n letter_grade = \"A+\"\n elif mark_to_letter >= 85:\n letter_grade = \"A\"\n elif mark_to_letter >= 80:\n letter_grade = \"A-\"\n elif mark_to_letter >= 77:\n letter_grade = \"B+\"\n elif mark_to_letter >= 73:\n letter_grade = \"B\"\n elif mark_to_letter >= 70:\n letter_grade = \"B-\"\n else:\n letter_grade = \"FZ\"\n\n #If grade input is not in accepted range, raise ValueError\n else:\n raise ValueError(\"Incorrect value. 
Grade must be in the accepted range of 0 to 100.\")\n else:\n # raise a TypeError exception\n raise TypeError(\"Invalid type passed as parameter\")\n\n # write a long if-statement to convert letter_grade\n # assign the value to gpa\n if letter_grade == \"A+\":\n gpa = 4.0\n if letter_grade == \"A\":\n gpa = 4.0\n if letter_grade == \"A-\":\n gpa = 3.7\n if letter_grade == \"B+\":\n gpa = 3.3\n if letter_grade == \"B\":\n gpa = 3.0\n if letter_grade == \"B-\":\n gpa = 2.7\n if letter_grade == \"FZ\":\n gpa = 0.0\n\n #Return the gpa of the grade\n return gpa", "def add_an_assignment(cls):\n os.system('clear')\n while True:\n data = Ui.get_inputs(['Start date\\n\\tday(1-31): ', '\\tmonth(1-12): ', '\\tyear(2000+): ',\n 'End date\\n\\tday(1-31): ', '\\tmonth(1-12): ', '\\tyear(2000+): ',\n 'Assignment name\\n\\t'], \"Please provide the assignment details: \\n\")\n try:\n start_date_day = int(data[0])\n start_date_month = int(data[1])\n start_date_year = int(data[2])\n end_date_day = int(data[3])\n end_date_month = int(data[4])\n end_date_year = int(data[5])\n name_of_assign = str(data[6])\n except ValueError:\n Ui.print_message(\"\\nDate must be an integer!\\n\\n\")\n break\n\n if start_date_day > 31 or start_date_day < 1:\n Ui.print_message('\\nStart day value is incorrect')\n else:\n if start_date_month > 12 or start_date_month < 1:\n Ui.print_message('\\nStart month value is incorrect')\n else:\n if start_date_year > 9999 or start_date_year < 2000:\n Ui.print_message('\\nStart year value is incorrect')\n else:\n if end_date_day > 31 or end_date_day < 1:\n Ui.print_message('\\nEnd day value is incorrect')\n else:\n if end_date_month > 12 or end_date_month < 1:\n Ui.print_message('\\nEnd month value is incorrect')\n else:\n if end_date_year > 9999 or end_date_year < 1000:\n Ui.print_message('\\nEnd year value is incorrect')\n else:\n if len(name_of_assign) <= 1:\n Ui.print_message(\"\\nAssignment name have to be longer!\")\n else:\n list_of_names_of_assignments = []\n 
for i in Assignments.assignments_list:\n list_of_names_of_assignments.append(i.assignment_name)\n if name_of_assign in list_of_names_of_assignments:\n Ui.print_message(\"\\nAssignment name already exist, \"\n \"type another one!\")\n else:\n start_date = '{}-{}-{}'.format(start_date_year,\n start_date_month,\n start_date_day)\n end_date = '{}-{}-{}'.format(end_date_year,\n end_date_month,\n end_date_day)\n new_assignment = cls(start_date, end_date, name_of_assign)\n Assignments.assignments_list.append(new_assignment)\n Ui.print_message(\"\\nAssignment added!\\n\")\n Ui.get_inputs([''], \"Click enter to go back\")\n break # it stops the WHILE loop whenever passed information is incorrect, or assignment has been added", "def bases(layout, mvClass=MultiVector, grades=None):\n\n dict = {}\n for i in range(layout.gaDims):\n grade = layout.gradeList[i]\n if grade != 0:\n if grades is not None and grade not in grades:\n continue\n v = np.zeros((layout.gaDims,), dtype=int)\n v[i] = 1\n dict[layout.names[i]] = mvClass(layout, v)\n return dict", "def dict_form1(fac_data):\n #sort through last names\n #count number last names? 
maybe not\n #create key for each last name\n #if key exists, add list of values [degree, title -'of biostatistics', email]\n \n form_dict = {}\n\n for i in fac_data:\n #get name\n split_name = i['name'].split(\" \")\n last_name = split_name[len(split_name)-1]\n \n #build array of degree/title/email\n fixed_title = i[' title'].strip(\" of Biostatistics\")\n \n info = []\n info.append(i[' degree'])\n info.append(fixed_title)\n info.append(i[' email'])\n \n #add to dictionary\n if last_name in form_dict:\n form_dict[last_name].append([info])\n else:\n form_dict[last_name] = info\n\n return form_dict", "def updateGPA(info):\n grades = []\n\n n = 5 # you can change this depends on how many your subjects are\n x = 1\n print(\"Please enter\", n, \"grades: \")\n\n \n for i in range(0, n): #for every grade that's being inputted in goes into the grades list that contains dictionaries\n print(x, \":\")\n x += 1\n grade = int(input())\n grades.append(grade)\n \n grade = calculateGPA(grades)\n\n for letter, numGrade in grading_sys.items():# this is what turns the average grade to its letter grade equivalent\n if numGrade <= grade:\n info[\"GPA\"] = letter\n break\n return info", "def gen_dict():\n lines = [line for line in csv.reader(open(__ppath__ + \"/data/occupations.csv\"))] # uses a csv.reader to parse the file, converts the generic iterable to a list\n lines = [(line[0],float(line[1])) for line in lines[1:-2]]# removes the column names and \"Total\" row, re-expresses as a list of tuples to enable dictionary conversion\n lines.append((\"Unemployed\",0.2)) # accounts for missing 0.2% of jobs\n return dict(lines) # converts to dictionary", "def dict_form2(fac_data):\n #sort through last names\n #create key for each last name\n #if key exists, add list of values [degree, title -'of biostatistics', email]\n \n form_dict = {}\n\n for i in fac_data:\n #get name\n split_name = i['name'].split(\" \")\n last_name = split_name[len(split_name)-1]\n first_name = split_name[0]\n key = 
(last_name, first_name)\n \n #build array of degree/title/email\n fixed_title = i[' title'].strip(\" of Biostatistics\")\n \n info = []\n info.append(i[' degree'])\n info.append(fixed_title)\n info.append(i[' email'])\n \n #add to dictionary\n if key in form_dict:\n form_dict[key].append([info])\n else:\n form_dict[key] = info\n\n return form_dict", "def _txt_to_basis_dict(basis_txt):\n\n symbol = basis_txt[0].split()[0]\n\n def is_number(s):\n try:\n float(s)\n return True\n except ValueError:\n return False\n\n basis_pure = basis_txt[1:]\n\n section_marks = []\n for i, line in enumerate(basis_pure):\n if not is_number(line.split()[0]):\n section_marks.append(i)\n\n shells = []\n for i in section_marks[:-1]:\n type, n_func, _ = basis_pure[i].split()\n n_func = int(n_func)\n\n if type.upper() in ['SP']:\n p_exponent, con_coefficients, p_con_coefficients = np.array([line.split()\n for line in basis_pure[i + 1:i + n_func + 1]],\n dtype=float).T\n else:\n p_exponent, con_coefficients = np.array([line.split()\n for line in basis_pure[i + 1:i + n_func + 1]],\n dtype=float).T\n p_con_coefficients = np.zeros_like(p_exponent)\n\n\n shells.append({'shell_type': type,\n 'p_exponents': list(p_exponent),\n 'con_coefficients': list(con_coefficients),\n 'p_con_coefficients': list(p_con_coefficients)})\n\n return {'symbol': symbol,\n 'shells': shells}", "def create(self, validated_data):\n return Assignment.objects.create(**validated_data)", "def input_assignment(in_dict):\n\n # define initialization & assignment strings\n init_str= \"\"\n assign_str= \"\"\n\n # loop through elements\n for key,value in in_dict.items():\n # Check if type is a boolean\n if isinstance(in_dict[key][0], str):\n # Initialization\n init_str= init_str + \"init({0})\".format(key) + \":= {TRUE, FALSE};\\n\"\n\n # Assignment\n assign_str= assign_str + \\\n 'next({0}):= case\\n'.format(key) + \\\n ' stab: {TRUE, FALSE};\\n' +\\\n ' TRUE: {0};\\n'.format(key) + \\\n 'esac;\\n'\n \n # if type is not a 
boolean\n else:\n\n # Initialization\n init_val= in_dict[key][0][1]\n # Check if initial value is a string and is not n enum type\n if (isinstance(init_val, str) and not (\"{\" in init_val)):\n init_val= '\"' + init_val + '\"'\n\n init_str= init_str + \"init({0})\".format(key) + \":= {0};\\n\".format(init_val)\n\n # Assignment\n assign_str= assign_str + \\\n 'next({0}):= case\\n'.format(key) + \\\n ' stab: {0};\\n'.format(in_dict[key][0][2]) +\\\n ' TRUE: {0};\\n'.format(key) + \\\n 'esac;\\n'\n \n # return\n out_str= init_str + assign_str\n \n return out_str", "def _gradesets_and_errors_for(self, course_id, students):\r\n students_to_gradesets = {}\r\n students_to_errors = {}\r\n\r\n for student, gradeset, err_msg in iterate_grades_for(course_id, students):\r\n students_to_gradesets[student] = gradeset\r\n if err_msg:\r\n students_to_errors[student] = err_msg\r\n\r\n return students_to_gradesets, students_to_errors", "def letter_grades(adict):\n\n for key in adict:\n\t if adict[key] >= 90:\n\t\t adict[key] = 'A'\n\t elif 80 <= adict[key] < 90:\n\t\t adict[key] = 'B'\n\t elif 70 <= adict[key] < 80:\n\t\t adict[key] = 'C'\n\t elif 60 <= adict[key] < 70:\n\t\t adict[key] = 'D'\n\t else:\n\t\t adict[key] = 'F'", "def assignment(self):\n return {}", "def assignment_grade(id, session_id, course_id):\n\n user_id = session.get('user_id')\n\n con = db.get_db()\n cur = con.cursor()\n cur.execute(\"\"\"SELECT DISTINCT(ROUND(grades.points_received / grades.total_points, 2) * 100) as assignment_grade,\n grades.total_points as total, grades.points_received as earned,\n grades.submission as submission, grades.feedback as feedback,\n grades.student_id, grades.assignment_id as assign_id, assignments.name as assign_name,\n assignments.description as description,\n grades.grade_id, roster.session_id as class_session, courses.name as name\n\t FROM courses JOIN sessions on courses.course_id=sessions.id\n\t JOIN assignments on assignments.session_id=sessions.id\n JOIN grades on 
grades.assignment_id=assignments.assignment_id\n JOIN roster on roster.session_id=sessions.id\n WHERE grades.assignment_id= %s\n AND grades.student_id= %s\"\"\",\n (id, user_id))\n\n grade = cur.fetchone()\n cur.close()\n con.close()\n\n return render_template(\"/layouts/gradebook/assignment_grade.html\", course_id=course_id, session_id=session_id, id=id, grade=grade)", "def grade_conversion(grade):\n grade_converter = {\"A\": 4.00, \"A-\":3.67, \"B+\": 3.33, \"B\": 3.00, \"B-\": 2.67, \"C+\": 2.33, \"C\": 2.00, \"C-\": 1.67, \"D\": 1.00, \"F\": 0.0}\n while True:\n for val, val2 in grade_converter.items():\n if grade == val:\n return val2", "def buildApprovalToDict(self, uID, approval, firstname, lastname):\n result = {}\n result['uID'] = uID\n result['approval'] = approval\n result['firstname'] = firstname\n result['lastname'] = lastname\n return result", "def grade_submissions(submissions, roster, deadlines):\n grades = dict([(u, 0) for u in roster.keys()])\n for sub in submissions:\n sub_date = convert_date(sub['Submission date'])\n user = sub['UID'].split('@')[0]\n score = 0\n if user in roster:\n if (sub_date < deadlines[roster[user]] and\n sub['Correct'].upper() == 'TRUE'):\n score = 1\n if grades[user] != 1:\n grades[user] = score\n return grades", "def __init__(self, idy, name):\n self.idy = idy\n self.name = name\n self.active = True\n self.grades = {}", "def get_grade(soup):\n\n # up there with with route name\n grade_table = soup.h3\n\n # look for grades in spans\n grade = []\n for s in grade_table.find_all('span'):\n\n # class names are the grading systems\n if s['class'] != None:\n head = s['class'][0]\n head = head.encode('utf8', errors = 'strict')\n\n # grade are showing with text\n body = s.get_text()\n body = body.encode('utf8', errors = 'ignore')\n\n grade.append(body)\n\n # extract tbe grades\n grade_data = {}\n for g in grade:\n h = g.split(SPLIT_CHAR)\n if len(h) > 1:\n grade_data['rate'+h[0].strip()] = h[1]\n\n return grade_data", "def 
participles(participle_form):\n ptc = {}\n ptc['parpp']= participle_parpp(participle_form)\n return ptc", "def construct_assignments(priest_list, group_list):\n priest_list = copy(priest_list)\n group_list = copy(group_list)\n buff_assignments = []\n if len(priest_list) == len(group_list):\n \"\"\" 1 priest per group \"\"\"\n priest_group = zip(priest_list, group_list)\n for priest_assign in priest_group:\n priest, group = priest_assign\n buff_assignments.append({\"priest\": priest, \"groups_assigned\": [group]})\n elif len(priest_list) < len(group_list):\n \"\"\" Fewer priests than groups, some will have more than 1 group assigned. \n Function will attempt to give consecutive group assignments in these cases. \"\"\"\n priest_parties_each, priest_additionals = divmod(len(group_list), len(priest_list))\n for priest in priest_list:\n buff_allocation = {\"priest\": priest, \"groups_assigned\": []}\n if priest_additionals > 0:\n for x in range(priest_parties_each+1):\n group_pop = group_list.pop(0)\n buff_allocation[\"groups_assigned\"].append(group_pop)\n priest_additionals -= 1\n else:\n for x in range(priest_parties_each):\n group_pop = group_list.pop(0)\n buff_allocation[\"groups_assigned\"].append(group_pop)\n buff_assignments.append(buff_allocation)\n print(\"Outcome: \", buff_assignments)\n return buff_assignments", "def grade_calculate_grade(self):\n try:\n if int(self.root.ids.grade_input_grade.text) >= 85:\n grade = 'High Distinction'\n elif int(self.root.ids.grade_input_grade.text) >= 75:\n grade = 'Distinction'\n elif int(self.root.ids.grade_input_grade.text) >= 65:\n grade = 'Credit'\n elif int(self.root.ids.grade_input_grade.text) >= 50:\n grade = 'Pass'\n else:\n grade = 'Fail'\n self.root.ids.grade_output_label.text = 'Grade: ' + grade\n except ValueError:\n\n self.root.ids.grade_output_label.text = 'Invalid Grade'", "def __init__(self, name, surname):\n\t\t\n\t\tself.grades = {}\n\t\tself.attendance = 0\n\t\t\n\t\tif not (isinstance(name, str) and 
isinstance(surname, str)):\n\t\t\tname, surname = \"None\", \"None\"\n\t\tself.name, self.surname = name, surname", "def computeGrades(e1, e2, a):\n \n a = assignmentScores\n a.sort()\n i=0\n while i<10:\n sum+=sum a[i]\n avg = sum/10\n \n grade = ((e1 + e2) /2) * 0.4 + (avg) * 0.6\n \n return grade\n \n if grade >= 90 and grade <= 100:\n return(\"A\")\n \n elif grade >= 80 and grade < 90:\n return(\"B\")\n \n elif grade >= 70 and grade < 80:\n return(\"C\")\n \n elif grade >= 60 and grade < 70:\n return(\"D\")\n \n elif grade < 60:\n return(\"F\")", "def assign_grade(github, title, grade):\n QUERY = \"\"\"\n INSERT INTO Grades VALUES (?, ?, ?)\n \"\"\"\n\n db_cursor.execute(QUERY, (github, title, grade))\n db_connection.commit()\n\n print \"Successfully graded %s with a %s on %s\" % (github, grade, title)" ]
[ "0.69196707", "0.5971959", "0.58035874", "0.5640746", "0.5462263", "0.53878474", "0.5296486", "0.52632314", "0.52490896", "0.5249011", "0.52386457", "0.520961", "0.5207413", "0.516837", "0.51675934", "0.51528466", "0.51475215", "0.51040614", "0.5026935", "0.5017392", "0.49985787", "0.49915475", "0.49818173", "0.49451524", "0.49313697", "0.48881644", "0.48661897", "0.48624313", "0.48623976", "0.48290867" ]
0.7986096
0
Creates a grade dict from a row form.
def form_to_grade_row(form): course = form["course"] student = form["student"] assignment, _, _ = form["assignment"].partition("[") rowdata = json.loads(form["rowdata"]) grade = {"student": student, "assignment": assignment, "course": course} grade["_id"] = "{student}-{assignment}-{course}".format(**grade) scores = {} for k, v in rowdata.items(): if not k.startswith(assignment): continue _, _, n = k.partition("[") n, _, _ = n.partition("]") scores[int(n)] = v scores = sorted(scores.items()) grade["scores"] = [v for _, v in scores] return grade
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def make_dict(row):\n return dict((key[0], value) for key, value in zip(colnames, row))", "def get_row_dict(self, row):\n return self.get_dict(self.possibles[row], \"R\", row)", "def form_to_grade_assignment(form):\n grade_id = \"{student}-{assignment}-{course}\".format(**form)\n grade = {\n \"_id\": grade_id,\n \"student\": form[\"student\"],\n \"assignment\": form[\"assignment\"],\n \"course\": form[\"course\"],\n }\n if form[\"filename\"]:\n grade[\"filename\"] = form[\"filename\"]\n scores = {\n int(k[5:]): float(v) for k, v in form.items() if k.startswith(\"score\")\n }\n scores = sorted(scores.items())\n grade[\"scores\"] = [v for _, v in scores]\n return grade", "def table_row_to_dict(row, make_quantity=True):\n data = {}\n for name, col in row.columns.items():\n val = row[name]\n\n if make_quantity and col.unit:\n val = Quantity(val, unit=col.unit)\n data[name] = val\n return data", "def input_row():\n return {\n 'foo': 1,\n 'bar': 2,\n 'spam': 3,\n 'eggs': 4\n }", "def dict_factory(cursor, row):\n fields = [column[0] for column in cursor.description]\n return {key: value for key, value in zip(fields, row)}", "def gen_dict():\n lines = [line for line in csv.reader(open(__ppath__ + \"/data/occupations.csv\"))] # uses a csv.reader to parse the file, converts the generic iterable to a list\n lines = [(line[0],float(line[1])) for line in lines[1:-2]]# removes the column names and \"Total\" row, re-expresses as a list of tuples to enable dictionary conversion\n lines.append((\"Unemployed\",0.2)) # accounts for missing 0.2% of jobs\n return dict(lines) # converts to dictionary", "def _convert_row(self, row) :\n\n self.row_id += 1\n data = [self.row_id]\n\n if type(row) == type({}) :\n data.extend(row.get(col, None) for col in self.cols[1:])\n elif type(row) in [type([]), type(())] :\n data.extend(row)\n elif type(row) == RowReference :\n data.extend(row.values())\n else :\n raise Exception(\n 'Don''t know how to add row from: %s ' % str(row)\n )\n\n if 
len(data) != len(self.cols) :\n raise Exception(\n 'Wrong number of values for new row with cols %s: %s' % \n (str(self.cols), str(data))\n \n )\n\n return data", "def dict_factory(cursor, row):\n rowdict = {}\n for idx, col in enumerate(cursor.description):\n rowdict[col[0]] = row[idx]\n return rowdict", "def _row_to_dict(row, fields):\n dict_row = {}\n for i, value in enumerate(row):\n key = fields[i]\n if value and str(value).lower() == 'nan':\n value = None\n dict_row[key] = value\n return dict_row", "def _row_to_labels(row):\n labels = {}\n label_keys = ['name', 'qty', 'range_end', 'unit', 'comment']\n for key in label_keys:\n labels[key] = row[key]\n return labels", "def dict_factory(cursor, row):\n d = {}\n for idx, col in enumerate(cursor.description):\n d[col[0]] = row[idx]\n return d", "def tsvRowToDict(row):\n return {col: getattr(row, col) for col in row._columns_}", "def dict_factory(cursor, row):\r\n\td = {}\r\n\tfor idx, col in enumerate(cursor.description):\r\n\t\td[col[0]] = row[idx]\r\n\treturn d", "def dict_factory(cursor, row):\n d = {}\n for idx, col in enumerate(cursor.description):\n d[col[0]] = row[idx]\n return d", "def dict_factory(cursor, row):\n d = {}\n for idx, col in enumerate(cursor.description):\n d[col[0]] = row[idx]\n return d", "def dict_factory(cursor, row):\n d = {}\n for idx, col in enumerate(cursor.description):\n d[col[0]] = row[idx]\n return d", "def make_dicts(cursor, row):\n return dict((cursor.description[idx][0], value)\n for idx, value in enumerate(row))", "def make_dicts(cursor, row):\n return dict((cursor.description[idx][0], value)\n for idx, value in enumerate(row))", "def dict_factory(self, cursor, row):\n results = {}\n for index, col_name in enumerate(cursor.description):\n results[col_name[0]] = row[index]\n\n return results", "def create_dict(rows, tag, tag_id=None, start=0, enroll=False):\n enrollment_info_map = {\n 'Enrollment Requirement': 'requirements',\n 'Add Consent': 'add_consent',\n 'Drop Consent': 
'drop_consent',\n }\n\n data = {}\n\n for row in rows:\n name_raw, desc_raw = row.find_all(tag, id=tag_id)[start:]\n name = name_raw.text.strip()\n desc = desc_raw.text.encode('ascii', 'ignore').decode().strip()\n\n if enroll:\n name = enrollment_info_map[name]\n else:\n name = name.lower().replace(' / ', '_')\n\n data.update({name: desc})\n\n return data", "def sqlite3_dict_factory(cursor, row):\n dict_row = dict()\n for idx, col in enumerate(cursor.description):\n dict_row[col[0]] = row[idx]\n dict_row[idx] = row[idx]\n return dict_row", "def deserialize(self, row, query, sort, columns):\n # (Dict[str, Any], Optional[str], Optional[str], Optional[List[str]]) -> Dict[str, Any]\n if columns is not None:\n columns = set(columns)\n\n data = {}\n for column in [self.id_column, self.score_column] + self.columns:\n if columns is None:\n data[column.name] = column.deserialize(row)\n elif column.name in columns:\n data[column.name] = column.deserialize(row)\n\n if query:\n data[self.query_column] = query\n if sort:\n data[self.sort_column] = sort\n\n return data", "def make_row(row: TRowResult,\n include_ts: bool = False) -> Union[Dict[bytes, bytes],\n Dict[bytes, Tuple[bytes, int]]]:\n cell_map = _get_cell_map(row).items()\n if include_ts:\n return {name: (cell.value, cell.timestamp) for name, cell in cell_map}\n else:\n return {name: cell.value for name, cell in cell_map}", "def _make_row_struct(self, row, npars):\n row_struct = {}\n row_struct['obsmode'] = row['obsmode']\n row_struct['datacol'] = row['datacol']\n row_struct['parnames'] = []\n row_struct['parnum'] = npars\n row_struct['nelem'] = []\n row_struct['parvals'] = []\n\n for i in range(1, npars + 1):\n row_struct['parnames'].append(row[f'par{i}names'])\n row_struct['nelem'].append(row[f'nelem{i}'])\n row_struct['parvals'].append(row[f'par{i}values'].tolist())\n\n if npars == 0:\n row_struct['results'] = row[row['datacol']]\n row_struct['telem'] = 1\n else:\n row_struct['results'] = 
row[row['datacol']].tolist()\n row_struct['telem'] = len(row_struct['results'])\n\n return row_struct", "def build_row(raw_row):\n temp_row = dict()\n ### Plan\n # Add email addresses to row\n # If message == Clicked or message == Submitted data\n ## Append 'Time Clicked' to dict. Format MM/DD/YYYY | HH:mm\n ## If message == Submitted data\n ### Append Credentials Harvested: Yes to dict\n ## Else:\n ### Append Credentials Harvested: No to dict\n # Append Reported: No, Replied to Email: No, Notes: ''\n\n # Append email\n temp_row['Email Address'] = raw_row['email']\n\n if raw_row['message'] == 'Clicked Link' or raw_row['message'] == 'Submitted Data':\n # print(raw_row['time'])\n # print(arrow.get(raw_row['time'], 'YYYY-MM-DDTHH:mm:ss.SSSSSSSSS-ZZ').format('MM/DD/YYYY | HH:mm'))\n temp_row['Time Clicked'] = arrow.get(raw_row['time'], 'YYYY-MM-DDTHH:mm:ss.SSSSSSSSS-ZZ').format('MM/DD/YYYY | HH:mm')\n if raw_row['message'] == 'Submitted Data':\n temp_row['Credentials Harvested'] = 'Yes'\n else:\n temp_row['Credentials Harvested'] = 'No'\n else:\n temp_row['Time Clicked'] = 'N/A'\n temp_row['Credentials Harvested'] = 'No'\n\n temp_row.update({'Reported': '', 'Replied to Email': '', 'Notes': ''})\n return temp_row", "def _add_from_dict(self, row) :\n\n data = [row.get(col, None) for col in self.cols]\n self._insert_internal(self.cols, data)", "def dict_factory(cursor, row):\n dic = {}\n for idx, col in enumerate(cursor.description):\n if isinstance(row[idx], unicode):\n dic[col[0]] = u.unicode_to_string(row[idx])\n else:\n dic[col[0]] = row[idx]\n return dic", "def mapToUserRequestDict(self, row):\n result = {}\n result['rID'] = row[0]\n result['uID'] = row[1]\n result['request'] = row[2]\n result['approval'] = row[3]\n result['uname'] = row[4]\n result['ulastname'] = row[5]\n return result", "def map_row_to_dict(cursor: sqlite3.Cursor, row_data):\n d = {}\n for idx, col in enumerate(cursor.description):\n d[col[0]] = row_data[idx]\n return d" ]
[ "0.66509485", "0.65499103", "0.6370172", "0.62290734", "0.62213093", "0.6178408", "0.6127212", "0.6040272", "0.6037651", "0.5976455", "0.59096533", "0.58915955", "0.57766086", "0.5749406", "0.57470036", "0.57470036", "0.57470036", "0.57335395", "0.57335395", "0.5705864", "0.56909704", "0.5654434", "0.55769277", "0.55716556", "0.5490782", "0.5474135", "0.5463969", "0.54548323", "0.5428701", "0.5427258" ]
0.75859386
0
Inserts a grade into the database.
def insert_grade(grade, form, rc): dbname = form["dbname"] collname = "grades" try: coll = rc.client[dbname][collname] except (KeyError, AttributeError): abort(404) try: added = rc.client.insert_one(dbname, collname, grade) except Exception: traceback.print_exc() raise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def assign_grade(github, title, grade):\n QUERY = \"\"\"\n INSERT INTO Grades VALUES (?, ?, ?)\n \"\"\"\n\n db_cursor.execute(QUERY, (github, title, grade))\n db_connection.commit()\n\n print \"Successfully graded %s with a %s on %s\" % (github, grade, title)", "def assign_grade(github, title, grade):\n QUERY = \"\"\"INSERT INTO Grades VALUES(?,?,?)\"\"\"\n db_cursor.execute(QUERY, (github, title, grade))\n db_connection.commit()\n print \"Success! %s received a grade of %s on the %s project!\" % (github, grade, title)", "def set_grade(github, project_title, grade_value):\n \n QUERY = \"\"\"INSERT INTO Grades VALUES (?, ?, ?)\"\"\"\n \n db_cursor.execute(QUERY, (github, project_title, grade_value))\n db_connection.commit()\n\n \n # print \"%s %s's grade: %s\" % (first_name, last_name, grade)\n print \"Successfully graded %s on Project %s: %s\" % (github, project_title, grade_value)", "def add_grade(self, student, grade):\n try:\n self.grades[student.id].append(grade)\n except KeyError:\n raise ValueError('Student not in Grade Book.')", "def _save_grade(self):\r\n student = self._student('POST', key='grader_id')\r\n if student is None:\r\n self._error_response()\r\n\r\n else:\r\n # Update the number of essays the student has graded\r\n student.grade_peer_essay()\r\n return self._success_response({})", "def add_student():\n\n\tprint('You must enter the student as is:\\n'\n\t\t\"'First name', 'middle name', 'Last name', 'major', 'major', 'gpa', id_number, 'minor'\"\n\t\t\" 'minor' graduation year, advisor number\\n For example: 'Kyle', 'Jacob', 'Ranney', 'Insurance'\"\n\t\t\", 'Chemistry', 3.0, 93988, 'Biology', 'NULL', 2016, 2234\\n\")\n\t# use sql insert statement\n\t# become familiar with this!\t", "def addUsertoDatabase(self):\r\n self.c.execute(\"\"\"INSERT INTO student_information VALUES (?,?,?)\"\"\",(self.name,self.password,self.budget,))\r\n self.con.commit()\r\n print(\"Added to Database Student..\")", "def AddGrade(self, student, discipline, 
grade_value):\n if not self.__data['s'].HasKey(student.ID):\n raise NonExistentItemIDError(\"Student does not exist.\")\n if not self.__data['d'].HasKey(discipline.ID):\n raise NonExistentItemIDError(\"Discipline does not exist.\")\n self.__data['g'].AddItems([Grade(self.__data['g'].GetSafeKey(), student.ID, discipline.ID, grade_value)])\n self.__undo_list.append(['g'])\n self.__redo_list.clear()", "def addStudent():\n name = input(\"Name: \")\n number = input(\"Number: \")\n gpa = input(\"GPA: \")\n field = input(\"Field: \")\n student = Student(name, number, gpa, field)\n if t.insert(number, student):\n ht.insert(student)\n print(name, \"added successfully.\")\n else:\n print(\"student number is not valid.\")", "def add_student_data(connection,fname,lname,class_n,marks):\r\n with connection:\r\n connection.execute(INSERT_STUDENT,(fname,lname,class_n,marks))", "def addGrade(self, student, grade):\n try:\n self.grades[student.getIDNumber()].append(grade)\n except KeyError:\n raise ValueError(\"Student not in Gradebook\")", "def make_new_student(first_name, last_name, github):\n QUERY = \"\"\"INSERT INTO Students VALUES(?,?,?)\"\"\"\n db_cursor.execute(QUERY, (first_name, last_name, github))\n db_connection.commit()\n print \"Successfully added student: %s %s\" % (first_name, last_name)", "def make_new_student(first_name, last_name, github):\n QUERY = \"\"\"\n INSERT INTO Students VALUES(?, ?, ?)\"\"\"\n \n db_cursor.execute(QUERY, (first_name, last_name, github))\n db_connection.commit()\n print \"Successfully added student: %s %s\" % (first_name, last_name)", "def add_project(title, description, max_grade):\n\n QUERY = \"\"\"\n INSERT INTO Projects (title, description, max_grade) VALUES (?, ?, ?)\n \"\"\"\n\n db_cursor.execute(QUERY, (title, description, max_grade))\n db_connection.commit()\n\n print \"Successfully added %s: %s with a max grade of %s\" % (title, description, max_grade)", "def __ui_grade_student(self):\n student_id = input(\"Give student ID: \")\n 
discipline_name = input(\"Give discipline discipline_name: \")\n\n try:\n grade_value = input(\"Give grade: \")\n if not self.__student_controller.student_has_discipline(student_id, discipline_name):\n print(\"The student isn't enrolled at the given discipline!\")\n return\n self.__grade_controller.add_grade(\n student_id,\n self.__discipline_controller.get_id_by_name(discipline_name),\n grade_value\n )\n print(\"Grade successful! \\n\")\n\n except GradeException as ge:\n print(ge)\n return\n except StudentException as se:\n print(se)\n return\n except RepositoryException as re:\n print(re)\n return\n except ValueError as ve:\n print(ve)\n return", "def make_new_student(first_name, last_name, github):\n\n QUERY = \"\"\"INSERT INTO Students VALUES (?, ?, ?)\"\"\"\n # Query...all caps as a constant here, a string that will not change, (and only in the scope of this function!)\n db_cursor.execute(QUERY, (first_name, last_name, github))\n db_connection.commit()\n print \"Successfully added student: %s %s\" % (first_name, last_name)", "def create_student(conn, student, first_name, last_name, major, start_date):\n sql = ''' INSERT INTO student(firstname, lastname, major, start_date)\n VALUES(?,?,?,?) 
'''\n cur = conn.cursor() # cursor object\n cur.execute(sql, student)\n # return cur.lastrowid # returns the row id of the cursor object, the student id\n first_name.set('')\n last_name.set('')\n major.set('')\n start_date.set('')\n messagebox.showinfo('Success', 'Student Successfully Added to the Database!')", "def _insert_single(self, disc, class_num):\n self.cursor.execute(self.INSERT, (disc, class_num))\n self.conn.commit()", "def test_save_grade(self):\r\n response = self.peer_grading.save_grade(self.save_dict)\r\n self.assertEqual(response['success'], True)", "def insertData(self, table, title, rating, authorinfo, pubinfo):\n\n\t\tsql = \"insert into %s (bookname, authorinfo, pubinfo, rating) \\\n\t\t\tvalues('%s', '%s', '%s', '%s')\" %(table, title, authorinfo,\n\t\t\tpubinfo, rating)\n\t\ttry:\n\t\t\tself.cursor.execute(sql)\n\t\t\tself.conn.commit()\n\t\texcept Exception, e:\n\t\t\tsys.exit()", "def add_student(self, name: str, grade: int) -> None:\n school_grade = self.students.setdefault(grade, [])\n school_grade.append(name)\n school_grade.sort()", "def add_course_grade(self, course, grade):\n course_grade_tuple = (course, grade)\n self.courses_grades.append(course_grade_tuple)", "def insert_evaluation(connection, evaluation):\n insert_eval = \"\"\"INSERT INTO evaluations(scale_id,corrector_id,corrector_login,\n corrected_id,corrected_login,project_name,\n project_id,rule,begin_at) VALUES(?,?,?,?,?,?,?,?,?)\"\"\"\n try:\n cursor = connection.cursor()\n cursor.execute(insert_eval, evaluation)\n connection.commit()\n except sqlite3.Error as e:\n print(e)", "def add_new_project(title, description, max_grade):\n QUERY = \"\"\"INSERT into Projects (title, description, max_grade) VALUES(?,?,?)\"\"\"\n db_cursor.execute(QUERY, (title, description, max_grade))\n db_connection.commit()\n print \"Success! 
Add %s project, and here is the description: %s, and max grade: %s\"\\\n %(title, description, max_grade)", "def insert(self):\n db.session.add(self)\n db.session.commit()", "def insert(self):\n db.session.add(self)\n db.session.commit()", "def insert(self):\n db.session.add(self)\n db.session.commit()", "def insert(self, sql):\n try:\n # Execute the SQL command\n self.cursor.execute(sql)\n # Commit your changes in the database\n self.db.commit()\n except:\n # Rollback in case there is any error\n self.db.rollback()", "def add_score(self, player, level, score):\n cursor = self._connection.cursor()\n command = 'INSERT INTO scores (player, level, score) VALUES (?, ?, ?)'\n cursor.execute(command, [player, level, score])\n self._connection.commit()", "def addstar(starname):\n try:\n Star.create(name=starname)\n except IntegrityError:\n print(('Star {0} already in database. Record not created, but can be updated.'.format(starname)))" ]
[ "0.80697274", "0.7878323", "0.74185586", "0.6426635", "0.633985", "0.6160498", "0.6142165", "0.6109473", "0.6094631", "0.60664976", "0.6057973", "0.60507387", "0.60427445", "0.601331", "0.5962537", "0.5918605", "0.58787453", "0.5862085", "0.5807437", "0.5785798", "0.5780195", "0.57689214", "0.5730336", "0.57232094", "0.57202476", "0.57202476", "0.57202476", "0.56997967", "0.5647281", "0.56248736" ]
0.82849544
0
stripchars() Finds and replaces incompatible characters within all .txt files in the '\abstracts' directory. renameid() Renames files, stripping anything from filename after ID.
def main(): target_folder = r'Abstracts cleanup\abstracts\*.txt' try: stripchars(target_folder) except Exception as e: print(e) pass # renameid(target_folder)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def processFilename(filename):\n\n badchars = [\" \", \",\", \"+\", \"$\", \"_\", \"{\", \"}\", \"/\", \"&\"]\n fn = filename\n for bc in badchars:\n fn = fn.replace(bc, \"\")\n return fn", "def main():\n print(\"Current directory is\", os.getcwd())\n os.chdir('Lyrics/Lyrics')\n\n for dir_name, dir_list, file_list in os.walk(\".\"):\n for filename in file_list:\n file_path = dir_name + \"\\\\\" + filename\n new_name = get_fixed_filename(file_path)\n os.rename(file_path, new_name)", "def stripchars(target_folder):\n\tfor filepath in iglob(target_folder, recursive=True):\n\t\tp = Path(filepath)\n\t\tfn = p.parts[-1]\n\n\t\twith open(filepath, encoding='cp1252') as file:\n\t\t\tlogger.info(f'read: {fn}')\n\t\t\tfor line in file:\n\t\t\t\toutput = line.strip().replace(\"Ð\", \"–\").replace(\"Õ\", \"'\").replace(\"Ô\", \"'\").replace(\"Ž\", \"é\").replace(\"Ò\", \"'\").replace(\"Ó\", \"'\").replace(\"ª\", \"™\").replace(\"’\", \"'\").replace(\"‘\", \"'\").replace(\"–\",\"–\")\n\t\t\t\t# only write back the block paragraph by stripping other shorter lines\n\t\t\t\tif len(output) >= 7:\n\t\t\t\t\twith open(filepath, 'w') as file:\n\t\t\t\t\t\ttry:\n\t\t\t\t\t\t\tfile.write(output)\n\t\t\t\t\t\t\tlogger.info(f'write {fn}')\n\t\t\t\t\t\texcept Exception as e:\n\t\t\t\t\t\t\tlogger.error(e)\n\t\t\t\t\t\t\tcontinue", "def process_files(inpath=os.path.join(os.curdir, \"data/raw\"), outpath=os.path.join(os.curdir, \"data/processed\")):\n filenames = [f for f in os.listdir(inpath) if fnmatch.fnmatch(f, '*.txt')]\n print \"fixing ascii encoding...\"\n for f in filenames:\n print f\n infile = os.path.join(inpath, f)\n outname = os.path.join(outpath, f)\n with open(outname, 'w') as outfile:\n text = open(infile).read()\n text = fix_ascii(text)\n outfile.write(text)", "def rename_files():\n folder_dir = r\"C:\\Users\\keithmoore1.AD\\Desktop\\HAFB\\prankOrig\"\n files = os.listdir(folder_dir)\n save_path = os.getcwd() # current working directory\n for file in files:\n #remove 
digits from name\n new_file = file.lstrip(\"0123456789\")\n print(file, \" - \", new_file)\n # rename filename\n os.chdir(folder_dir)\n os.rename(file,new_file)\n # get back home\n os.chdir(save_path)", "def wipe_bad_chars(filename):\n return multi_replace(filename, {'(': '', ' ': '_', ')': '', '/': '_'})", "def _remove_accents_(unicode_filename):\n valid_characters = bytes(b'-_.() 1234567890abcdefghijklmnopqrstuvwxyz')\n cleaned_filename = unicodedata.normalize('NFKD', unicode_filename).encode('ASCII', 'ignore')\n\n new_filename = \"\"\n\n for char_int in bytes(cleaned_filename):\n char_byte = bytes([char_int])\n if char_byte in valid_characters:\n new_filename += char_byte.decode()\n\n return new_filename", "def strip_unsafe_characters(filename: str):\n return \"\".join([c for c in filename if c.isalpha() or c.isdigit() or c==' ' or c=='_']).rstrip()", "def sanitize_filename(s, restricted=False, is_id=False):\n def replace_insane(char):\n if restricted and char in ACCENT_CHARS:\n return ACCENT_CHARS[char]\n if char == '?' 
or ord(char) < 32 or ord(char) == 127:\n return ''\n elif char == '\"':\n return '' if restricted else '\\''\n elif char == ':':\n return '_-' if restricted else ' -'\n elif char in '\\\\/|*<>':\n return '_'\n if restricted and (char in '!&\\'()[]{}$;`^,#' or char.isspace()):\n return '_'\n if restricted and ord(char) > 127:\n return '_'\n return char\n\n # Handle timestamps\n s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)\n result = ''.join(map(replace_insane, s))\n if not is_id:\n while '__' in result:\n result = result.replace('__', '_')\n result = result.strip('_')\n # Common case of \"Foreign band name - English song title\"\n if restricted and result.startswith('-_'):\n result = result[2:]\n if result.startswith('-'):\n result = '_' + result[len('-'):]\n result = result.lstrip('.')\n if not result:\n result = '_'\n return result", "def sanitize_filename(s, restricted=False, is_id=False):\n def replace_insane(char):\n if restricted and char in ACCENT_CHARS:\n return ACCENT_CHARS[char]\n if char == '?' 
or ord(char) < 32 or ord(char) == 127:\n return ''\n elif char == '\"':\n return '' if restricted else '\\''\n elif char == ':':\n return '_-' if restricted else ' -'\n elif char in '\\\\/|*<>':\n return '_'\n if restricted and (char in '!&\\'()[]{}$;`^,#' or char.isspace()):\n return '_'\n if restricted and ord(char) > 127:\n return '_'\n return char\n\n # Handle timestamps\n s = re.sub(r'[0-9]+(?::[0-9]+)+', lambda m: m.group(0).replace(':', '_'), s)\n result = ''.join(map(replace_insane, s))\n if not is_id:\n while '__' in result:\n result = result.replace('__', '_')\n result = result.strip('_')\n # Common case of \"Foreign band name - English song title\"\n if restricted and result.startswith('-_'):\n result = result[2:]\n if result.startswith('-'):\n result = '_' + result[len('-'):]\n result = result.lstrip('.')\n if not result:\n result = '_'\n return result", "def renameFiles(folder):\n\n # Retrieve list of all text files and remove the txt files\n for filename in glob.glob(os.path.join(folder, \"*.txt\")):\n with open(filename, 'r') as file:\n metadata=file.read().replace('\\n', '')\n ident = metadata[27:31]\n order = metadata[26].upper()\n finger = metadata[32:34]\n gender = metadata[8].upper()\n fingerprintClass = metadata[16].upper()\n fp = Fingerprint(ident, order, finger, gender, fingerprintClass)\n\n # Remove the .txt file and rename the png\n os.remove(filename)\n pngName = filename.replace(\".txt\", \".png\")\n newName = fp.id + \"_\" + fp.order + \"_\" + fp.finger + \"_\" + fp.gender + \"_\" + fp.fingerprintClass + \".png\"\n newName = os.path.join(folder, newName)\n os.rename(pngName, newName)", "def correct_naming(obsid, inst):\n cobsid = str(int(float(obsid)))\n if len(cobsid) == 5:\n return \n\n lobsid = mcf.add_leading_zero(obsid, 5)\n \n for sdir in ['secondary', 'analysis']:\n\n cmd = 'ls /data/hrc/' + inst + '/' + lobsid + '/' + sdir + '/hrcf* >' + zspace\n os.system(cmd)\n\n data = mcf.read_data_file(zspace, remove=1)\n for ent in data:\n 
atemp = re.split('\\/', ent)\n fname = atemp[-1]\n mc = re.search(lobsid, fname)\n if mc is not None:\n continue\n else:\n atemp = re.split('hrcf', fname)\n btemp = re.split('_', atemp[1])\n sobs = btemp[0]\n new = fname.replace(sobs, lobsid)\n full = '/data/hrc/' + inst + '/' + lobsid + '/' + sdir + '/' + new\n\n cmd = 'mv ' + ent + ' ' + full\n os.system(cmd)", "def sanitize_filename(f):\n keepchars = (\" \", \".\", \"_\")\n return \"\".join(c for c in f if c.isalnum() or c in keepchars).rstrip()", "def fix_fasta(database_names):\n for file in database_names:\n file_mod = file.replace(\".fasta\", \"_mod.fasta\")\n with open(file, 'r') as f:\n lines = f.readlines()\n new_lines = []\n for line in lines:\n if '|' in line and \">\" not in line:\n # we replace spaces in header line with \"__\"\n # so I can manipulate that later as biopython doesn't\n # like \"__\"\n new_line = \">\"+line.replace(\" \", \"__\")\n new_lines.append(new_line)\n else:\n new_lines.append(line)\n with open(file_mod, 'w') as f:\n for line in new_lines:\n f.write(line)", "def strip_illegal_chars(filename: str) -> str:\n if OPTIONS['download']['ascii']:\n return ''.join(i for i in filename if i in FILENAME_ALLOWEDASCII)\n else:\n return ''.join(i for i in filename if i not in FILENAME_BANNED)", "def sanitize_filename(file_path: str) -> str:\n file_name = file_path.lower().replace(\" \", \"_\").replace(\".\", \"_\")\n file_name = \"\".join(\n [\n i if i in (string.ascii_letters + string.digits + \"_\") else \"\"\n for i in file_name\n ]\n )\n return file_name", "def fix_filename(self):\n if not self.remove_path:\n return\n self.filename = re.sub(\".+\\/\", \".../\", self.filename)", "def changeFilenames(speciesfolder, species):\n\tfor filename in os.listdir(speciesfolder):\n\t\tif filename.startswith(\"generic\"):\n\t\t\tnewname = filename.replace(\"generic\", species)\n\t\t\tos.rename(os.path.join(speciesfolder, filename), os.path.join(speciesfolder, newname))", "def cleanFilename(filename):\n 
badChars = {ord('?'): None, ord('*'): None, ord('/'): None,\n ord('\\\\'): None, ord(':'): None, ord('\"'): \"''\",\n ord('<'): None, ord('>'): None, ord('|'): None}\n return filename.translate(badChars)", "def space_cleaning(file=\"\"):\n intermediate = str(file) + str(\"_intermediate\")\n output_file = str(file)\n\n os.rename(intermediate, output_file)", "def _removeDiacritics(self, text):\n norm_txt = unicodedata.normalize('NFD', text)\n shaved = ''.join(c for c in norm_txt if not unicodedata.combining(c))\n # remove accents and other diacritics, replace spaces with \"_\" because identifiers can't have spaces\n no_spaces = unicodedata.normalize(\n 'NFC', shaved).lower().replace(\" \", \"_\")\n final_text = no_spaces\n # only allow [a-z], [0-9] and _\n p = re.compile('[a-z0-9_]+')\n for i in range(0, len(no_spaces)):\n if not (p.match(no_spaces[i])):\n final_text = final_text[:i] + '_' + final_text[i+1:]\n # i the first char is not a-z then replaceit (all identifiers must start with a letter)\n p2 = re.compile('[a-z]+')\n if not p2.match(final_text[0]):\n final_text = 'a' + final_text[1:]\n return final_text", "def correct_filename(self, img_name, categ):\n path = self._path\n\n # Change wrong characters in filename\n wrong_char = [char for char in img_name if char in [\" \", \"(\", \")\", \"é\", \"©\"]]\n if len(wrong_char) > 0:\n\n new_img_name = img_name\n for char in [\" \", \"(\", \")\", \"©\"]:\n new_img_name = new_img_name.replace(char, \"\")\n new_img_name = new_img_name.replace(\"é\", \"e\")\n\n os.rename(join(path, categ, img_name), join(path, categ, new_img_name))\n img_name = new_img_name\n\n return img_name", "def main():\n print(\"Starting directory is: {}\".format(os.getcwd()))\n\n # Change to desired directory\n os.chdir('Lyrics/Christmas')\n\n # Print a list of all files in current directory\n print(\"Files in {}:\\n{}\\n\".format(os.getcwd(), os.listdir('.')))\n\n try:\n os.mkdir('temp')\n except FileExistsError:\n pass\n\n # Loop through each 
file in the (current) directory\n for filename in os.listdir('.'):\n # Ignore directories, just process files\n if os.path.isdir(filename):\n continue\n\n new_name = get_fixed_filename(filename)\n print(\"Renaming {} to {}\".format(filename, new_name))", "def cleanup(text):\n with open(text, 'r') as uncleaned_text:\n no_chapters = re.sub('[A-Z]{3,}', ' ', uncleaned_text.read())\n remove_periods = re.sub('(\\s\\.){4,}', '', no_chapters)\n new_text = re.sub('\\*', '', remove_periods)\n return new_text", "def fix_subfiles(ln):\n m = input_re.search(ln)\n if m:\n fn = m.group(1)\n fI = file(fn,\"r\")\n fO = file(fn + \".tmp\",\"w\")\n for ln in fI:\n ln = sqb2_re.sub(\"[\",ln)\n fO.write(unescape_sqb(ln))\n fI.close()\n fO.close()\n fI = file(fn,\"w\")\n fO = file(fn + \".tmp\",\"r\")\n for ln in fO:\n fI.write(ln)\n fI.close()\n fI.close()", "def rename(root, filelist):\n if not filelist:\n return\n def apply_rules(filename):\n rulez = [('_+' , ' '), # One or more underscores to spaces\n ('-{2,}' , '-'), # Two or more hyphens to single hyphen\n ('&' , 'And'), # An ampersand to 'And'\n ('(-)(\\w*)' ,r' \\1 \\2')]# Spaces around hyphen seperated words\n \n for look_for, replacement in rulez:\n filename = re.sub(look_for, replacement, filename)\n # Capitalize first letter of every word\n filename = \" \".join([ word.capitalize() for word in filename.split() ])\n return filename\n \n names = []\n for filename in filelist:\n basename = os.path.basename(filename)\n names.append(os.path.join(root, apply_rules(filename)))\n try:\n dest = os.tmpnam()\n fl = open(dest, 'w')\n fl.write(\"\\n\".join(names))\n fl.close()\n os.system(\"%s %s\" % (EDITOR, dest))\n ans = 'no'\n for oldname, newname in zip(filelist, open(dest).readlines()):\n oldname = os.path.join(root, oldname)\n newname = newname.strip()\n if oldname == newname:\n print \"No change from %s to %s ...skipping\" % (oldname, newname)\n else:\n print \"Changing %s to %s\" % (oldname, newname)\n if not ans[0].lower == 
'a':\n ans = raw_input(\"Contine (Yes/No/All) ? [N] \") or 'no'\n if ans[0].lower() in ('a', 'y'):\n os.rename(oldname, newname)\n else:\n os.rename(oldname, newname)\n finally:\n os.remove(dest)", "def file_name_cleanup(cls, name: str, remove_left_whitespace: bool = True,\n\t\t\t\t\t\t remove_right_whitespace: bool = True) -> str:\n\t\tnew_name = name\n\t\tnew_name = new_name.replace('|', '-')\n\t\tnew_name = new_name.replace('?', '?')\n\t\tnew_name = new_name.replace('*', '×')\n\t\tnew_name = new_name.replace('/', '╱')\n\t\tnew_name = new_name.replace('\\\\', '╲')\n\t\tnew_name = new_name.replace('\\n', '_')\n\t\tnew_name = new_name.replace('\\r', '_')\n\t\tnew_name = new_name.replace(':', ':')\n\t\tnew_name = new_name.replace('>', '〉')\n\t\tnew_name = new_name.replace('<', '〈')\n\t\tnew_name = new_name.replace('&nbsp;', ' ')\n\t\treplace_by = \"“”\"\n\t\tcount = 0\n\t\twhile new_name.find('\"') > -1:\n\t\t\tnew_name = new_name.replace('\"', replace_by[count % 2], 1)\n\t\t\tcount += 1\n\t\tif remove_left_whitespace:\n\t\t\tnew_name = new_name.lstrip()\n\t\tif remove_right_whitespace:\n\t\t\tnew_name = new_name.rstrip()\n\n\t\treturn new_name", "def ren_mosaic(mosaic_dir='K:/IID_SaltonSea/Tasks/Soil mapping/PhotoDocumentation/Original/', \r\n file_pattern='*stitch.jpg'): \r\n \r\n \r\n if not os.path.exists(mosaic_dir):\r\n sys.exit('input folder does not exist')\r\n \r\n mosaics = []\r\n for root, dirnames, filenames in os.walk(mosaic_dir):\r\n for filename in fnmatch.filter(filenames, file_pattern):\r\n mosaics.append(os.path.join(root, filename).replace('\\\\','/'))\r\n \r\n s = 0\r\n r = 0\r\n for m in mosaics:\r\n dir_name = os.path.dirname(m).split('/')[-1]\r\n new_name = os.path.dirname(m) + '/' + dir_name + '.jpg'\r\n if os.path.exists(new_name):\r\n print('skipping: %s' % m)\r\n s+=1\r\n else:\r\n os.rename(m, new_name)\r\n print('renamed: %s' % new_name)\r\n r+=1\r\n \r\n print('renamed total of %i files' % r)\r\n print('skipped total of %i files' % s)", 
"def refactor(path: str, files: List):\n skipped = []\n for filename in files:\n try:\n number = get_number_from_name(filename)\n except AttributeError:\n skipped.append(os.path.join(path, filename))\n continue\n new_number = update_number(number)\n\n file_path = os.path.join(path, filename)\n new_file_path = file_path.replace(number, new_number)\n\n with open(file_path, 'r') as file:\n data = file.read()\n data = data.replace(number, new_number)\n with open(file_path, 'w') as file:\n file.write(data)\n\n os.rename(file_path, new_file_path)\n return skipped", "def tidyFileNames(folderToCheck):\n\n filters = list(map(lambda x: \"*.\" + x, expectedExts))\n\n for filter in filters:\n\n for f in getFiles(folderToCheck,filter):\n\n clean = f\n for search in searches:\n clean = replace(clean,search)\n\n if renameFile(f,clean):\n results = list(map(os.path.basename,[f,clean]))\n if results[0] != results[1]:\n print(f\"Renamed: {results[0]} -> {results[1]}\")" ]
[ "0.5564704", "0.547632", "0.54744965", "0.54623824", "0.5450538", "0.53844106", "0.5332848", "0.5256203", "0.5069885", "0.5069885", "0.5069355", "0.505932", "0.50335646", "0.501821", "0.50090104", "0.50069124", "0.4984751", "0.49745655", "0.49716756", "0.49537197", "0.49369138", "0.4934832", "0.4934201", "0.49197713", "0.49176958", "0.49065477", "0.48866716", "0.48603556", "0.48532608", "0.4845391" ]
0.77768743
0
Iterator of the known sessions
def sessions(self): for session_id in self.get_sessions(): session = Session(self.session_cache, self.sid, session_id) yield session
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def iter_sessions():\n return iter(_session_stack)", "def itersessions(self):\n for x in np.unique(self.sessions):\n yield x, self.loc[self.sessions == x, :]", "def _sessions(self):\n return self.__sessions", "def sessions(self):\n return list(Session.get_sessions(self))", "def get_sessions(self):\n\n return self.all_sessions", "def sessions(self):\n return self._sessions", "def get_sessions(self):\n return self.current_sessions", "def safe_session(self) -> Iterator[\"Session\"]:\n with self._lock:\n yield self.session", "def sessions(self):\n logger.debug(\"Get sessions\")\n return self._raw_api.sessions.get()", "def sessions(self):\n return utils.listItems(self, '/status/sessions')", "def active_sessions(self):\n skey = self.r_key('active_sessions')\n sessions_to_expire = []\n for user_id in self.r_server.smembers(skey):\n ukey = self.r_key('session', user_id)\n if self.r_server.exists(ukey):\n yield user_id, self.load_session(user_id)\n else:\n sessions_to_expire.append(user_id)\n\n # clear empty ones\n for user_ids in sessions_to_expire:\n self.r_server.srem(skey, user_id)", "def get_all_sessions(self) -> list:\n sessions = list()\n for stream_id in self.streams.keys():\n tcpsession, session_position, network_tuple = self.streams[stream_id]\n sessions.append(tcpsession.get_session(session_position - 1))\n return sessions", "def sessions(self):\n return self.rpc.compatiblesessions(self.modulename)", "def get_sessions(self, network_tuple: NetworkTuple) -> list:\n tcpsession = self.sessions[network_tuple]\n session_list = tcpsession.get_sessions_list()\n return session_list", "def find_sessions(sfe):\n print(\"-\" * 20 + \" find_sessions started\")\n isessions = sfe.list_iscsisessions()\n json_isessions = isessions.to_json()\n return json_isessions", "def fusion_api_get_active_sessions(self):\n return self.loginsession.get_active_sessions()", "def session_list(self, endpoint_name=None):\n if endpoint_name is None:\n _, body = 
self.request('/v1.1/endpoint/sessions', 'GET')\n else:\n _, body = self.request('/v1.1/endpoints/%s/sessions' % endpoint_name, 'GET')\n return body", "def list(self):\n return {str(k): v for k, v in self.rpc.call(MsfRpcMethod.SessionList).items()} # Convert int id to str", "def get_sessions_list():\n sessions = Session.query.all()\n result = sessions_schema.dump(sessions).data\n return jsonify({'status': 'success', 'message': None, 'data': result}), 200", "def sessions(self):\n\n return File.session_choices", "def get_active_sessions():\n\n # The output changes based on locales, force it to be YY-MM-DD\n # for the benefit of split()\n os.environ['LANG'] = 'en_GB.utf8'\n try:\n output = subprocess.check_output(['who']).rstrip()\n except subprocess.CalledProcessError:\n print 'UNKNOWN: unable to invoke who'\n sys.exit(NAGIOS_UNKNOWN)\n\n # Nothing to process\n if not output:\n return {}\n\n sessions = {}\n for line in output.split(\"\\n\"):\n fields = line.split()\n sessions[fields[1]] = {\n 'user': fields[0],\n 'date': fields[2],\n 'time': fields[3],\n 'source': fields[4][1:-1] if len(fields) >= 5 else None,\n }\n\n return sessions", "def get_session_ids(self):\n with self._sessions_lock:\n session_ids = self.sessions.keys()\n\n return session_ids", "def _getSessionsBySpeaker(self, request):\n # Ensure that the speaker key is valid and that the speaker exists\n speaker = _getEntityByWebsafeKey(request.websafeSpeakerKey, 'Speaker')\n # Return all of the speaker's sessions\n return ndb.get_multi(speaker.sessions)", "def list_remote_access_sessions(arn=None, nextToken=None):\n pass", "def get_sessions(url: str, token: str) -> List[Session]:\n sessions_url = f'{url}api/sessions'\n response = requests.get(sessions_url, params={'token': token})\n assert(response.status_code == 200)\n sessions_raw = json.loads(response.text)\n sessions = []\n for session_raw in sessions_raw:\n session = Session(\n path = session_raw['path'],\n last_activity = 
dateutil.parser.isoparse(session_raw['kernel']['last_activity']),\n execution_state = session_raw['kernel']['execution_state']\n )\n assert(session['execution_state'] in valid_execution_states)\n sessions.append(session)\n\n sessions.sort(key=lambda session: session['last_activity'], reverse=True)\n return sessions", "def sessions(self, *args, **kwargs):\r\n return self._get('Sessions', *args, **kwargs)", "def filtered_sessions(self):\n return self.stage.filtered_sessions", "def _getSessionsInWishlist(self):\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n profile = self._getProfileFromUser()\n # Fetch the entities and return them\n return ndb.get_multi(profile.sessionWishlist)", "def get_upcoming_sessions(self):\n return [session for session in self.sessions if not session.is_complete()]", "def sessions(self):\n return SessionManager(self)" ]
[ "0.8693743", "0.7763374", "0.7717324", "0.7705798", "0.7550476", "0.75504535", "0.7429118", "0.74133444", "0.7401458", "0.7400774", "0.7281079", "0.7247437", "0.7214192", "0.69058543", "0.68678856", "0.6845615", "0.6703754", "0.6695977", "0.66274714", "0.6570915", "0.6526649", "0.650701", "0.6485588", "0.6483118", "0.64332014", "0.64130455", "0.64075315", "0.6389735", "0.6354447", "0.6343351" ]
0.81237054
1
Parses and verifies a cookie value
def parse_cookie(self, value): if not value: return None parts = value.split("|") if len(parts) != 3: return None # verify the cookie signature if self.cookie_signature(parts[0], parts[1]) != parts[2]: raise Exception("Invalid cookie signature %r", value) try: return parts[0].strip() except KeyError: return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def parse_cookie(request):\n\n raw_cookie = request.message.get('Cookie','')\n return SimpleCookie(raw_cookie)", "def _parse_cookie(self, request, response, cookie_header_value):\n try:\n # Note to self: This line may print some chars to the console\n return parse_cookie(cookie_header_value)\n except Cookie.CookieError:\n desc = 'The remote Web application sent a cookie with an' \\\n ' incorrect format: \"%s\" that does NOT respect the RFC.'\n desc = desc % cookie_header_value\n\n i = CookieInfo('Invalid cookie', desc, response.id, self.get_name())\n i.set_url(response.get_url())\n i.set_cookie_string(cookie_header_value)\n\n # The cookie is invalid, this is worth mentioning ;)\n kb.kb.append(self, 'invalid-cookies', i)\n return None", "def parse_cookie(name, seed, kaka):\n if not kaka:\n return None\n\n cookie_obj = SimpleCookie(kaka)\n morsel = cookie_obj.get(name)\n\n if morsel:\n parts = morsel.value.split(\"|\")\n if len(parts) != 3:\n return None\n # verify the cookie signature\n sig = cookie_signature(seed, parts[0], parts[1])\n if sig != parts[2]:\n raise SAMLError(\"Invalid cookie signature\")\n\n try:\n return parts[0].strip(), parts[1]\n except KeyError:\n return None\n else:\n return None", "def verify(ctx, config):\n logger = config['logger']\n px_cookie = ctx['_px']\n try:\n if not px_cookie:\n logger.debug('No risk cookie on the request')\n ctx['s2s_call_reason'] = 'no_cookie'\n return False\n\n decrypted_cookie = decrypt_cookie(config['cookie_key'], px_cookie)\n\n if not decrypted_cookie:\n logger.error('Cookie decryption failed')\n ctx['px_orig_cookie'] = px_cookie\n ctx['s2s_call_reason'] = 'cookie_decryption_failed'\n return False\n\n decoded_cookie = json.loads(decrypted_cookie)\n try:\n decoded_cookie['s'], decoded_cookie['s']['b'], decoded_cookie['u'], decoded_cookie['t'], decoded_cookie['v']\n except:\n logger.error('Cookie decryption failed')\n ctx['px_orig_cookie'] = px_cookie\n ctx['s2s_call_reason'] = 'cookie_decryption_failed'\n 
return False\n\n ctx['risk_score'] = decoded_cookie['s']['b']\n ctx['uuid'] = decoded_cookie.get('u', '')\n ctx['vid'] = decoded_cookie.get('v', '')\n ctx['decoded_cookie'] = decoded_cookie\n\n if decoded_cookie['s']['b'] >= config['blocking_score']:\n ctx['block_reason'] = 'cookie_high_score'\n logger.debug('Cookie with high score: ' + str(ctx['risk_score']))\n return True\n\n if is_cookie_expired(decoded_cookie):\n ctx['s2s_call_reason'] = 'cookie_expired'\n logger.debug('Cookie expired')\n return False\n\n if not is_cookie_valid(decoded_cookie, config['cookie_key'], ctx):\n logger.debug('Cookie validation failed')\n ctx['s2s_call_reason'] = 'cookie_validation_failed'\n return False\n\n logger.debug('Cookie validation passed with good score: ' + str(ctx['risk_score']))\n return True\n except:\n logger.debug('Cookie validation failed')\n ctx['s2s_call_reason'] = 'cookie_validation_failed'\n return False", "def verify_cookie_hash(h):\n val = h.split('|')[0]\n if make_cookie_hash(val) == h: \n return val\n else:\n return None", "def getCookie(key):", "def parse_cookies( headers ):", "def read_secure_cookie(self, name):\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)", "def read_secure_cookie(self, name):\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)", "def get(self, name, value=None):\n \n if value is None:\n value = self.request.cookies.get(name, None)\n \n if value is None:\n return None\n \n parts = value.split(\"|\")\n if len(parts) != 3: \n return None\n \n timestamp = int(parts[1])\n if timestamp < time.time() - 31 * 86400:\n logging.warning(\"Expired cookie %r\", value)\n return None\n \n args = (name, parts[0], parts[1])\n signature = _generate_cookie_signature(self._cookie_secret, *args)\n \n if not _time_independent_equals(parts[2], signature):\n logging.warning(\"Invalid cookie signature %r\", value)\n return None\n \n try:\n return 
base64.b64decode(parts[0])\n except TypeError:\n return None", "def parse_cookie(name, sign_key, kaka, enc_key=None, sign_alg=\"SHA256\"):\n if not kaka:\n return None\n\n parts = cookie_parts(name, kaka)\n\n if parts:\n return ver_dec_content(parts, sign_key, enc_key, sign_alg)\n else:\n return None", "def read_secure_cookie(self, name):\n\n cookie_val = self.request.cookies.get(name)\n return cookie_val and check_secure_val(cookie_val)", "def get_secure_cookie( name, value=None ):", "def get_cookie_value( cookiejar, name ):\n value = None\n for cookie in cookiejar:\n if cookie.name == name:\n value = cookie.value\n break\n return value", "def from_cookie(self):\n try:\n low, high = [int(h, base=10) for h in cookie.split(',')]\n calc = 10**-7 * (high * 2**32 + low) - 11644473600\n dt_obj = dt.utcfromtimestamp(calc)\n self.in_cookie = dt_obj.strftime('%Y-%m-%d %H:%M:%S.%f')\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.in_cookie = False\n return self.in_cookie", "def decrypt_cookie_value(secure_val):\n val = secure_val.split('|')[0]\n if secure_val == encrypt_cookie_value(val):\n return val", "def get_cookie(self, name, value=None):\n try:\n return cherrypy.request.cookie[name].value\n except KeyError:\n return value", "def is_cookie_valid(cookie, cookie_key, ctx):\n user_agent = ctx['user_agent']\n msg = str(cookie['t']) + str(cookie['s']['a']) + str(cookie['s']['b']) + str(cookie['u']) + str(\n cookie['v']) + user_agent\n\n valid_digest = cookie['h']\n try:\n calculated_digest = hmac.new(cookie_key, msg, hashlib.sha256).hexdigest()\n except:\n return False\n\n return valid_digest == calculated_digest", "def auth(self, cookie):\n decode = base64.decodestring(\n cookie.replace(\"_\", \"/\").replace(\"~\", \"=\"))\n signature = decode[:cookie_m._signature_size]\n expires = decode[cookie_m._signature_size:cookie_m._header_size]\n content = decode[cookie_m._header_size:]\n if signature == 
hmac.new(self.secret, content, sha1).digest():\n if int(expires) > int(cookie_m.make_time(time.time())):\n return content\n else:\n # This is the normal case of an expired cookie; just\n # don't bother doing anything here.\n pass\n else:\n # This case can happen if the server is restarted with a\n # different secret; or if the user's IP address changed\n # due to a proxy. However, it could also be a break-in\n # attempt -- so should it be reported?\n pass", "def get_cookie_value(self, cookie=None, cookie_name=None):\n if cookie_name is None:\n cookie_name = self.default_value[\"name\"]\n\n if cookie is None or cookie_name is None:\n return None\n else:\n try:\n info, timestamp = parse_cookie(\n cookie_name, self.sign_key, cookie, self.enc_key, self.sign_alg\n )\n except (TypeError, AssertionError):\n return None\n else:\n value, _ts, typ = info.split(\"::\")\n if timestamp == _ts:\n return value, _ts, typ\n return None", "def contains_setCookie(content):\n pa = re.compile(\"setCookie\\(\\'(.*?)\\',[\\s\\r\\n]?\\'(.*?)\\'\", re.IGNORECASE)\n match = re.search(pa, content)\n if match != None:\n cookie_text = match.group(1) + '=' + match.group(2)\n debug(cookie_text)\n return cookie_text\n return \"\"", "def verify_cookies(self, device):\n self.assertTrue(device.cookies is not None)", "def authenticate_cookie(self, cookie):\r\n \r\n # read contents if provided a file\r\n if type(cookie) == file: cookie = cookie.read()\r\n \r\n # unlike passwords the cookie contents isn't enclosed by quotes\r\n self.sendAndRecv(\"AUTHENTICATE %s\\r\\n\" % binascii.b2a_hex(cookie))", "def get_cookie( name, default=None ):", "def from_cookie(self):\n reason = \"[!] Internet Explorer Cookie timestamps (txt cookies) consist of 2 integers values. 
Must be input with a comma between them.\"\n ts_type = self.ts_types['cookie']\n try:\n if not (\",\" in self.cookie) or not (self.cookie.split(\",\")[0].isdigit() and self.cookie.split(\",\")[1].isdigit()):\n self.in_cookie = indiv_output = combined_output = False\n pass\n else:\n low, high = [int(h, base=10) for h in self.cookie.split(',')]\n calc = 10**-7 * (high * 2**32 + low) - 11644473600\n if calc >= 1e+11:\n self.in_cookie = indiv_output = combined_output = False\n pass\n else:\n dt_obj = dt.utcfromtimestamp(calc)\n self.in_cookie = dt_obj.strftime('%Y-%m-%d %H:%M:%S.%f')\n indiv_output = str(\"{} {} UTC\".format(ts_type, self.in_cookie))\n combined_output = str(\"{}{}\\t\\t{} UTC{}\".format(self.left_color, ts_type, self.in_cookie, self.right_color))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.in_cookie = indiv_output = combined_output = False\n return self.in_cookie, indiv_output, combined_output, reason", "def check_xsrf_cookie(self):\n pass", "def get_cookie(self):\n http_cookie = self.get_header('cookie', '')\n return parse_cookie(http_cookie)", "def getCookie(request, arg):\n if not hasattr(request, '_simple_cookie'):\n cookie = request.get_header('Cookie')\n if not cookie:\n return arg.default\n c = Cookie.SimpleCookie()\n c.load(cookie)\n request._simple_cookie = c\n cook_value = request._simple_cookie.get(arg.name)\n if cook_value and cook_value.value:\n return cook_value.value\n return arg.default", "def convert_cookie(cookie_raw):\n cookie = {}\n logging.debug('Raw Cookie: ' + cookie_raw)\n try:\n for i in [i.strip() for i in cookie_raw.split(';')]:\n cookie[i.split('=')[0]] = i.split('=')[1]\n except IndexError:\n #if someone put a ; at the EOF\n pass\n return cookie", "def test_get_user_info_bad_cookie(self):\n cookie_name = 'SinaRot/g/get' # seen in the wild\n cookie_value = 'blah'\n\n http_cookie = '%s=%s' % (cookie_name, 
cookie_value)\n email, admin, user_id = login.get_user_info(http_cookie,\n cookie_name=cookie_name)\n\n self.assertEqual('', email)\n self.assertFalse(admin)" ]
[ "0.680252", "0.6738563", "0.66854155", "0.66783154", "0.66516906", "0.6612051", "0.65850097", "0.6554575", "0.6554575", "0.65137666", "0.6496006", "0.64937854", "0.64632916", "0.6395168", "0.6386337", "0.63724625", "0.63631266", "0.63494116", "0.632701", "0.6297578", "0.6261526", "0.61120313", "0.60896146", "0.6075474", "0.60009086", "0.59936374", "0.5990683", "0.5960249", "0.59471875", "0.5932209" ]
0.72712016
0
Generates a cookie signature.
def cookie_signature(self, *parts): sha1 = hmac.new(self._secret, digestmod=hashlib.sha1) for part in parts: sha1.update(part) return sha1.hexdigest()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def cookie_signature(seed, *parts):\n sha1 = hmac.new(seed, digestmod=hashlib.sha1)\n for part in parts:\n if part:\n sha1.update(part)\n return sha1.hexdigest()", "def gen_sig():\n return hashlib.md5(\n (\n app.config[\"ROVI_API_KEY\"]\n + app.config[\"ROVI_SHARED_SECRET\"]\n + repr(int(time.time()))\n ).encode(\"utf-8\")\n ).hexdigest()", "def generate_signature(payload):\n gemini_api_secret = get_secret_key()\n t = datetime.now()\n payload[\"nonce\"] = str(int(mktime(t.timetuple())*1000) + get_nonce())\n encoded_payload = dumps(payload).encode()\n b64 = b64encode(encoded_payload)\n signature = new(gemini_api_secret, b64, sha384).hexdigest()\n update_session(\"X-GEMINI-PAYLOAD\", b64)\n update_session(\"X-GEMINI-SIGNATURE\", signature)\n increment_nonce()", "def sign(self, content):\n cookie = base64.encodestring(\n hmac.new(self.secret, content, sha1).digest() +\n cookie_m.make_time(time.time() + 60*self.timeout) +\n content)[:-1]\n cookie = cookie.replace(\"/\", \"_\").replace(\"=\", \"~\").replace(\"\\n\", \"\")\n\n if len(cookie) > self.maxlen:\n raise cookie_m.CookieTooLarge(content, cookie)\n return cookie", "def _build_signature(self):\n sig_contents = \\\n self.payload + \".\" + \\\n b64encode(b\"application/xml\").decode(\"ascii\") + \".\" + \\\n b64encode(b\"base64url\").decode(\"ascii\") + \".\" + \\\n b64encode(b\"RSA-SHA256\").decode(\"ascii\")\n sig_hash = SHA256.new(sig_contents.encode(\"ascii\"))\n cipher = PKCS1_v1_5.new(self.private_key)\n sig = urlsafe_b64encode(cipher.sign(sig_hash))\n key_id = urlsafe_b64encode(bytes(self.author_handle, encoding=\"utf-8\"))\n return sig, key_id", "def _generate_signature(self):\n self.logger.debug(f'body payload {self.body_payload}')\n return hmac.new(self.__decrypted_secret, self.body_payload, hashlib.sha1).hexdigest()", "def make_signature(secret: VersionedSecret, message: str, max_age: datetime.timedelta) -> bytes:\n version = 1\n expiration = int(time.time() + max_age.total_seconds())\n header = 
_HEADER_FORMAT.pack(version, expiration)\n digest = _compute_digest(secret.current, header, message)\n return base64.urlsafe_b64encode(header + digest)", "def _generate_signature(self, key, msg):\n key = to_bytes(key)\n msg = to_bytes(msg)\n\n hash_obj = hmac.new(key, msg=msg, digestmod=hashlib.sha256)\n digest = hash_obj.digest() # abstract\n\n signature = base64.b64encode(digest) # Signature\n return to_unicode(signature)", "def signature(self, params):\n string = ''.join(key + params[key] for key in sorted(params.keys()))\n return md5(string + self.cfg('secret'))", "def sign(self, msg):\n z = int.from_bytes(helper.hash256(msg), \"big\")\n k = self.deterministic_k(z)\n k_inv = pow(k, N-2, N)\n r = (k*G).x.num\n s = (z + r * self.secret) * k_inv % N\n if s > N/2:\n s = N - s\n\n return Signature(r, s)", "def gen_sig(key, data):\n signature = hmac.new(key.encode('utf-8'), data.encode('utf-8'), hashlib.sha1)\n\n sig = signature.digest()\n # base64 encode\n b64 = base64.b64encode( sig)\n # url encode\n return b64", "def aws_signature(bucket,keypath,expires,secret_access_key=''):\n sign_msg = ('GET\\n\\n\\n'+expires+'\\n' +'/'+bucket+'/'+keypath)\n h = hmac.new(secret_access_key, sign_msg, hashlib.sha1)\n signature = urllib.quote(base64.b64encode(h.digest()))\n return (signature,sign_msg)", "def create_signed_value(self, name, value):\n timestamp = str(int(time.time()))\n value = base64.b64encode(value)\n signature = self._cookie_signature(name, value, timestamp)\n value = \"|\".join([value, timestamp, signature])\n return value", "def generate_signing_keys():\n return SigningKey.generate(curve=SECP256k1)", "def create_signature(self, string_to_sign: str) -> str:\n begin_signature = hmac.new(key=base64.b64decode(self.secret),\n msg=string_to_sign.encode(),\n digestmod=hashlib.sha1)\n end_signature = begin_signature.digest()\n final_signature = base64.b64encode(end_signature).decode()\n return final_signature", "def get_signed(self, **payload):\n param = ''\n for k in 
payload:\n param += '&' + k + '=' + str(payload[k])\n param = param.lstrip('&')\n signature = hmac.new(self.secret, param, digestmod=hashlib.sha256).hexdigest()\n\n return signature", "def _gen_api_sig(self, endpoint: str) -> str:\n return hmac.new(self._api_secret.encode(),\n endpoint.encode(),\n hashlib.sha512).hexdigest()", "def create_id_nonce_signature(\n cls, *, signature_inputs: TSignatureInputs, private_key: bytes,\n ) -> bytes:\n ...", "def _get_signature(value):\n mySha = hashlib.sha256()\n mySha.update(value)\n # print mySha.hexdigest()\n return mySha.hexdigest()", "def _generate_sas_token(uri, policy, key, expiry=None):\n from base64 import b64encode, b64decode\n from hashlib import sha256\n from hmac import HMAC\n if not expiry:\n expiry = time.time() + 3600 # Default to 1 hour.\n encoded_uri = quote_plus(uri)\n ttl = int(expiry)\n sign_key = '%s\\n%d' % (encoded_uri, ttl)\n signature = b64encode(HMAC(b64decode(key), sign_key.encode('utf-8'), sha256).digest())\n result = {\n 'sr': uri,\n 'sig': signature,\n 'se': str(ttl)}\n if policy:\n result['skn'] = policy\n return 'SharedAccessSignature ' + urlencode(result)", "def _get_signature(self, timestamp: int or str):\n # Key is fixed.\n ha = hmac.new(key=b'd1b964811afb40118a12068ff74a12f4', digestmod=hashlib.sha1)\n grant_type = self.login_data['grant_type']\n client_id = self.login_data['client_id']\n source = self.login_data['source']\n ha.update(bytes((grant_type + client_id + source + str(timestamp)), 'utf-8'))\n return ha.hexdigest()", "def signature(request) -> str:\n return get_test_data(request, __name__, \"signature\", \"r\")", "def sign(self, request, consumer, token):\r\n key, raw = self.signing_base(request, consumer, token)\r\n hashed = hmac.new(key, raw, sha)\r\n # Calculate the digest base 64.\r\n return binascii.b2a_base64(hashed.digest())[:-1]", "def GenSampleSignature(text):\r\n demo_keypair = ('RSA.mVgY8RN6URBTstndvmUUPb4UZTdwvwmddSKE5z_jvKUEK6yk1'\r\n 
'u3rrC9yN8k6FilGj9K0eeUPe2hf4Pj-5CmHww=='\r\n '.AQAB'\r\n '.Lgy_yL3hsLBngkFdDw1Jy9TmSRMiH6yihYetQ8jy-jZXdsZXd8V5'\r\n 'ub3kuBHHk4M39i3TduIkcrjcsiWQb77D8Q==')\r\n\r\n signer = SignatureAlgRsaSha256(demo_keypair)\r\n return signer.Sign(text)", "def daily_signature(key, message):\n byte_key = binascii.unhexlify(key)\n message = message.encode()\n return hmac.new(byte_key, message, hashlib.sha256).hexdigest().upper()", "def RSA_SIGNATURE_HASH() :\n return \"SHA-256\"", "def create_signature(auth_scheme, api_key_secret, signing_data, timestamp, nonce):\n if auth_scheme == 'VERACODE-HMAC-SHA-256':\n signature = create_hmac_sha_256_signature(api_key_secret, signing_data, timestamp, nonce)\n else:\n raise UnsupportedAuthSchemeException('Auth scheme {auth_scheme} not supported'.format(auth_scheme=auth_scheme))\n return signature", "def _sign_request(secret, method, url, timestamp, content_hash=None):\n message = f'{timestamp}{url}{method}{content_hash}'\n\n return hmac.new(secret.encode('utf-8'), message.encode('utf-8'), hashlib.sha512).hexdigest()", "def generate_signature(cls, secret, verb, url, nonce, data):\n # Parse the url so we can remove the base and extract just the path.\n parsedURL = urlparse(url)\n path = parsedURL.path\n if parsedURL.query:\n path = path + '?' + parsedURL.query\n\n # print \"Computing HMAC: %s\" % verb + path + str(nonce) + data\n message = verb + path + str(nonce) + data\n\n signature = hmac.new(bytes(secret, 'utf8'), bytes(message, 'utf8'), digestmod=hashlib.sha256).hexdigest()\n return signature", "def compute_signature(msg):\n hashkey = memcache.Client().get('CURL_TEST_SERVER_HASHKEY')\n h = hmac.new(hashkey, msg, hashlib.sha1)\n signature = urllib.quote(base64.b64encode(h.digest()))\n return signature" ]
[ "0.753487", "0.7067199", "0.66266114", "0.6580719", "0.6503767", "0.6412105", "0.6123159", "0.60631573", "0.60524225", "0.6045356", "0.60235345", "0.6020113", "0.5952092", "0.59324956", "0.5925662", "0.5920742", "0.590129", "0.5891545", "0.58810997", "0.58740246", "0.58721316", "0.5863802", "0.5854703", "0.5837636", "0.58364445", "0.579182", "0.57900226", "0.5774454", "0.5763582", "0.57575" ]
0.75094354
1
Transforms a json File containing installations informations into a list of installation objects >>> un = UnSerializerInstallation() >>> un.unSerialize("../test/testInstallation.json") >>> un.collection[0].InsNbPlaceParking '50'
def unSerialize(self): if path == None: path = UnSerializerActivity.path try: with open(path) as data: json_data = json.load(data) for item in json_data["data"]: inst = Installation(item["ComLib"],item["ComInsee"],item["InsCodePostal"],item["InsLieuDit"] ,item["InsNoVoie"],item["InsLibelleVoie"],item["Nb_Equipements"],item["Nb_FicheEquipement"]) self.collection.append(inst) except FileNotFoundError: print("fichier inexistant") except KeyError: print("erreur de clé, clé inéxistante ou mal orthographiée")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def unserialise_installations_json(pathName):\n to_return = []\n\n try:\n with open(pathName) as json_file:\n json_data = json.load(json_file)\n\n for install in json_data[\"data\"]:\n to_return.append(Installations(\n install[\"InsNumeroInstall\"],install[\"ComLib\"],\n install[\"ComInsee\"],install[\"InsCodePostal\"],\n install[\"InsLieuDit\"],install[\"InsNoVoie\"],\n install[\"InsLibelleVoie\"],install[\"Longitude\"],\n install[\"Latitude\"],install[\"InsAccessibiliteAucun\"],\n install[\"InsAccessibiliteHandiMoteur\"],\n install[\"InsAccessibiliteHandiSens\"],\n install[\"InsEmpriseFonciere\"],install[\"InsGardiennee\"],\n install[\"InsMultiCommune\"],install[\"InsNbPlaceParking\"],\n install[\"InsNbPlaceParkingHandi\"],install[\"InsPartLibelle\"],\n install[\"InsTransportMetro\"],install[\"InsTransportBus\"],\n install[\"InsTransportTram\"],install[\"InsTransportTrain\"],\n install[\"InsTransportBateau\"],install[\"InsTransportAutre\"],\n install[\"Nb_Equipements\"],\n install[\"InsDateMaj\"]))\n except FileNotFoundError:\n print(\"bad path of installation json file\")\n except KeyError:\n print(\"bad json file, see documentation of installation for see how is construct this object\")\n \n return to_return", "def loadProducts():\n dump = os.path.dirname(os.path.abspath(__file__)) + \"/dump.json\"\n data = open(dump, 'r')\n for deserialized_object in serializers.deserialize(\"json\", data):\n deserialized_object.save()", "def reload_already_converted_from_json(fname):\n\n converted = json.load(open(fname, 'r'))\n pinfos = []\n #unable_to_parse = []\n n_unable_to_parse = 0\n i = 0\n\n for pinfo_str in converted:\n i += 1\n\n try:\n pinfos.append(depsolver.package.PackageInfo.from_string(str(pinfo_str))) # cleansing unicode bullshit with str() call\n\n except Exception as e:\n print('\\n')\n print('Unable to parse, so skipping failed PackageInfo creation (' + \n str(i) + ' of ' + str(len(converted)) + ': ' + pinfo_str)\n #print('Exception reads: ' + 
str(e.args))\n #unable_to_parse.append() # TODO: get distkeys here\n n_unable_to_parse += 1\n # continuing\n\n print('Unable to parse ' + str(n_unable_to_parse) + ' out of ' + str(i) +\n ' dists.')\n\n return pinfos", "def load_from_file(cls):\n\n try:\n list_of_ins = []\n with open(cls.__name__ + '.json') as my_file:\n dicts = Base.from_json_string(my_file.read())\n for key in dicts:\n list_of_ins += [cls.create(**key)]\n return (list_of_ins)\n except:\n return ([])", "def load_from_file(cls):\n filename = cls.__name__ + \".json\"\n new_list = []\n if not os.path.isfile(filename):\n return new_list\n with open(filename) as fp:\n json_string = fp.read()\n cls_list = cls.from_json_string(json_string)\n for items in cls_list:\n new_inst = cls.create(**items)\n new_list.append(new_inst)\n return new_list", "def test_deserialize(self):\n with open('tests/small.json', 'r') as fd:\n fc =json.loads(fd.read())\n input_inv = copy.deepcopy(fc)\n inventoryloader = ansible_inventory_manage.inventory.Inventory()\n inventoryloader.load_inventoryjson(fc)\n output_inv = inventoryloader.write_output_json()\n assert input_inv == output_inv", "def load_from_file(cls):\n empty_list = []\n try:\n f = open(cls.__name__ + '.json')\n f.close()\n except FileNotFoundError:\n return empty_list\n\n with open(cls.__name__ + \".json\", 'r') as f:\n new_list = cls.from_json_string(f.read())\n for i in new_list:\n empty_list.append(cls.create(**i))\n return empty_list", "def load_from_file(cls):\n new_list = []\n try:\n with open(\"%s.json\" % cls.__name__, mode='r') as f:\n file = cls.from_json_string(f.read())\n for i in file:\n new_list.append(cls.create(**i))\n except Exception:\n pass\n return new_list", "def load_from_file(cls):\n try:\n with open(cls.__name__ + '.json', 'r') as f:\n jstr = f.read()\n list_d = Base.from_json_string(jstr)\n list_o = []\n for item in list_d:\n list_o.append(cls.create(**item))\n return list_o\n except FileNotFoundError:\n return []", "def deserialize(self):\n 
with open(os.path.join(self.root_path, self._data_file), 'r') as file:\n data = json.load(file)\n for key, val in data.items():\n self.__dict__[key] = val", "def load_from_file(cls):\n list_obj = []\n if os.path.exists(cls.__name__ + \".json\"):\n with open(cls.__name__ + \".json\", \"r\") as _file:\n str_json = _file.read()\n _file.close()\n _dict = Base.from_json_string(str_json)\n for obj in _dict:\n list_obj.append(cls.create(**obj))\n return(list_obj)", "def load_from_file(cls):\n\n l = []\n if o.exists(cls.__name__ + \".json\"):\n with open(cls.__name__ + \".json\") as f:\n for line in f:\n s = cls.from_json_string(line)\n for d in s:\n l.append(cls.create(**d))\n\n return l", "def read_json():\n try:\n rospack = rospkg.RosPack()\n file_path = rospack.get_path('autonomous') + \"/src/data.txt\"\n with open(file_path) as json_file:\n json_data = json.load(json_file)\n \n new_data = []\n for d in json_data:\n a = Autons(len(new_data))\n a.deserialize_json(d)\n new_data.append(a)\n\n global data\n data = new_data\n except:\n read_json()", "def json2register(self):\n try:\n with open('registered.json', 'r') as file:\n self.final_dicc = json.load(file)\n except (FileNotFoundError, ValueError, json.decoder.JSONDecodeError):\n pass", "def load(self):\n basepath = os.path.dirname(os.path.abspath(__file__))\n filename = os.sep.join([basepath, c.FOLDER_JSON, c.FILE_GAME_VERSIONS])\n Handler.ALL_VERS_DATA = {} # reset known data; do not retain defunct information\n with open(filename, \"r\") as f:\n data = json.loads( f.read() )\n self.update(data)\n self._updated = False\n #for v,record in iteritems(Handler.ALL_VERS_DATA):\n # print(type(v), v)\n #for k,v in iteritems(record): ", "def load_from_file(cls):\n lis = []\n if not os.path.isfile(cls.__name__ + \".json\"):\n return lis\n with open(cls.__name__ + \".json\", encoding=\"utf-8\") as myFile:\n json_str = myFile.read()\n my_dict = cls.from_json_string(json_str)\n for inst in my_dict:\n 
lis.append(cls.create(**inst))\n return lis", "def test_input_loadjson(self, fname, groups, hosts):\n with open(fname,'r') as fd:\n fcon = json.loads(fd.read())\n inventory = Inventory()\n inventory.load_inventoryjson(fcon)\n assert inventory.count_groups() == len(groups)\n assert inventory.count_hosts() == len(hosts)", "def load_from_file(cls):\n filename = cls.__name__ + \".json\"\n listOfInst = []\n try:\n with open(filename, \"r\") as f:\n listOfInst = cls.from_json_string(f.read())\n for num, val in enumerate(listOfInst):\n listOfInst[num] = cls.create(**listOfInst[num])\n except:\n pass\n return listOfInst", "def load_from_file(cls):\n if path.exists(cls.__name__ + \".json\") is False:\n return []\n with open(cls.__name__ + \".json\", \"r\", encoding='utf-8') as file:\n listofinstances = []\n objectlist = cls.from_json_string(file.read())\n for dict in objectlist:\n objectdict = {}\n for key, value in dict.items():\n objectdict[key] = value\n listofinstances.append(cls.create(**objectdict))\n return listofinstances", "def read(self) -> list:\n try:\n with open(self.__registry_filepath, 'r') as registry_file:\n registry_file.seek(0)\n file_data = registry_file.read()\n if file_data:\n registry_data = json.loads(file_data)\n else:\n registry_data = list() # Existing, but empty registry\n\n except FileNotFoundError:\n raise self.RegistryError(\"No registy at filepath: {}\".format(self.__registry_filepath))\n\n return registry_data", "def load_from_file(cls):\n \"\"\"1.- Create the file name, 2.- if path file no exits return []\n 3.- open the file name, 4.- loop through the file\n 5.- return list of ints\"\"\"\n filename = \"{}.json\".format(cls.__name__)\n if not os.path.exists(filename):\n return []\n list_int = []\n with open(filename, \"r\") as f:\n dicts = cls.from_json_string(f.readline())\n for i in dicts:\n list_int.append(cls.create(**i))\n return list_int", "def read_sku_json(args, verbose=False, dip_home=None):\n with open_sku_stream(args, verbose, 
dip_home) as sku_output:\n return json.load(sku_output)", "def __init__(self, filename):\n #Opening the file and storing its contents in a list\n with open(filename) as fp:\n self.data = json.load(fp)", "async def _async_load_data(self) -> collection.SerializedStorageCollection | None:\n data = await super()._async_load_data()\n\n if data is None:\n return data\n\n for number in data[\"items\"]:\n number.pop(CONF_INITIAL, None)\n\n return data", "def import_json(self) -> dict:\n with open(self.path, encoding=\"utf8\") as json_file:\n return json.load(json_file)", "def __init__(self):\n with open('info.json') as file:\n self.info = json.load(file)\n file.close()\n self.count = 0", "def _read_jsonl(cls, input_file):\n with open(input_file, 'rb') as f:\n return [json.loads(ln) for ln in f]", "def _read_jsonl(cls, input_file):\n with open(input_file, 'rb') as f:\n return [json.loads(ln) for ln in f]", "def _load(self):\n if os.path.exists(self.path):\n with open(self.path) as src:\n data = json.loads(src.read())\n else:\n data = {\n 'type': 'FeatureCollection',\n 'features': []}\n\n # Must be a FeatureCollection\n assert data['type'] == 'FeatureCollection'\n # All features must have ids, TODO must be unique strings\n assert all(f.get('id') for f in data['features'])\n\n return data", "def readUsers():\n try:\n usersFile = open('../data/users.json', 'rb')\n except IOError:\n usersFile = open('..data/users.json', 'wb')\n\n try:\n f = usersFile.read()\n usersList = jpickle.decode(f)\n except:\n usersList = {}\n usersFile.close()\n return usersList" ]
[ "0.6997117", "0.5583104", "0.5576178", "0.5314316", "0.53104264", "0.529952", "0.52950263", "0.5268388", "0.5228337", "0.508466", "0.504718", "0.5035551", "0.50244963", "0.5004995", "0.49929768", "0.49869066", "0.49772638", "0.4975984", "0.49379867", "0.49078247", "0.48976195", "0.4874021", "0.48460403", "0.48402062", "0.48335087", "0.4828789", "0.4820336", "0.4820336", "0.48116133", "0.48074532" ]
0.7680823
0
Returns the image and its label at the index 'ind' (after applying transformations to the image, if specified).
def __getitem__(self, ind): image = Image.open(os.path.join(self.root_dir,self.image_fns[ind])) #If a transform is specified, apply it if self.transform is not None: image = self.transform(image) # Verify that image is in Tensor format #if type(image) is not torch.Tensor: # image = transform.ToTensor(image) # Convert multi-class label into binary encoding label = self.labels[ind] # Return the image and its label return (image, label)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __getitem__(self, idx):\n img = self.images[idx]\n label = self.labels[idx].split(\" \")[-1]\n img = Image.open(img)\n img = img.convert('RGB')\n img = self.transform(img)\n return(img, label[:-1])", "def __getitem__(self, ind):\n image = np.array([self.data[ind]])\n image = image.squeeze().T\n image = torch.from_numpy(image)\n label = torch.from_numpy(np.array([self.label[ind]]))\n frame_name = self.frame_name[ind]\n \n if self.transform is not None:\n im_tag = {'image': image, 'region_tag': self.reg_tag, 'indices':self.indices}\n transformed = self.transform(im_tag)\n image = transformed['image']\n region_tag = transformed['region_tag'].flatten()\n else:\n region_tag = self.reg_tag.flatten()\n return (image, label, region_tag ,frame_name)", "def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n\n transform_tensor = torchvision.transforms.Compose(\n [torchvision.transforms.ToTensor()]\n )\n\n image = self.images[idx]\n image = transform_tensor(image)\n encoded_label = self.encoder.transform([self.labels[idx]]).ravel()\n\n return (image, encoded_label)", "def __getitem__(self, idx):\n\n image = Image.open(self.filenames[idx])\n image = self.transform(image)\n\n return image, self.labels[idx]", "def __getitem__(self, idx):\n\n img = self.images[idx]\n label = self.labels[idx]\n\n return img, label", "def __getitem__(self, index):\n X = Image.fromarray(self.images[index])\n y = self.labels[index]\n\n if self.transform is not None:\n X = self.transform(X)\n return X, y", "def __getitem__(self, idx):\n im = Image.open(self.data_path + self.sample_df.loc[idx,'filename'])\n mask = Image.open(self.data_path + self.sample_df.loc[idx,'mask_filename'])\n semi_label = torch.tensor(self.sample_df.loc[idx,'semi_label'])\n\n im1, _ = self.transform(im, mask)\n im2, _ = self.transform(im, mask)\n\n return im1, im2, semi_label, idx", "def __getitem__(self, idx):\n\t\tsample = self.samples[idx]\n\t\tfrom PIL import Image\n\t\timage = 
Image.open(self.DatasetWrapper.features(sample))\n\t\t\n\t\tlabel = self.DatasetWrapper.label(sample)\n\t\timage = self.transformer(image)\n\t\treturn image, label", "def __getitem__(self, idx):\n if torch.is_tensor(idx):\n idx = idx.tolist()\n\n transform_tensor = torchvision.transforms.Compose(\n [torchvision.transforms.ToTensor()]\n )\n\n path, label = self.image_paths[idx]\n image = Image.open(path).convert(\"RGB\")\n image, box = self.transform(image)\n image = transform_tensor(image)\n area = (box[3] - box[1]) * (box[2] - box[0])\n encoded_label = self.encoder.transform(label) # .toarray()\n\n return (image, encoded_label)", "def __getitem__(self, index):\r\n path = self.paths[index] # make sure index is within then range\r\n img = Image.open(os.path.join(self.data_dir, path)).convert('RGB') \r\n width = img.size[0]\r\n # apply image transformation\r\n params = get_params(self.opt, img.size)\r\n transform=get_transform(self.opt, params=params, grayscale=(self.input_nc == 1))\r\n im = transform(img)\r\n \r\n label = torch.from_numpy(np.array(self.labels[index]))\r\n label_len = torch.from_numpy(np.array(self.label_lens[index]))\r\n\r\n return im, label, label_len, width", "def __getitem__(self, idx):\n image_path = self.image_paths[idx]\n \n lookup = image_path.split(\"/\")[-1].split(\".\")[0]\n \n image = Image.open(image_path)\n #y = self.name_to_label[idx]\n y = self.fish_dict[lookup]\n X = self.transform(image)\n return X,y", "def __getitem__(self, idx):\r\n if torch.is_tensor(idx):\r\n idx = idx.tolist()\r\n \r\n # galaxy ID\r\n galaxyid = self.labels_df.iloc[idx, 0].astype(str)\r\n\t\t# path of the image\r\n image_path = os.path.join(self.images_dir, galaxyid + '.jpg')\r\n\t\t# read the image\r\n image = Image.open(image_path)\r\n\t\t# apply transform (optional)\r\n if self.transform is not None:\r\n image = self.transform(image)\r\n\t\t# read the true label\r\n label = int(self.labels_df.iloc[idx, 1])\r\n\r\n return image, label", "def get_example(self, 
i):\n img = read_image(self.img_paths[i])\n label_orig = read_image(\n self.label_paths[i], dtype=np.int32, color=False)[0]\n if self.ignore_labels:\n label_out = np.ones(label_orig.shape, dtype=np.int32) * -1\n for label in cityscapes_labels:\n if not label.ignoreInEval:\n label_out[label_orig == label.id] = label.trainId\n else:\n label_out = label_orig\n return img, label_out", "def __getitem__(self, idx):\n im = Image.open(self.data_path + self.sample_df.loc[idx,'filename'])\n # load label\n label = torch.tensor(self.sample_df.loc[idx,'abnormal_XR'])\n # load mask\n if self.load_mask:\n mask = Image.open(self.data_path + self.sample_df.loc[idx,'mask_filename'])\n else:\n mask = None\n\n # load semi-label\n if self.load_semilabels:\n semi_label = torch.tensor(self.sample_df.loc[idx, 'semi_label'])\n else:\n semi_label = None\n\n im, mask = self.transform(im, mask)\n\n return im, label, mask, semi_label, torch.tensor(idx)", "def __getitem__(self, index):\n dataset = self.train_dataset if self.mode == 'train' else self.test_dataset\n filename, label = dataset[index]\n image = Image.open(os.path.join(self.image_dir, filename))\n return self.transform(image), torch.FloatTensor(label)", "def __getitem__(self, index):\n img = Image.open(os.path.join(self.img_path, self.imgs[index][0]))\n label = float(self.imgs[index][1]) if self._fine_tune or self._test else int(float(self.imgs[index][1])) - 1\n return self.preproc(img), torch.tensor(label)", "def __getitem__(self, index):\n # Data augment hasn't been applied.\n\n image = cv2.imread(os.path.join(self.images_list[index])) # X, Y, 3\n image = cv2.resize(image, (480, 480), cv2.INTER_CUBIC)\n label = cv2.imread(os.path.join(self.labels_list[index]), 0)\n label = cv2.resize(label, (480, 480), cv2.INTER_NEAREST)\n\n if not self.all_label:\n label[label == 2] = 0\n label[label == 3] = 0\n label[label == 4] = 2\n # ipdb.set_trace()\n # TODO: if apply transform to image, how about the label?\n # The same operation should do 
both to image and label\n # There is no need to do transpose, just pass it to transform\n sample = {'image': image, 'label': label}\n\n if self.transforms is not None:\n sample = self.transforms(sample)\n else:\n sample[\"image\"] = np.transpose(sample[\"image\"], axes=[2, 0, 1])\n return sample", "def __getitem__(self, idx):\n ImageFile.LOAD_TRUNCATED_IMAGES = True\n image = Image.open(self.filenames[idx]) # PIL image\n width, height = image.size\n if width<IMAGE_SIZE or height<IMAGE_SIZE:\n image = image.resize((IMAGE_SIZE+50, IMAGE_SIZE+50))\n \n image = image.convert('RGB')\n tensor = self.transform(image)\n sample = (tensor, self.labels[idx])\n return sample", "def __getitem__(self, index):\n dataset= self.dataset\n filename, label = dataset[index]\n \n path=os.path.join(self.image_dir, filename)\n if path not in self.img_cache:\n image = Image.open(path)\n image.load()\n self.img_cache[path]=image\n else:\n image=self.img_cache[path]\n \n \n encoded_lab=torch.zeros(len(self.domains), dtype=torch.float32)\n encoded_lab[label]=1\n #image=self.hsv_color_change(image,0.5)\n #im.save(self.image_dir+\"/testimg.jpg\")\n #image.save(self.image_dir+\"/testimg2.jpg\")\n return self.transform(image), encoded_lab", "def __getitem__(self, index):\n if self.train:\n img, target = self.train_data[index], self.train_labels[index]\n else:\n img, target = self.test_data[index], self.test_labels[index]\n\n # doing this so that it is consistent with all other datasets\n # to return a PIL Image\n img = Image.fromarray(img)\n\n if self.transform is not None:\n img = self.transform(img)\n\n if self.target_transform is not None:\n target = self.target_transform(target)\n\n return img, target, index # only line changed", "def __getitem__(self, index: int):\n data, label = self.images[index], self.labels[index]\n data = self._apply_transform(data)\n\n return {\"data\": data, \"target\": label}", "def _norm_input_labels_index(image, label_image=None, index=None):\n\n image = 
dask.array.asarray(image)\n\n if label_image is None:\n label_image = dask.array.ones(\n image.shape, dtype=int, chunks=image.chunks,\n )\n index = dask.array.ones(tuple(), dtype=int, chunks=tuple())\n elif index is None:\n label_image = (label_image > 0).astype(int)\n index = dask.array.ones(tuple(), dtype=int, chunks=tuple())\n\n label_image = dask.array.asarray(label_image)\n index = dask.array.asarray(index)\n\n if index.ndim > 1:\n warnings.warn(\n \"Having index with dimensionality greater than 1 is undefined.\",\n FutureWarning\n )\n\n if image.shape != label_image.shape:\n raise ValueError(\n \"The image and label_image arrays must be the same shape.\"\n )\n\n return (image, label_image, index)", "def __getitem__(self, index):\r\n if self.is_train:\r\n image_names,labels = self.sample_train_batch()\r\n # get sampled order image_file names and corresponding label\r\n image_list,label_list=[],[]\r\n for img,label in zip(image_names,labels):\r\n image = imread(img,flag=1,to_rgb=True)\r\n x,y,w,h = self.boxes[img]\r\n image = image[y:min(y+h,image.shape[0]),x:min(x+w,image.shape[1])]\r\n if image.shape[2]==1:\r\n print(\"has gray file\",img)\r\n image = nd.tile(image,(1,1,3))\r\n image =self._transform(image) # for rgb same value\r\n image_list.append(image)\r\n label_list.append(label)\r\n batch_data = nd.stack(*image_list,axis=0)\r\n batch_label = nd.array(label_list)\r\n return batch_data,batch_label\r\n else:\r\n img = self.test_images_files[index] # get the file name full path\r\n image = imread(img,flag=1,to_rgb=1)\r\n x,y,w,h = self.boxes[img]\r\n image = image[y:min(y+h,image.shape[0]),x:min(x+w,image.shape[1])]\r\n image = self._transform(image)\r\n\r\n return image,self.test_labels[index]", "def display_image_with_label(index):\n\n image = df_train.ix[index, :].values\n label = labels.values[index]\n\n plt.axis('off')\n plt.imshow(image.reshape(image_width, image_height), cmap=cm.binary)\n print(\"It is a {}\".format(label))", "def __getitem__(self, 
idx):\n\n image = self.preprocessor.resize_image(cv.imread(self.samples[idx][0], cv.IMREAD_GRAYSCALE), self.image_size)\n gt_text = self.samples[idx][1]\n return image, gt_text", "def see_image(self, idx, show=True):\n true_label = self.true_targets[idx]\n img, label, _ = self.__getitem__(idx) # img has channel as 1st dim\n img = np.transpose(img.numpy(), (1, 2, 0)) # channel as last dim\n if show:\n plt.imshow(img)\n plt.title(f\"Label: {self.classes_labels[true_label]}\")\n plt.show()\n else:\n return img, label, true_label", "def label_from_index(self, index):\n assert self.labels is not None, \"Labels not processed\"\n #return self.labels[index, :, :]\n return self.labels[index]", "def _get_imganno(self, idx):\n raise NotImplementedError", "def __getitem__(self, idx):\n img_path = self.img_labels.iloc[idx, 0]\n mask_path = self.img_labels.iloc[idx, 1]\n\n image = _load_img(img_path)\n image = np.array(image)\n image = torch.from_numpy(image)\n \n mask = _load_img(mask_path)\n mask = np.array(mask)\n mask = torch.from_numpy(mask)\n \n sample = (image, mask)\n\n return sample", "def __getitem__(self, i):\n image = Image.open(self.images[i]).convert('RGB')\n target = Image.open(self.targets[i]).convert('L')\n if self.transform is not None:\n image, target = self.transform(image, target)\n return image, target" ]
[ "0.7585008", "0.7584966", "0.73006594", "0.70862544", "0.7072327", "0.68898976", "0.68424433", "0.67992413", "0.6772978", "0.6754025", "0.6753745", "0.6637847", "0.66045636", "0.65445894", "0.6364122", "0.6302416", "0.62756693", "0.6274071", "0.62737787", "0.62412", "0.6193601", "0.6179664", "0.61604404", "0.6125118", "0.61196035", "0.6118668", "0.61075944", "0.6104224", "0.6075224", "0.60566723" ]
0.77679193
0
Test EventStreams with url from site.
def test_url_from_site(self, key): site = self.get_site(key) streams = 'recentchange' e = EventStreams(site=site, streams=streams) self.assertEqual( e._url, 'https://stream.wikimedia.org/v2/stream/' + streams) self.assertEqual(e._url, e.url) self.assertEqual(e._url, e.sse_kwargs.get('url')) self.assertIsNone(e._total) self.assertEqual(e._streams, streams) site_repr = f'site={repr(site)}, ' if site != Site() else '' self.assertEqual(repr(e), "EventStreams({}streams='{}')" .format(site_repr, streams))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_url_with_streams(self):\n streams = 'recentchange'\n e = EventStreams(streams=streams)\n self.assertEqual(\n e._url, 'https://stream.wikimedia.org/v2/stream/' + streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertEqual(e._streams, streams)", "def test_url_missing_streams(self):\n with self.assertRaises(NotImplementedError):\n EventStreams()", "def test_multiple_streams(self):\n streams = ('page-create', 'page-move', 'page-delete')\n e = EventStreams(streams=streams)\n combined_streams = ','.join(streams)\n self.assertEqual(\n e._url,\n 'https://stream.wikimedia.org/v2/stream/' + combined_streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertEqual(e._streams, combined_streams)", "def test_events(self):\n\n response = self.client.get(reverse('events'))\n\n assert response.status_code == 200", "def test_url_parameter(self, key):\n e = EventStreams(url=self.sites[key]['hostname'])\n self.assertEqual(e._url, self.sites[key]['hostname'])\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertIsNone(e._streams)\n self.assertEqual(repr(e),\n \"EventStreams(url='{}')\"\n .format(self.sites[key]['hostname']))", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == 
\"test_event3\"", "def test_event_page(self):\n res = self.client.get('/events')\n data = res.data.decode('utf-8')\n assert res.status == '200 OK'\n assert 'Upcoming Events' in data", "def test_stream_publish(self):\n pass", "def test_events(self):\n\n resp = self.client.get('/events?page=1&user_categories=113%2C105%2C104 ')\n self.assertTrue('next_events_url' in resp.context)\n self.assertTrue('previous_events_url' in resp.context)\n self.assertTrue('events_list' in resp.context)\n self.assertTrue('previous' in resp.context)\n self.assertTrue('next' in resp.context)\n self.assertEqual(resp.status_code, 200)", "def test_finds_live_stream(self):\n username = 'darth-vader'\n user = create_profile(username)\n\n now = timezone.now()\n streams = [\n {\n 'author': user,\n 'airs_on': now.replace(hour=(now.hour - 1)),\n 'ends_on': now.replace(hour=(now.hour + 1)),\n 'title': 'Live Stream',\n 'added_on': now\n },\n ]\n create_streams(streams)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n\n self.assertEqual(response.status_code, 200)\n self.assertTrue(response.context['live_stream'])\n self.assertEqual(response.context['live_stream'].title, 'Live Stream')", "def streamTest():\n timer = StoreTimer(store, duration=2.0)\n bottle.response.set_header('Content-Type', 'text/event-stream') #text\n bottle.response.set_header('Cache-Control', 'no-cache')\n # Set client-side auto-reconnect timeout, ms.\n yield 'retry: 1000\\n\\n'\n i = 0\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: START\\n\\n'\n n = 1\n while not timer.expired:\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: {0}\\n\\n'.format(n)\n n += 1\n yield \"data: END\\n\\n\"", "def test_streamWaitForEvents(self):\n resource = self.eventSourceResource()\n response = self.render(resource)\n\n # Read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_get_stream(self):\n pass", 
"def scrape_events(path, urls):\n seen_ids = set()\n result = []\n for url in urls:\n # Get all of the Network requests being sent out\n print(f'Processing {url}')\n driver.get(url)\n browser_log = driver.get_log('performance') \n events = [process_browser_log_entry(entry) for entry in browser_log]\n results = []\n # Find the Network request that sends a GET request to EventBrite API\n for event in events:\n if event['method'] == 'Network.responseReceived':\n # print(event)\n if 'event_ids' in event['params']['response']['url']:\n results.append(event)\n # Get the GET request URL\n get_url = \"\"\n # TODO: Sometimes returning 0 or more than 1... I'm not sure why :(\n if len(results) >= 1:\n get_url = results[0]['params']['response']['url']\n # Get the GET request response JSON\n json_response = get_request(get_url)\n event_list = json_response['events']\n # Find unique events in the response JSON \n unique_event_list = []\n for event in event_list:\n if event['id'] not in seen_ids:\n seen_ids.add(event['id'])\n unique_event_list.append(event)\n parsed_events = parse_event_page(unique_event_list)\n result.extend(parsed_events)\n else:\n print(results)\n print('yikes something went wrong')\n\n driver.close()\n return result\n # save_events(path, result)", "def setUp(self):\n super().setUp()\n with mock.patch('pywikibot.comms.eventstreams.EventSource'):\n self.es = EventStreams(url='dummy url')", "def setUp(self):\n super().setUp()\n with mock.patch('pywikibot.comms.eventstreams.EventSource'):\n self.es = EventStreams(url='dummy url')", "def test_streamBufferedEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n resource.addEvents(events)\n\n response = self.render(resource)\n\n # Each result from read() is another event\n for i in range(len(events)):\n result = yield 
response.stream.read()\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=events[i][\"eventID\"]\n )\n )", "def test_query_events(self):\n query_list = {\n 'q': 'test',\n 'type': 'show'\n }\n results = query_events(query_list)\n events = list(results['events'])\n showcase = list(results['showcase_events'])\n self.assertTrue(self.event_show1 in events)\n self.assertTrue(self.event_show2 in showcase)\n self.assertFalse(self.event_film in events)", "def test_get_events(self):\n\n request_params = {\n \"token\": EVENTBRITE_API_KEY,\n \"location.latitude\": \"37.4192008972\",\n \"location.longitude\": \"-122.057403564\",\n \"location.within\": \"20mi\",\n \"sort_by\": \"date\"\n }\n url_encoded_request_params = _update_urlencode_request_params(\"103,109\", 1, request_params)\n events_list, page_count = _get_events(url_encoded_request_params)\n self.assertTrue(type(events_list) is list)\n self.assertTrue(type(page_count) is int)", "def test_meeting_live_stream_update(self):\n pass", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n self.assertTrue(d.called)\n\n # Each result from read() is another event\n for i in range(len(events)):\n if d is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", 
"def test_stream(self):\n with skipping(NotImplementedError):\n self.es = EventStreamsTestClass(streams='recentchange')\n limit = 50\n self.es.set_maximum_items(limit)\n self.assertLength(list(self.es), limit)", "def test_404_url():\n def not_found(request):\n request.send_error(404)\n\n with test_server(handler=not_found, methods=(\"post\", \"get\"),\n port=\"random\") as server:\n stream = TweetStream(\"foo\", \"bar\", url=server.baseurl)\n assert_raises(ConnectionError, stream.next)\n\n stream = FollowStream(\"foo\", \"bar\", [1, 2, 3], url=server.baseurl)\n assert_raises(ConnectionError, stream.next)\n\n stream = TrackStream(\"foo\", \"bar\", [\"opera\"], url=server.baseurl)\n assert_raises(ConnectionError, stream.next)", "def test_data_source_soaps_change_stream_get(self):\n pass", "async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> None:\n aioclient_mock.get(\"http://127.0.0.1/app/entrypoint.js\")\n await hassio_client.get(\"/api/hassio/app/entrypoint.js\", data=\"test\")\n assert isinstance(aioclient_mock.mock_calls[-1][2], StreamReader)", "def test_urls(self):\n # Points to the XML at the moment because the UFT HTML report isn't stored at a predictable location.\n self.assertEqual(['http://server/uft/uft.xml'],\n self.__uft.metric_source_urls('http://server/uft/uft.xml'))", "def test_api_predictor_events_get(self):\n pass", "def test_scraper(self):\n scrap = ScraperModule(url=self.anime_link, host=\"mixdrop\")\n pages = scrap.pages\n print(f\"pages: {pages}\")\n get_pages = scrap.get_pages()\n first_link = scrap.get_links(pages[0])\n print(first_link)\n episodes = scrap.get_episodes()\n print(episodes)", "def test_upcoming_events(self, client, site, homepage, events):\n response = client.get(homepage.relative_url(site))\n\n # should have link to event list\n assertContains(response, reverse(\"events:upcoming\"))\n\n # only one event in context, since others already happened\n assert len(response.context[\"events\"]) == 1\n assert 
events[\"workshop\"] not in response.context[\"events\"]\n assert events[\"lecture\"] not in response.context[\"events\"]\n\n # shows event title, start/end time in local tz, and link to view\n est = zoneinfo.ZoneInfo(\"America/New_York\")\n assertContains(response, events[\"deadline\"].get_url())\n assertContains(response, events[\"deadline\"].title)\n assertContains(\n response,\n format(\n events[\"deadline\"].start_time.astimezone(est),\n \"F j\",\n ),\n )\n\n # shouldn't show if not published\n events[\"deadline\"].unpublish()\n response = client.get(homepage.relative_url(site))\n assert events[\"deadline\"] not in response.context[\"events\"]", "def test_splits_streams(self):\n username = 'darth-vader'\n user = create_profile(username)\n\n now = timezone.now()\n streams = [\n {\n 'author': user,\n 'airs_on': now.replace(year=(now.year + 1)),\n 'ends_on': now.replace(hour=(now.hour - 1)),\n 'title': 'Future Stream',\n 'added_on': now\n },\n {\n 'author': user,\n 'airs_on': now.replace(year=(now.year - 1)),\n 'ends_on': now.replace(hour=(now.hour - 1)),\n 'title': 'Previous Stream',\n 'added_on': now\n }\n\n ]\n create_streams(streams)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n\n future_streams = response.context['future_streams']\n previous_streams = response.context['previous_streams']\n\n self.assertTrue(len(future_streams))\n self.assertTrue(len(previous_streams))\n self.assertEqual(future_streams[0].title, 'Future Stream')\n self.assertEqual(previous_streams[0].title, 'Previous Stream')" ]
[ "0.7481012", "0.7062626", "0.6835958", "0.6751704", "0.6703934", "0.64298284", "0.63931566", "0.63698363", "0.6357172", "0.6342528", "0.6255347", "0.61820084", "0.61741704", "0.6110761", "0.6060046", "0.6060046", "0.6050001", "0.6006673", "0.5970618", "0.5941284", "0.59273326", "0.58726144", "0.5866608", "0.5841816", "0.5801505", "0.5800473", "0.5799239", "0.57918924", "0.5758493", "0.57432735" ]
0.73900414
1
Test EventStreams with url from default site.
def test_url_with_streams(self): streams = 'recentchange' e = EventStreams(streams=streams) self.assertEqual( e._url, 'https://stream.wikimedia.org/v2/stream/' + streams) self.assertEqual(e._url, e.url) self.assertEqual(e._url, e.sse_kwargs.get('url')) self.assertIsNone(e._total) self.assertEqual(e._streams, streams)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_url_missing_streams(self):\n with self.assertRaises(NotImplementedError):\n EventStreams()", "def test_url_from_site(self, key):\n site = self.get_site(key)\n streams = 'recentchange'\n e = EventStreams(site=site, streams=streams)\n self.assertEqual(\n e._url, 'https://stream.wikimedia.org/v2/stream/' + streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertEqual(e._streams, streams)\n site_repr = f'site={repr(site)}, ' if site != Site() else ''\n self.assertEqual(repr(e),\n \"EventStreams({}streams='{}')\"\n .format(site_repr, streams))", "def test_events(self):\n\n response = self.client.get(reverse('events'))\n\n assert response.status_code == 200", "def test_multiple_streams(self):\n streams = ('page-create', 'page-move', 'page-delete')\n e = EventStreams(streams=streams)\n combined_streams = ','.join(streams)\n self.assertEqual(\n e._url,\n 'https://stream.wikimedia.org/v2/stream/' + combined_streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertEqual(e._streams, combined_streams)", "def test_stream_publish(self):\n pass", "def test_url_parameter(self, key):\n e = EventStreams(url=self.sites[key]['hostname'])\n self.assertEqual(e._url, self.sites[key]['hostname'])\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertIsNone(e._streams)\n self.assertEqual(repr(e),\n \"EventStreams(url='{}')\"\n .format(self.sites[key]['hostname']))", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await 
_stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def setUp(self):\n super().setUp()\n with mock.patch('pywikibot.comms.eventstreams.EventSource'):\n self.es = EventStreams(url='dummy url')", "def setUp(self):\n super().setUp()\n with mock.patch('pywikibot.comms.eventstreams.EventSource'):\n self.es = EventStreams(url='dummy url')", "def test_get_stream(self):\n pass", "def streamTest():\n timer = StoreTimer(store, duration=2.0)\n bottle.response.set_header('Content-Type', 'text/event-stream') #text\n bottle.response.set_header('Cache-Control', 'no-cache')\n # Set client-side auto-reconnect timeout, ms.\n yield 'retry: 1000\\n\\n'\n i = 0\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: START\\n\\n'\n n = 1\n while not timer.expired:\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: {0}\\n\\n'.format(n)\n n += 1\n yield \"data: END\\n\\n\"", "def test_finds_live_stream(self):\n username = 'darth-vader'\n user = create_profile(username)\n\n now = timezone.now()\n streams = [\n {\n 'author': user,\n 'airs_on': now.replace(hour=(now.hour - 1)),\n 'ends_on': now.replace(hour=(now.hour + 1)),\n 'title': 'Live Stream',\n 'added_on': now\n },\n ]\n create_streams(streams)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n\n self.assertEqual(response.status_code, 200)\n self.assertTrue(response.context['live_stream'])\n self.assertEqual(response.context['live_stream'].title, 'Live Stream')", "def test_event_page(self):\n res = self.client.get('/events')\n data = res.data.decode('utf-8')\n assert res.status == '200 OK'\n assert 'Upcoming Events' in data", "def test_events(self):\n\n resp = self.client.get('/events?page=1&user_categories=113%2C105%2C104 ')\n self.assertTrue('next_events_url' in resp.context)\n 
self.assertTrue('previous_events_url' in resp.context)\n self.assertTrue('events_list' in resp.context)\n self.assertTrue('previous' in resp.context)\n self.assertTrue('next' in resp.context)\n self.assertEqual(resp.status_code, 200)", "def test_streamWaitForEvents(self):\n resource = self.eventSourceResource()\n response = self.render(resource)\n\n # Read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_404_url():\n def not_found(request):\n request.send_error(404)\n\n with test_server(handler=not_found, methods=(\"post\", \"get\"),\n port=\"random\") as server:\n stream = TweetStream(\"foo\", \"bar\", url=server.baseurl)\n assert_raises(ConnectionError, stream.next)\n\n stream = FollowStream(\"foo\", \"bar\", [1, 2, 3], url=server.baseurl)\n assert_raises(ConnectionError, stream.next)\n\n stream = TrackStream(\"foo\", \"bar\", [\"opera\"], url=server.baseurl)\n assert_raises(ConnectionError, stream.next)", "def test_register_stream(self):\n pass", "def test_streamBufferedEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n resource.addEvents(events)\n\n response = self.render(resource)\n\n # Each result from read() is another event\n for i in range(len(events)):\n result = yield response.stream.read()\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=events[i][\"eventID\"]\n )\n )", "def test_stream(self):\n with skipping(NotImplementedError):\n self.es = EventStreamsTestClass(streams='recentchange')\n limit = 50\n self.es.set_maximum_items(limit)\n self.assertLength(list(self.es), limit)", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", 
eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n self.assertTrue(d.called)\n\n # Each result from read() is another event\n for i in range(len(events)):\n if d is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_meeting_live_stream_update(self):\n pass", "def test_data_source_soaps_change_stream_get(self):\n pass", "def test_api_predictor_events_get(self):\n pass", "async def test_stream(hassio_client, aioclient_mock: AiohttpClientMocker) -> None:\n aioclient_mock.get(\"http://127.0.0.1/app/entrypoint.js\")\n await hassio_client.get(\"/api/hassio/app/entrypoint.js\", data=\"test\")\n assert isinstance(aioclient_mock.mock_calls[-1][2], StreamReader)", "def test_query_events(self):\n query_list = {\n 'q': 'test',\n 'type': 'show'\n }\n results = query_events(query_list)\n events = list(results['events'])\n showcase = list(results['showcase_events'])\n self.assertTrue(self.event_show1 in events)\n self.assertTrue(self.event_show2 in showcase)\n self.assertFalse(self.event_film in events)", "def test_future_event(self):\n pass", "def test_get_events(self):\n\n request_params = {\n \"token\": EVENTBRITE_API_KEY,\n \"location.latitude\": \"37.4192008972\",\n \"location.longitude\": \"-122.057403564\",\n \"location.within\": \"20mi\",\n \"sort_by\": \"date\"\n }\n url_encoded_request_params = _update_urlencode_request_params(\"103,109\", 1, request_params)\n events_list, 
page_count = _get_events(url_encoded_request_params)\n self.assertTrue(type(events_list) is list)\n self.assertTrue(type(page_count) is int)", "def test_urls(self):\n # Points to the XML at the moment because the UFT HTML report isn't stored at a predictable location.\n self.assertEqual(['http://server/uft/uft.xml'],\n self.__uft.metric_source_urls('http://server/uft/uft.xml'))", "def test_upcoming_events(self, client, site, homepage, events):\n response = client.get(homepage.relative_url(site))\n\n # should have link to event list\n assertContains(response, reverse(\"events:upcoming\"))\n\n # only one event in context, since others already happened\n assert len(response.context[\"events\"]) == 1\n assert events[\"workshop\"] not in response.context[\"events\"]\n assert events[\"lecture\"] not in response.context[\"events\"]\n\n # shows event title, start/end time in local tz, and link to view\n est = zoneinfo.ZoneInfo(\"America/New_York\")\n assertContains(response, events[\"deadline\"].get_url())\n assertContains(response, events[\"deadline\"].title)\n assertContains(\n response,\n format(\n events[\"deadline\"].start_time.astimezone(est),\n \"F j\",\n ),\n )\n\n # shouldn't show if not published\n events[\"deadline\"].unpublish()\n response = client.get(homepage.relative_url(site))\n assert events[\"deadline\"] not in response.context[\"events\"]", "def test_data_source_soaps_change_stream_post(self):\n pass" ]
[ "0.7300169", "0.69946647", "0.68183845", "0.68033665", "0.65966797", "0.657555", "0.65181506", "0.6353719", "0.6353719", "0.6330753", "0.62743527", "0.62625444", "0.6214412", "0.61807495", "0.61765236", "0.6149776", "0.59979254", "0.5985642", "0.5975823", "0.5968839", "0.5920416", "0.5890861", "0.5888999", "0.5877221", "0.57602596", "0.5754743", "0.5751195", "0.57428133", "0.57402813", "0.5707055" ]
0.74042183
0
Test EventStreams with multiple streams.
def test_multiple_streams(self): streams = ('page-create', 'page-move', 'page-delete') e = EventStreams(streams=streams) combined_streams = ','.join(streams) self.assertEqual( e._url, 'https://stream.wikimedia.org/v2/stream/' + combined_streams) self.assertEqual(e._url, e.url) self.assertEqual(e._url, e.sse_kwargs.get('url')) self.assertEqual(e._streams, combined_streams)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_multiple_streams(self, dummy_streamers, dummy_receivers):\n dummy_ids = [source_id for _, _, source_id, _ in dummy_streamers]\n source_ids = [receiver._source_id\n for _, receiver in dummy_receivers.items()]\n assert set(source_ids) == set(dummy_ids)", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_streamBufferedEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n resource.addEvents(events)\n\n response = self.render(resource)\n\n # Each result from read() is another event\n for i in range(len(events)):\n result = yield response.stream.read()\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=events[i][\"eventID\"]\n )\n )", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n 
self.assertTrue(d.called)\n\n # Each result from read() is another event\n for i in range(len(events)):\n if d is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_streaming(self, dummy_streamers, dummy_receivers):\n for dummy, device, source_id, subscriptions in dummy_streamers:\n receiver = dummy_receivers[source_id]\n # basic thread behaviour (start on `receiver.start()`)\n for thread in receiver._threads.values():\n assert not thread.is_alive()\n receiver.start()\n for thread in receiver._threads.values():\n assert thread.is_alive()\n\n # TODO: compare data (use pre-defined data)\n\n # NOTE: some threads may take a while to stop,\n # not sure how to assert this properly\n receiver.stop()\n #for thread in receiver._threads.values():\n # assert not thread.is_alive()", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n 
self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_url_with_streams(self):\n streams = 'recentchange'\n e = EventStreams(streams=streams)\n self.assertEqual(\n e._url, 'https://stream.wikimedia.org/v2/stream/' + streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertEqual(e._streams, streams)", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He,b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_stream_publish(self):\n pass", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n 
call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def testDataStreams(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_HFS,\n identifier=self._IDENTIFIER_ANOTHER_FILE,\n location='/a_directory/another_file',\n parent=self._raw_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n self.assertEqual(file_entry.number_of_data_streams, 1)\n\n data_stream_names = []\n for data_stream in file_entry.data_streams:\n data_stream_names.append(data_stream.name)\n\n self.assertEqual(data_stream_names, [''])\n\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_HFS, identifier=self._IDENTIFIER_A_DIRECTORY,\n location='/a_directory', parent=self._raw_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n self.assertEqual(file_entry.number_of_data_streams, 0)\n\n data_stream_names = []\n for data_stream in file_entry.data_streams:\n data_stream_names.append(data_stream.name)\n\n self.assertEqual(data_stream_names, [])\n\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_HFS, identifier=25,\n location='/a_directory/a_resourcefork', parent=self._raw_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n self.assertEqual(file_entry.number_of_data_streams, 2)\n\n data_stream_names = []\n for data_stream in file_entry.data_streams:\n data_stream_names.append(data_stream.name)\n\n self.assertEqual(data_stream_names, ['', 'rsrc'])", "def 
test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He] FooEvent[b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n 
self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_stream(self):\n with skipping(NotImplementedError):\n self.es = EventStreamsTestClass(streams='recentchange')\n limit = 50\n self.es.set_maximum_items(limit)\n self.assertLength(list(self.es), limit)", "def testGetDataStreams(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_HFS,\n identifier=self._IDENTIFIER_ANOTHER_FILE,\n location='/a_directory/another_file',\n parent=self._raw_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n data_streams = file_entry._GetDataStreams()\n self.assertEqual(len(data_streams), 1)\n\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_HFS, identifier=25,\n location='/a_directory/a_resourcefork', parent=self._raw_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n data_streams = file_entry._GetDataStreams()\n self.assertEqual(len(data_streams), 2)", "def test_subscribe_many_listeners(self):\n def listener():\n pass\n\n def listener1():\n pass\n\n def listener2():\n pass\n\n EVENT_MANAGER.subscribe('test_listeners', listener, listener1, listener2)\n\n self.assertIn(listener, EVENT_MANAGER._listeners['test_listeners'])\n self.assertIn(listener1, EVENT_MANAGER._listeners['test_listeners'])\n self.assertIn(listener2, EVENT_MANAGER._listeners['test_listeners'])", "def test_splits_streams(self):\n username = 'darth-vader'\n user = create_profile(username)\n\n now = timezone.now()\n streams = [\n {\n 'author': user,\n 'airs_on': now.replace(year=(now.year + 1)),\n 'ends_on': now.replace(hour=(now.hour - 1)),\n 'title': 'Future 
Stream',\n 'added_on': now\n },\n {\n 'author': user,\n 'airs_on': now.replace(year=(now.year - 1)),\n 'ends_on': now.replace(hour=(now.hour - 1)),\n 'title': 'Previous Stream',\n 'added_on': now\n }\n\n ]\n create_streams(streams)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n\n future_streams = response.context['future_streams']\n previous_streams = response.context['previous_streams']\n\n self.assertTrue(len(future_streams))\n self.assertTrue(len(previous_streams))\n self.assertEqual(future_streams[0].title, 'Future Stream')\n self.assertEqual(previous_streams[0].title, 'Previous Stream')", "def test_streamWaitForEvents(self):\n resource = self.eventSourceResource()\n response = self.render(resource)\n\n # Read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def testDataStreams(self):\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_CS, parent=self._gpt_path_spec,\n volume_index=0)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n self.assertEqual(file_entry.number_of_data_streams, 1)\n\n data_stream_names = []\n for data_stream in file_entry.data_streams:\n data_stream_names.append(data_stream.name)\n\n self.assertEqual(data_stream_names, [''])\n\n path_spec = path_spec_factory.Factory.NewPathSpec(\n definitions.TYPE_INDICATOR_CS, location='/',\n parent=self._gpt_path_spec)\n file_entry = self._file_system.GetFileEntryByPathSpec(path_spec)\n self.assertIsNotNone(file_entry)\n\n self.assertEqual(file_entry.number_of_data_streams, 0)\n\n data_stream_names = []\n for data_stream in file_entry.data_streams:\n data_stream_names.append(data_stream.name)\n\n self.assertEqual(data_stream_names, [])", "def test_get_future_events(self):\n events = list(get_future_events())\n self.assertFalse(self.event_show1 in 
events)\n self.assertTrue(self.event_show2 in events)", "def test_url_missing_streams(self):\n with self.assertRaises(NotImplementedError):\n EventStreams()", "def streamTest():\n timer = StoreTimer(store, duration=2.0)\n bottle.response.set_header('Content-Type', 'text/event-stream') #text\n bottle.response.set_header('Cache-Control', 'no-cache')\n # Set client-side auto-reconnect timeout, ms.\n yield 'retry: 1000\\n\\n'\n i = 0\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: START\\n\\n'\n n = 1\n while not timer.expired:\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: {0}\\n\\n'.format(n)\n n += 1\n yield \"data: END\\n\\n\"", "async def test_event(bus: lightbus.BusNode, dummy_api, stream_use):\n bus.bus_client.transport_registry.get_event_transport('default').stream_use = stream_use\n manually_set_plugins({})\n received_kwargs = []\n received_api_name = None\n received_event_name = None\n\n async def listener(api_name, event_name, **kwargs):\n nonlocal received_kwargs, received_api_name, received_event_name\n received_kwargs.append(kwargs)\n received_api_name = api_name\n received_event_name = event_name\n\n await bus.my.dummy.my_event.listen_async(listener)\n await asyncio.sleep(0.01)\n await bus.my.dummy.my_event.fire_async(field='Hello! 😎')\n await asyncio.sleep(0.01)\n\n # await asyncio.gather(co_fire_event(), co_listen_for_events())\n assert received_kwargs == [{'field': 'Hello! 
😎'}]\n assert received_api_name == 'my.dummy'\n assert received_event_name == 'my_event'", "def test_register_stream(self):\n pass", "async def test_multiple_event_transports(loop, server, redis_server_b):\n registry.add(ApiA())\n registry.add(ApiB())\n\n manually_set_plugins(plugins={})\n\n redis_server_a = server\n\n port_a = redis_server_a.tcp_address.port\n port_b = redis_server_b.tcp_address.port\n\n logging.warning(f'Server A port: {port_a}')\n logging.warning(f'Server B port: {port_b}')\n\n config = Config.load_dict({\n 'bus': {\n 'schema': {\n 'transport': {'redis': {'url': f'redis://localhost:{port_a}'}},\n }\n },\n 'apis': {\n 'default': {\n 'event_transport': {'redis': {'url': f'redis://localhost:{port_a}'}},\n },\n 'api_b': {\n 'event_transport': {'redis': {'url': f'redis://localhost:{port_b}'}},\n },\n }\n })\n\n bus = BusNode(name='', parent=None, bus_client=lightbus.BusClient(config=config, loop=loop))\n await asyncio.sleep(0.1)\n\n await bus.api_a.event_a.fire_async()\n await bus.api_b.event_b.fire_async()\n\n connection_manager_a = bus.bus_client.transport_registry.get_event_transport('api_a').connection_manager\n connection_manager_b = bus.bus_client.transport_registry.get_event_transport('api_b').connection_manager\n\n with await connection_manager_a() as redis:\n assert await redis.xrange('api_a.event_a:stream')\n assert await redis.xrange('api_b.event_b:stream') == []\n\n with await connection_manager_b() as redis:\n assert await redis.xrange('api_a.event_a:stream') == []\n assert await redis.xrange('api_b.event_b:stream')", "def test_subscription(event_manager: EventManager) -> None:\n subscriber_any = Mock()\n subscriber_vmd4 = Mock()\n subscriber_vmd4_c1p1 = Mock()\n subscriber_vmd4_c1p2 = Mock()\n\n unsub_any = event_manager.subscribe(subscriber_any)\n assert len(event_manager) == 1\n\n unsub_vmd4 = event_manager.subscribe(\n subscriber_vmd4,\n topic_filter=EventTopic.MOTION_DETECTION_4,\n )\n assert len(event_manager) == 2\n\n 
unsub_vmd4_c1p1 = event_manager.subscribe(\n subscriber_vmd4_c1p1,\n id_filter=\"Camera1Profile1\",\n topic_filter=EventTopic.MOTION_DETECTION_4,\n operation_filter=EventOperation.INITIALIZED,\n )\n assert len(event_manager) == 3\n\n unsub_vmd4_c1p2 = event_manager.subscribe(\n subscriber_vmd4_c1p2,\n id_filter=(\"Camera1Profile2\",),\n topic_filter=(EventTopic.MOTION_DETECTION_4,),\n operation_filter=(EventOperation.INITIALIZED, EventOperation.CHANGED),\n )\n assert len(event_manager) == 4\n\n # Validate subscription matching\n event_manager.handler(PIR_INIT)\n assert subscriber_any.call_count == 1\n assert subscriber_vmd4.call_count == 0\n assert subscriber_vmd4_c1p1.call_count == 0\n assert subscriber_vmd4_c1p2.call_count == 0\n\n event_manager.handler(VMD4_C1P1_INIT)\n assert subscriber_any.call_count == 2\n assert subscriber_vmd4.call_count == 1\n assert subscriber_vmd4_c1p1.call_count == 1\n assert subscriber_vmd4_c1p2.call_count == 0\n\n event_manager.handler(VMD4_C1P2_INIT)\n assert subscriber_any.call_count == 3\n assert subscriber_vmd4.call_count == 2\n assert subscriber_vmd4_c1p1.call_count == 1\n assert subscriber_vmd4_c1p2.call_count == 1\n\n event_manager.handler(VMD4_C1P1_CHANGE)\n assert subscriber_any.call_count == 4\n assert subscriber_vmd4.call_count == 3\n assert subscriber_vmd4_c1p1.call_count == 1\n assert subscriber_vmd4_c1p2.call_count == 1\n\n event_manager.handler(VMD4_C1P2_CHANGE)\n assert subscriber_any.call_count == 5\n assert subscriber_vmd4.call_count == 4\n assert subscriber_vmd4_c1p1.call_count == 1\n assert subscriber_vmd4_c1p2.call_count == 2\n\n # validate unsubscribing\n unsub_any()\n assert len(event_manager) == 3\n unsub_vmd4()\n assert len(event_manager) == 2\n unsub_vmd4_c1p1()\n assert len(event_manager) == 1\n unsub_vmd4_c1p2()\n assert len(event_manager) == 0\n\n # Validate no exception when unsubscribe with no subscription exist\n unsub_any()\n\n # Validate no exception when unsubscribe with no object ID exist\n 
event_manager._subscribers.pop(\"Camera1Profile1\")\n unsub_vmd4_c1p1()", "def allNext(cls, streams=None):\r\n if streams == None:\r\n streams = EventStream.AllStreams\r\n\r\n selectlist = [x.filehandle for x in streams]\r\n ready = select.select(selectlist, [ ], [ ], 0)[0]\r\n if not ready: return\r\n while ready:\r\n for fd in ready:\r\n try:\r\n s = os.read(fd, Format.EventSize)\r\n except Exception as e:\r\n failed = getattr(cls, 'failed', None)\r\n if not failed:\r\n failed = set()\r\n setattr(cls, 'failed', failed)\r\n if fd not in failed:\r\n LOGGER.error(\"Couldn't read fd %d %s\", fd, e)\r\n failed.add(fd)\r\n continue\r\n if s:\r\n for x in streams:\r\n if x.filehandle == fd:\r\n stream = x\r\n break\r\n event = EventStruct.EventStruct(stream)\r\n event.decode(s)\r\n yield event\r\n ready = select.select(selectlist, [ ], [ ], 0)[0]", "def test_stream_loop(self):\n chans, gains, scans, rate = (10,10,10,10), (1,2,4,5), 1024, 500\n v = [v[0] for v in self.l.stream_sync(\n channels=chans, gains=gains,\n num_scans=scans, rate=rate)]\n for vi in v:\n for r in vi:\n self.assertTrue(abs(r-2.5) < .1,\n \"%s should be cal, 2.5v\" % vi[0])", "def test_query_events(self):\n query_list = {\n 'q': 'test',\n 'type': 'show'\n }\n results = query_events(query_list)\n events = list(results['events'])\n showcase = list(results['showcase_events'])\n self.assertTrue(self.event_show1 in events)\n self.assertTrue(self.event_show2 in showcase)\n self.assertFalse(self.event_film in events)", "def test_data_source_soaps_change_stream_get(self):\n pass" ]
[ "0.709718", "0.7045089", "0.68629783", "0.6817295", "0.67916495", "0.65646744", "0.6538408", "0.6466485", "0.64646655", "0.64532644", "0.64400244", "0.6437063", "0.642483", "0.64189976", "0.6392507", "0.6370093", "0.63654864", "0.6338461", "0.62697303", "0.62565124", "0.621282", "0.6176605", "0.61409044", "0.6140487", "0.61143446", "0.6113677", "0.60111284", "0.60032725", "0.5991273", "0.5990363" ]
0.812604
0
Test EventStreams with url from site with missing streams.
def test_url_missing_streams(self): with self.assertRaises(NotImplementedError): EventStreams()
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_url_with_streams(self):\n streams = 'recentchange'\n e = EventStreams(streams=streams)\n self.assertEqual(\n e._url, 'https://stream.wikimedia.org/v2/stream/' + streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertEqual(e._streams, streams)", "def test_multiple_streams(self):\n streams = ('page-create', 'page-move', 'page-delete')\n e = EventStreams(streams=streams)\n combined_streams = ','.join(streams)\n self.assertEqual(\n e._url,\n 'https://stream.wikimedia.org/v2/stream/' + combined_streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertEqual(e._streams, combined_streams)", "def test_url_from_site(self, key):\n site = self.get_site(key)\n streams = 'recentchange'\n e = EventStreams(site=site, streams=streams)\n self.assertEqual(\n e._url, 'https://stream.wikimedia.org/v2/stream/' + streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertEqual(e._streams, streams)\n site_repr = f'site={repr(site)}, ' if site != Site() else ''\n self.assertEqual(repr(e),\n \"EventStreams({}streams='{}')\"\n .format(site_repr, streams))", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_url_parameter(self, key):\n e = 
EventStreams(url=self.sites[key]['hostname'])\n self.assertEqual(e._url, self.sites[key]['hostname'])\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertIsNone(e._total)\n self.assertIsNone(e._streams)\n self.assertEqual(repr(e),\n \"EventStreams(url='{}')\"\n .format(self.sites[key]['hostname']))", "def test_404_url():\n def not_found(request):\n request.send_error(404)\n\n with test_server(handler=not_found, methods=(\"post\", \"get\"),\n port=\"random\") as server:\n stream = TweetStream(\"foo\", \"bar\", url=server.baseurl)\n assert_raises(ConnectionError, stream.next)\n\n stream = FollowStream(\"foo\", \"bar\", [1, 2, 3], url=server.baseurl)\n assert_raises(ConnectionError, stream.next)\n\n stream = TrackStream(\"foo\", \"bar\", [\"opera\"], url=server.baseurl)\n assert_raises(ConnectionError, stream.next)", "def test_finds_live_stream(self):\n username = 'darth-vader'\n user = create_profile(username)\n\n now = timezone.now()\n streams = [\n {\n 'author': user,\n 'airs_on': now.replace(hour=(now.hour - 1)),\n 'ends_on': now.replace(hour=(now.hour + 1)),\n 'title': 'Live Stream',\n 'added_on': now\n },\n ]\n create_streams(streams)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n\n self.assertEqual(response.status_code, 200)\n self.assertTrue(response.context['live_stream'])\n self.assertEqual(response.context['live_stream'].title, 'Live Stream')", "def test_bad_host():\n stream = TweetStream(\"foo\", \"bar\", url=\"http://bad.egewdvsdswefdsf.com/\")\n assert_raises(ConnectionError, stream.next)\n\n stream = FollowStream(\"foo\", \"bar\", [1, 2, 3], url=\"http://zegwefdsf.com/\")\n assert_raises(ConnectionError, stream.next)\n\n stream = TrackStream(\"foo\", \"bar\", [\"foo\"], url=\"http://aswefdsews.com/\")\n assert_raises(ConnectionError, stream.next)", "def test_stream_publish(self):\n pass", "def test_streamWaitForEvents(self):\n resource = self.eventSourceResource()\n 
response = self.render(resource)\n\n # Read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_no_events(self):\n result = self.client.get(BASE_URL, **headers)\n expected_result = {\n 'count': 0,\n 'next': None,\n 'previous': None,\n 'results': [],\n }\n self.assertDictEqual(result.data, expected_result)", "def test_no_streams(self):\n username = 'darth-vader'\n create_profile(username)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n\n self.assertEqual(response.status_code, 200)\n self.assertQuerysetEqual(response.context['future_streams'], [])\n self.assertQuerysetEqual(response.context['previous_streams'], [])\n self.assertTemplateUsed(response, 'user.html')", "def test_never_subscribed_streams(self) -> None:\n realm = get_realm(\"zulip\")\n users_to_subscribe = [\n self.example_user(\"othello\").id,\n self.example_user(\"cordelia\").id,\n ]\n\n public_streams = [\n \"test_stream_public_1\",\n \"test_stream_public_2\",\n \"test_stream_public_3\",\n \"test_stream_public_4\",\n \"test_stream_public_5\",\n ]\n\n private_streams = [\n \"test_stream_invite_only_1\",\n \"test_stream_invite_only_2\",\n ]\n\n web_public_streams = [\n \"test_stream_web_public_1\",\n \"test_stream_web_public_2\",\n ]\n\n def create_public_streams() -> None:\n for stream_name in public_streams:\n self.make_stream(stream_name, realm=realm)\n\n self.common_subscribe_to_streams(\n self.user_profile,\n public_streams,\n dict(principals=orjson.dumps(users_to_subscribe).decode()),\n )\n\n create_public_streams()\n\n def create_web_public_streams() -> None:\n for stream_name in web_public_streams:\n self.make_stream(stream_name, realm=realm, is_web_public=True)\n\n ret = self.common_subscribe_to_streams(\n self.user_profile,\n web_public_streams,\n dict(principals=orjson.dumps(users_to_subscribe).decode()),\n )\n self.assert_json_success(ret)\n\n 
create_web_public_streams()\n\n def create_private_streams() -> None:\n self.common_subscribe_to_streams(\n self.user_profile,\n private_streams,\n dict(principals=orjson.dumps(users_to_subscribe).decode()),\n invite_only=True,\n )\n\n create_private_streams()\n\n def get_never_subscribed() -> List[NeverSubscribedStreamDict]:\n with self.assert_database_query_count(4):\n sub_data = gather_subscriptions_helper(self.user_profile)\n self.verify_sub_fields(sub_data)\n never_subscribed = sub_data.never_subscribed\n\n # Ignore old streams.\n never_subscribed = [dct for dct in never_subscribed if dct[\"name\"].startswith(\"test_\")]\n return never_subscribed\n\n never_subscribed = get_never_subscribed()\n\n # Invite only stream should not be there in never_subscribed streams\n self.assert_length(never_subscribed, len(public_streams) + len(web_public_streams))\n for stream_dict in never_subscribed:\n name = stream_dict[\"name\"]\n self.assertFalse(\"invite_only\" in name)\n self.assert_length(stream_dict[\"subscribers\"], len(users_to_subscribe))\n\n # Send private stream subscribers to all realm admins.\n def test_admin_case() -> None:\n self.user_profile.role = UserProfile.ROLE_REALM_ADMINISTRATOR\n # Test realm admins can get never subscribed private stream's subscribers.\n never_subscribed = get_never_subscribed()\n\n self.assertEqual(\n len(never_subscribed),\n len(public_streams) + len(private_streams) + len(web_public_streams),\n )\n for stream_dict in never_subscribed:\n self.assert_length(stream_dict[\"subscribers\"], len(users_to_subscribe))\n\n test_admin_case()\n\n def test_guest_user_case() -> None:\n self.user_profile.role = UserProfile.ROLE_GUEST\n helper_result = gather_subscriptions_helper(self.user_profile)\n self.verify_sub_fields(helper_result)\n sub = helper_result.subscriptions\n unsub = helper_result.unsubscribed\n never_sub = helper_result.never_subscribed\n\n # It's +1 because of the stream Rome.\n self.assert_length(never_sub, 
len(web_public_streams) + 1)\n sub_ids = [stream[\"stream_id\"] for stream in sub]\n unsub_ids = [stream[\"stream_id\"] for stream in unsub]\n\n for stream_dict in never_sub:\n self.assertTrue(stream_dict[\"is_web_public\"])\n self.assertTrue(stream_dict[\"stream_id\"] not in sub_ids)\n self.assertTrue(stream_dict[\"stream_id\"] not in unsub_ids)\n\n # The Rome stream has is_web_public=True, with default\n # subscribers not set up by this test, so we do the\n # following check only for the streams we created.\n if stream_dict[\"name\"] in web_public_streams:\n self.assert_length(stream_dict[\"subscribers\"], len(users_to_subscribe))\n\n test_guest_user_case()", "def test_get_stream(self):\n pass", "def test_public_streams_api(self) -> None:\n user = self.example_user(\"hamlet\")\n realm = get_realm(\"zulip\")\n self.login_user(user)\n\n # Check it correctly lists the user's subs with include_public=false\n result = self.api_get(user, \"/api/v1/streams\", {\"include_public\": \"false\"})\n result2 = self.api_get(user, \"/api/v1/users/me/subscriptions\")\n\n json = self.assert_json_success(result)\n\n self.assertIn(\"streams\", json)\n\n self.assertIsInstance(json[\"streams\"], list)\n\n self.assert_json_success(result2)\n json2 = orjson.loads(result2.content)\n\n self.assertEqual(\n sorted(s[\"name\"] for s in json[\"streams\"]),\n sorted(s[\"name\"] for s in json2[\"subscriptions\"]),\n )\n\n # Check it correctly lists all public streams with include_subscribed=false\n filters = dict(include_public=\"true\", include_subscribed=\"false\")\n result = self.api_get(user, \"/api/v1/streams\", filters)\n json = self.assert_json_success(result)\n all_streams = [\n stream.name for stream in Stream.objects.filter(realm=realm, invite_only=False)\n ]\n self.assertEqual(sorted(s[\"name\"] for s in json[\"streams\"]), sorted(all_streams))", "def test_get_audio_stream_does_not_raise(self):\n youtube_url = \"https://www.youtube.com/watch?v=jIxas0a-KgM\"\n _ = 
utils.get_audio_stream(youtube_url)\n assert True # No error", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n self.assertTrue(d.called)\n\n # Each result from read() is another event\n for i in range(len(events)):\n if d is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_splits_streams(self):\n username = 'darth-vader'\n user = create_profile(username)\n\n now = timezone.now()\n streams = [\n {\n 'author': user,\n 'airs_on': now.replace(year=(now.year + 1)),\n 'ends_on': now.replace(hour=(now.hour - 1)),\n 'title': 'Future Stream',\n 'added_on': now\n },\n {\n 'author': user,\n 'airs_on': now.replace(year=(now.year - 1)),\n 'ends_on': now.replace(hour=(now.hour - 1)),\n 'title': 'Previous Stream',\n 'added_on': now\n }\n\n ]\n create_streams(streams)\n\n url = reverse('main_app:user', args=(username,))\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n\n future_streams = response.context['future_streams']\n previous_streams = response.context['previous_streams']\n\n self.assertTrue(len(future_streams))\n self.assertTrue(len(previous_streams))\n self.assertEqual(future_streams[0].title, 'Future Stream')\n self.assertEqual(previous_streams[0].title, 'Previous Stream')", "def 
test_json_get_subscribers_stream_not_exist(self) -> None:\n stream_id = 99999999\n result = self.client_get(f\"/json/streams/{stream_id}/members\")\n self.assert_json_error(result, \"Invalid stream ID\")", "def test_subscriptions_does_not_exist(self) -> None:\n random_streams = self.make_random_stream_names(self.streams)\n self.assertNotEqual(len(random_streams), 0) # necessary for full test coverage\n self.helper_subscriptions_exists(random_streams[0], False, False)", "def streamTest():\n timer = StoreTimer(store, duration=2.0)\n bottle.response.set_header('Content-Type', 'text/event-stream') #text\n bottle.response.set_header('Cache-Control', 'no-cache')\n # Set client-side auto-reconnect timeout, ms.\n yield 'retry: 1000\\n\\n'\n i = 0\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: START\\n\\n'\n n = 1\n while not timer.expired:\n yield 'id: {0}\\n'.format(i)\n i += 1\n yield 'data: {0}\\n\\n'.format(n)\n n += 1\n yield \"data: END\\n\\n\"", "def test_stream(self):\n with skipping(NotImplementedError):\n self.es = EventStreamsTestClass(streams='recentchange')\n limit = 50\n self.es.set_maximum_items(limit)\n self.assertLength(list(self.es), limit)", "def test_events(self):\n\n response = self.client.get(reverse('events'))\n\n assert response.status_code == 200", "def setUp(self):\n super().setUp()\n with mock.patch('pywikibot.comms.eventstreams.EventSource'):\n self.es = EventStreams(url='dummy url')", "def setUp(self):\n super().setUp()\n with mock.patch('pywikibot.comms.eventstreams.EventSource'):\n self.es = EventStreams(url='dummy url')", "def test_meeting_live_stream_update(self):\n pass", "def test_data_source_soaps_change_stream_get(self):\n pass", "def test_nonexisting_event(self):\n response = self.client.get(\"/events/1\")\n self.assertEqual(response.status_code, 404)", "def scrape_events(path, urls):\n seen_ids = set()\n result = []\n for url in urls:\n # Get all of the Network requests being sent out\n print(f'Processing {url}')\n 
driver.get(url)\n browser_log = driver.get_log('performance') \n events = [process_browser_log_entry(entry) for entry in browser_log]\n results = []\n # Find the Network request that sends a GET request to EventBrite API\n for event in events:\n if event['method'] == 'Network.responseReceived':\n # print(event)\n if 'event_ids' in event['params']['response']['url']:\n results.append(event)\n # Get the GET request URL\n get_url = \"\"\n # TODO: Sometimes returning 0 or more than 1... I'm not sure why :(\n if len(results) >= 1:\n get_url = results[0]['params']['response']['url']\n # Get the GET request response JSON\n json_response = get_request(get_url)\n event_list = json_response['events']\n # Find unique events in the response JSON \n unique_event_list = []\n for event in event_list:\n if event['id'] not in seen_ids:\n seen_ids.add(event['id'])\n unique_event_list.append(event)\n parsed_events = parse_event_page(unique_event_list)\n result.extend(parsed_events)\n else:\n print(results)\n print('yikes something went wrong')\n\n driver.close()\n return result\n # save_events(path, result)", "def test_register_stream(self):\n pass" ]
[ "0.7969818", "0.73117185", "0.7227417", "0.664857", "0.6638547", "0.64848655", "0.6393822", "0.6392778", "0.63857347", "0.6359634", "0.6291006", "0.62475723", "0.62436795", "0.6234471", "0.6136316", "0.6136007", "0.6119837", "0.61162955", "0.6111094", "0.608338", "0.6051383", "0.60392183", "0.6030362", "0.600729", "0.600729", "0.5990913", "0.5942799", "0.5936991", "0.58923876", "0.5887526" ]
0.81596744
0
Test EventStreams timeout value.
def test_timeout_setting(self): self.assertEqual(self.es.sse_kwargs.get('timeout'), config.socket_timeout)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_timeout(self) -> Optional[pulumi.Input['DurationArgs']]:\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self) -> Optional[pulumi.Input['DurationArgs']]:\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self) -> 'outputs.DurationResponse':\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self) -> 'outputs.DurationResponse':\n return pulumi.get(self, \"test_timeout\")", "def test_timeout(self):\n start = time.time()\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.03)\n # be a little lenient for slow computers:\n self.assertTrue(abs(time.time() - start) < 0.05)", "def assert_timeout(self) -> None:", "def get_test_timeout(self):\n return None", "def test_timeoutRaises(self):\n\n @self.eventloop.wait_for(timeout=0.5)\n def times_out():\n return Deferred().addErrback(lambda f: f.trap(CancelledError))\n\n start = time.time()\n self.assertRaises(TimeoutError, times_out)\n self.assertTrue(abs(time.time() - start - 0.5) < 0.1)", "def test_get_timeout():\n (read, connect) = AWSClient.get_timeout(None)\n assert read == 60 and connect == 10\n (read, connect) = AWSClient.get_timeout(\"100\")\n assert read == 100 and connect == 10\n (read, connect) = AWSClient.get_timeout(\"200,2\")\n assert read == 200 and connect == 2", "def test_socket_timeout():\n schema = vol.Schema(cv.socket_timeout)\n\n with pytest.raises(vol.Invalid):\n schema(0.0)\n\n with pytest.raises(vol.Invalid):\n schema(-1)\n\n assert schema(None) == _GLOBAL_DEFAULT_TIMEOUT\n\n assert schema(1) == 1.0", "def pytest_timeout_set_timer(item, settings):", "def test_timeout_not_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time()\n assert not connection._timeout_exceeded(start)", "def test_polling_plugin_timeout(self):\n pass", "def get_timeout(self) -> int:", "def TODO_testTimeout(self):\n return \"\"\"TODO: Highly dependent on hardcoded downstream timeout val\"\"\"\n\n # Assuming 
proxy's downstream_max is 1,\n # and number of threads is 1.\n\n self.client_connect(0)\n\n self.client_send('get time0\\r\\n', 0)\n self.mock_recv('get time0\\r\\n', 0)\n\n # Mock server is 'busy' at this point, so\n # downstream timeout logic should kick in,\n # without our mock server having to send anything.\n\n self.wait(210)\n\n self.client_recv('END\\r\\n', 0)\n\n # TODO: The number of server sessions should be 0,\n # except the close might not have propagated.", "def test_timeout_twice(self):\n dr = EventualResult(Deferred(), None)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.01)\n self.assertRaises(TimeoutError, dr.wait, timeout=0.01)", "def test_timeout(self):\n context = Context(SSLv23_METHOD)\n context.set_timeout(1234)\n assert context.get_timeout() == 1234", "def test_kafka_group_io_dataset_invalid_stream_timeout():\n\n STREAM_TIMEOUT = -20\n try:\n tfio.experimental.streaming.KafkaGroupIODataset(\n topics=[\"key-partition-test\", \"key-test\"],\n group_id=\"cgteststreaminvalid\",\n servers=\"localhost:9092\",\n stream_timeout=STREAM_TIMEOUT,\n configuration=[\"session.timeout.ms=7000\", \"max.poll.interval.ms=8000\"],\n )\n except ValueError as e:\n assert str(\n e\n ) == \"Invalid stream_timeout value: {} ,set it to -1 to block indefinitely.\".format(\n STREAM_TIMEOUT\n )", "def pytest_timeout_set_timer(item, settings):\n tle.lib.set(int(settings.timeout), str(item).encode(\"utf-8\"))\n return True", "def test_set_timeout_value_error(self, timeout):\n self.assertRaises(ValueError, self.root.set_timeout, timeout)", "def pytest_timeout_cancel_timer(item):", "def test_timeout(self):\n s1, s2 = self.create_bound_pair(zmqpy.PAIR, zmqpy.PAIR)\n poller = self.Poller()\n poller.register(s1, zmqpy.POLLIN)\n tic = time.time()\n evt = poller.poll(timeout=.005)\n toc = time.time()\n self.assertTrue(toc-tic < 0.1)\n tic = time.time()\n evt = poller.poll(timeout=5)\n toc = time.time()\n self.assertTrue(toc-tic < 0.1)\n self.assertTrue(toc-tic > .001)\n 
tic = time.time()\n evt = poller.poll(timeout=500)\n toc = time.time()\n self.assertTrue(toc-tic < 1)\n self.assertTrue(toc-tic > 0.1)", "def ReceiveTimeout(self) -> int:", "def ReceiveTimeout(self) -> int:", "def test_timeout_exceeded():\n connection = FakeBaseConnection(session_timeout=10)\n start = time.time() - 11\n try:\n connection._timeout_exceeded(start)\n except NetmikoTimeoutException as exc:\n assert isinstance(exc, NetmikoTimeoutException)\n return\n\n assert False", "def get_timeout(self):\n return self.timeout", "def wait_for_event_timeout(event):\n received = event.wait(2)\n name = threading.current_thread().getName()\n print \"Waited with timeout, got {}, name {}\".format(received, name)", "def test__put_afk_timeout_into():\n for input_value, defaults, expected_output in (\n (AFK_TIMEOUT_DEFAULT, False, {'afk_timeout': AFK_TIMEOUT_DEFAULT}),\n (60, False, {'afk_timeout': 60}),\n ):\n data = put_afk_timeout_into(input_value, {}, defaults)\n vampytest.assert_eq(data, expected_output)", "def test_timeout_invalid_start():\n connection = FakeBaseConnection(session_timeout=10)\n assert not connection._timeout_exceeded(start=0)", "def timeout(self):\n pf.debug(\"TIMEOUT\")\n self.acceptData(TIMEOUT)" ]
[ "0.7090183", "0.7090183", "0.68827915", "0.68827915", "0.68628806", "0.68519783", "0.66885316", "0.6631813", "0.6575541", "0.6501257", "0.64762914", "0.6457156", "0.6440145", "0.6437468", "0.64241856", "0.64007854", "0.6395541", "0.63854843", "0.6382834", "0.63294137", "0.6316556", "0.62694997", "0.6255841", "0.6255841", "0.6240662", "0.62376696", "0.623689", "0.622399", "0.6194096", "0.61923134" ]
0.7940405
0
Test EventStreams filter settings.
def test_filter_settings(self): self.es.register_filter(foo='bar') self.assertTrue(callable(self.es.filter['all'][0])) self.es.register_filter(bar='baz') self.assertLength(self.es.filter['all'], 2)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def set_default_filters(fprime_test_api):\n set_event_filter(fprime_test_api, \"COMMAND\", True)\n set_event_filter(fprime_test_api, \"ACTIVITY_LO\", True)\n set_event_filter(fprime_test_api, \"ACTIVITY_HI\", True)\n set_event_filter(fprime_test_api, \"WARNING_LO\", True)\n set_event_filter(fprime_test_api, \"WARNING_HI\", True)\n set_event_filter(fprime_test_api, \"DIAGNOSTIC\", False)", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, 
ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def testUsingFilterTool(self):\n pass", "def test_filter_false(self):\n self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_otoroshi_controllers_adminapi_analytics_controller_filterable_events(self):\n pass", "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_settings(self):\n def foo():\n \"\"\"Dummy function.\"\"\"\n return True\n\n self.es.register_filter(foo)\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'], [])\n\n self.es.register_filter(foo, ftype='none')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'][0], foo)\n\n self.es.register_filter(foo, ftype='any')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'][0], foo)\n self.assertEqual(self.es.filter['none'][0], foo)", "def test_filter_device(self):\n pass", "def test_filter_function_settings_fail(self):\n with self.assertRaises(TypeError):\n self.es.register_filter('test')", "def test_filter_device1(self):\n pass", "def set_event_filter(fprime_test_api, severity, enabled):\n enabled = \"ENABLED\" if enabled else \"DISABLED\"\n if isinstance(severity, FilterSeverity):\n severity = severity.name\n else:\n severity = FilterSeverity[severity].name\n try:\n fprime_test_api.send_command(\n 
\"eventLogger.SET_EVENT_FILTER\",\n [severity, enabled],\n )\n return True\n except AssertionError:\n return False", "def setup_mqtt_filters():\n mqtt.callback_filters(\n filter_fan=cbMqtt_dev_fan,\n )\n try:\n mqtt.subscribe_filters()\n except Exception as errcode:\n logger.error(\n 'MQTT subscribtion to topic filters failed with error code %s',\n errcode)", "def test_stream(self):\n with skipping(NotImplementedError):\n self.es = EventStreamsTestClass(streams='recentchange')\n limit = 50\n self.es.set_maximum_items(limit)\n self.assertLength(list(self.es), limit)", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_filters_anonymous_with_empty_events():\n event = {}\n with pytest.raises(EventKeyError):\n filters.anonymous(event)", "def test_filters_anonymous_filtering():\n event = {\"username\": \"john\"}\n anonymous_event = {\"username\": \"\"}\n assert filters.anonymous(event) == event\n assert filters.anonymous(anonymous_event) is None", "def test_brainvision_data_filters():\n with warnings.catch_warnings(record=True) as w: # event parsing\n raw = _test_raw_reader(\n read_raw_brainvision, vhdr_fname=vhdr_highpass_path,\n montage=montage, eog=eog)\n assert_true(all('parse triggers that' in str(ww.message) for ww in w))\n\n assert_equal(raw.info['highpass'], 0.1)\n assert_equal(raw.info['lowpass'], 250.)", "def test_filter_sequence_false(self):\n 
self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter(self):\n\n # Set a global filter for all items\n self.site.filter(r\"(.*)\", lambda item: item)\n # Set another filter on the index item\n self.site.filter(r\"index.html\", lambda item: item)\n\n self.assertEqual(2, len(self.site.items[\"index.html\"].filters))\n self.assertEqual(1, len(self.site.items[\"test/test.html\"].filters))", "def test_filter(self):\n self.client.ensure_path(\"/services/db/1.1.1.1\")\n self.client.ensure_path(\"/services/db/2.2.2.2\")\n self.client.ensure_path(\"/services/db/3.3.3.3\")\n self.client.ensure_path(\"/services/db/4.4.4.4\")\n self.client.set(\"/services/db/1.1.1.1\",\n json.dumps({\"enabled\": \"0\", \"weight\": \"20\"}))\n self.client.set(\"/services/db/2.2.2.2\",\n json.dumps({\"enabled\": \"1\", \"weight\": \"20\"}))\n self.client.set(\"/services/db/3.3.3.3\",\n json.dumps({\"enabled\": \"1\", \"weight\": \"10\"}))\n self.client.set(\"/services/db/4.4.4.4\",\n json.dumps({\"enabled\": \"1\", \"weight\": \"30\"}))\n z = ZkFarmExporter(self.client, \"/services/db\", self.conf,\n filter_handler=create_filter(\"enabled=1,weight>15\"))\n z.loop(2, timeout=self.TIMEOUT)\n self.conf.write.assert_called_with({\"2.2.2.2\": {\"enabled\": \"1\", \"weight\": \"20\"},\n \"4.4.4.4\": {\"enabled\": \"1\", \"weight\": \"30\"}})", "def test_filterestimator():\n raw = io.read_raw_fif(raw_fname)\n events = read_events(event_name)\n picks = pick_types(raw.info, meg=True, stim=False, ecg=False,\n eog=False, exclude='bads')\n picks = picks[1:13:3]\n epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,\n baseline=(None, 0), preload=True)\n epochs_data = epochs.get_data()\n\n # Add tests for different combinations of l_freq and h_freq\n filt = FilterEstimator(epochs.info, l_freq=40, h_freq=80,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n y = epochs.events[:, -1]\n with 
warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n assert_true(X.shape == epochs_data.shape)\n assert_array_equal(filt.fit(epochs_data, y).transform(epochs_data), X)\n\n filt = FilterEstimator(epochs.info, l_freq=None, h_freq=40,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n\n filt = FilterEstimator(epochs.info, l_freq=1, h_freq=1)\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n assert_raises(ValueError, filt.fit_transform, epochs_data, y)\n\n filt = FilterEstimator(epochs.info, l_freq=40, h_freq=None,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n\n # Test init exception\n assert_raises(ValueError, filt.fit, epochs, y)\n assert_raises(ValueError, filt.transform, epochs, y)", "def event_filters(self) -> pulumi.Input[Sequence[pulumi.Input['EventFilterArgs']]]:\n return pulumi.get(self, \"event_filters\")", "def test_filter_messages(self):\n pass", "def test_multiple_streams(self):\n streams = ('page-create', 'page-move', 'page-delete')\n e = EventStreams(streams=streams)\n combined_streams = ','.join(streams)\n self.assertEqual(\n e._url,\n 'https://stream.wikimedia.org/v2/stream/' + combined_streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertEqual(e._streams, combined_streams)" ]
[ "0.6936592", "0.69136494", "0.68039805", "0.6733757", "0.6596865", "0.6570705", "0.6478815", "0.6402157", "0.6372731", "0.63112175", "0.62709296", "0.61745006", "0.6166551", "0.6156711", "0.6093567", "0.60266864", "0.5937779", "0.5915515", "0.5845479", "0.5836323", "0.58278483", "0.5819741", "0.5784857", "0.577438", "0.57473683", "0.5717327", "0.5710453", "0.5695648", "0.5689199", "0.565017" ]
0.7220615
0
Test EventStreams filter all function.
def test_filter_function_all(self): self.es.register_filter(lambda x: True) self.assertTrue(self.es.streamfilter(self.data)) self.es.register_filter(lambda x: False) self.assertFalse(self.es.streamfilter(self.data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, 
eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_filter_sequence_false(self):\n self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_false(self):\n self.es.register_filter(foo=False)\n 
self.assertFalse(self.es.streamfilter(self.data))", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filters_anonymous_with_empty_events():\n event = {}\n with pytest.raises(EventKeyError):\n filters.anonymous(event)", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He,b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a 
session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_otoroshi_controllers_adminapi_analytics_controller_filterable_events(self):\n pass", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He] FooEvent[b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n 
session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_streamBufferedEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n resource.addEvents(events)\n\n response = self.render(resource)\n\n # Each result from read() is another event\n for i in range(len(events)):\n result = yield response.stream.read()\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=events[i][\"eventID\"]\n )\n )", "def test_filter_function_settings(self):\n def foo():\n \"\"\"Dummy function.\"\"\"\n return True\n\n self.es.register_filter(foo)\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'], [])\n\n self.es.register_filter(foo, ftype='none')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'][0], foo)\n\n self.es.register_filter(foo, ftype='any')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'][0], foo)\n self.assertEqual(self.es.filter['none'][0], foo)", "def test_filters_anonymous_filtering():\n event = {\"username\": \"john\"}\n anonymous_event = {\"username\": \"\"}\n assert filters.anonymous(event) == event\n assert filters.anonymous(anonymous_event) is None", "def test_valid_function_emitted_events(self):\n\t\tsource = \"\"\"\n\t\t\tpragma solidity ^0.4.22;\n\t\t\tcontract testContract {\n\t\t\t\tevent TestEvent(uint t);\n\t\t\t\tfunction testFunction () public returns (string) {\n\t\t\t\t\tuint foo = 
5;\n\t\t\t\t\temit TestEvent(foo);\n\t\t\t\t\tuint bar = uint8(foo);\n\t\t\t\t\trequire(foo > 5);\n\t\t\t\t\treturn 'helloWorld';\n\t\t\t\t}\n\t\t\t}\n\t\t\"\"\"\n\t\tevents, statements = self.before_test(source)\n\t\tself.assertTrue(len(statements) > 0)\n\t\tself.assertTrue(len(events) == 1)\n\t\tstatement_under_test = filter_statements(events, statements)\n\t\tself.assertTrue(len(statement_under_test) == 3)", "def test_filter_device1(self):\n pass", "def test_filter_messages(self):\n pass", "def test_filter_device(self):\n pass", "def test_attrib_loose_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~=u?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n self.assertTrue(d.called)\n\n # 
Each result from read() is another event\n for i in range(len(events)):\n if d is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_streamWaitForEvents(self):\n resource = self.eventSourceResource()\n response = self.render(resource)\n\n # Read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_query_events(self):\n query_list = {\n 'q': 'test',\n 'type': 'show'\n }\n results = query_events(query_list)\n events = list(results['events'])\n showcase = list(results['showcase_events'])\n self.assertTrue(self.event_show1 in events)\n self.assertTrue(self.event_show2 in showcase)\n self.assertFalse(self.event_film in events)", "def test_filter_settings(self):\n self.es.register_filter(foo='bar')\n self.assertTrue(callable(self.es.filter['all'][0]))\n self.es.register_filter(bar='baz')\n self.assertLength(self.es.filter['all'], 2)", "def test_filter_mixed_function(self):\n for none_type in (False, True):\n for all_type in (False, True):\n for any_type in (False, True, None):\n result = none_type is False and all_type is True \\\n and (any_type is None or any_type is True)\n self._test_filter(none_type, all_type, any_type, result)" ]
[ "0.77774274", "0.76616883", "0.7252225", "0.71493006", "0.69759494", "0.69710374", "0.6735299", "0.6721623", "0.671708", "0.6675768", "0.6571128", "0.655881", "0.651373", "0.65121555", "0.6465832", "0.6444786", "0.643058", "0.6394015", "0.6354946", "0.6305483", "0.63004845", "0.62843394", "0.62803066", "0.6266241", "0.61969924", "0.61947614", "0.6157133", "0.6146304", "0.61322904", "0.61227435" ]
0.83218205
0
Test EventStreams filter any function.
def test_filter_function_any(self): self.es.register_filter(lambda x: True, ftype='any') self.assertTrue(self.es.streamfilter(self.data)) self.es.register_filter(lambda x: False, ftype='any') self.assertTrue(self.es.streamfilter(self.data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filters_anonymous_with_empty_events():\n event = {}\n with pytest.raises(EventKeyError):\n filters.anonymous(event)", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start 
a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_filters_anonymous_filtering():\n event = {\"username\": \"john\"}\n anonymous_event = {\"username\": \"\"}\n assert filters.anonymous(event) == event\n assert filters.anonymous(anonymous_event) is None", "def test_filter_function_settings(self):\n def foo():\n \"\"\"Dummy function.\"\"\"\n return True\n\n self.es.register_filter(foo)\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'], [])\n\n self.es.register_filter(foo, ftype='none')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'][0], foo)\n\n self.es.register_filter(foo, ftype='any')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'][0], foo)\n self.assertEqual(self.es.filter['none'][0], foo)", "def test_filter_sequence_false(self):\n self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_false(self):\n 
self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The 
BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_valid_function_emitted_events(self):\n\t\tsource = \"\"\"\n\t\t\tpragma solidity ^0.4.22;\n\t\t\tcontract testContract {\n\t\t\t\tevent TestEvent(uint t);\n\t\t\t\tfunction testFunction () public returns (string) {\n\t\t\t\t\tuint foo = 5;\n\t\t\t\t\temit TestEvent(foo);\n\t\t\t\t\tuint bar = uint8(foo);\n\t\t\t\t\trequire(foo > 5);\n\t\t\t\t\treturn 'helloWorld';\n\t\t\t\t}\n\t\t\t}\n\t\t\"\"\"\n\t\tevents, statements = self.before_test(source)\n\t\tself.assertTrue(len(statements) > 0)\n\t\tself.assertTrue(len(events) == 1)\n\t\tstatement_under_test = filter_statements(events, statements)\n\t\tself.assertTrue(len(statement_under_test) == 3)", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_filter_mixed_function(self):\n for none_type in (False, True):\n for all_type in (False, True):\n for any_type in (False, True, None):\n result = none_type is False and all_type is True \\\n and (any_type is None or any_type is True)\n 
self._test_filter(none_type, all_type, any_type, result)", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He,b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He] FooEvent[b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n 
call(fooEvent1),\n call(fooEvent2),\n ])", "def test_otoroshi_controllers_adminapi_analytics_controller_filterable_events(self):\n pass", "def test_attrib_loose_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~=u?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_filter_device(self):\n pass", "def test_filter_device1(self):\n pass", "def test_attrib_exact_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~==^H.*?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is 
finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def testUsingFilterTool(self):\n pass", "def __stream__(myStream, **kwargs):\n print(kwargs)\n d = kwargs\n myStream.filter(**d)", "def test_filter_wea_zero_entry():\n pass", "def test_attrib_expr(self):\n\n eventFilter = EventFilter(\"FooEvent[a.'some'.'dict'=1]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a={'some': {'dict':1}}, b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should not be handled\n fooEvent2 = FooEvent(a={'some': {'other':1}}, b=\"Zo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])", "def test(ctx, filter=\"*\", verbose=False):\n test_python(ctx, filter, verbose)" ]
[ "0.8067357", "0.8008988", "0.76024616", "0.6966291", "0.6937647", "0.6935507", "0.6891078", "0.68547994", "0.6800666", "0.6789117", "0.6758554", "0.6741513", "0.6719263", "0.6610594", "0.65932226", "0.6562112", "0.6465409", "0.64569443", "0.63503873", "0.62503487", "0.6247966", "0.6219783", "0.6162798", "0.6138747", "0.6105074", "0.60939187", "0.60434705", "0.6020701", "0.59984374", "0.5985837" ]
0.82227045
0
Test EventStreams filter none function.
def test_filter_function_none(self): self.es.register_filter(lambda x: False, ftype='none') self.assertTrue(self.es.streamfilter(self.data)) self.es.register_filter(lambda x: True, ftype='none') self.assertFalse(self.es.streamfilter(self.data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_false(self):\n self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filters_anonymous_with_empty_events():\n event = {}\n with pytest.raises(EventKeyError):\n filters.anonymous(event)", "def test_filter_sequence_false(self):\n self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filters_anonymous_filtering():\n event = {\"username\": \"john\"}\n anonymous_event = {\"username\": \"\"}\n assert filters.anonymous(event) == event\n assert filters.anonymous(anonymous_event) is None", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n 
self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def test_filter_wea_zero_entry():\n pass", "def test_url_missing_streams(self):\n with self.assertRaises(NotImplementedError):\n EventStreams()", "def test_filter_function_settings(self):\n def foo():\n \"\"\"Dummy function.\"\"\"\n return True\n\n self.es.register_filter(foo)\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'], [])\n\n self.es.register_filter(foo, ftype='none')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'][0], foo)\n\n self.es.register_filter(foo, ftype='any')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'][0], foo)\n self.assertEqual(self.es.filter['none'][0], foo)", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = 
BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_filter_device(self):\n pass", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent 
should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_streamWaitForEvents(self):\n resource = self.eventSourceResource()\n response = self.render(resource)\n\n # Read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_filter_device1(self):\n pass", "def test_filter_mixed_function(self):\n for none_type in (False, True):\n for all_type in (False, True):\n for any_type in (False, True, None):\n result = none_type is False and all_type is True \\\n and (any_type is None or any_type is True)\n self._test_filter(none_type, all_type, any_type, result)", "def test_no():\n errors = generate_errors(10, 5)\n assert NoFiltering().filter(errors) == errors", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def 
test_filter_messages_non_message(self):\n pass", "def test_filter_messages_empty_data(self):\n pass", "def filter(self, event: \"TraceEvent\") -> Union[None, \"TraceEvent\", Sequence[\"TraceEvent\"]]:\n raise NotImplementedError()", "def test_otoroshi_controllers_adminapi_analytics_controller_filterable_events(self):\n pass", "def test_filter_with_empty_filters(mockdata, qfilter):\n assert len(qfilter.filter(mockdata)) == 100", "def test_apply_filter_none(app):\n with app.app_context():\n users = User.query\n users = apply_filter(users, User, {})\n assert users.whereclause is None", "def test_filter_messages(self):\n pass" ]
[ "0.7962954", "0.7779999", "0.75724334", "0.7487395", "0.7410826", "0.7252385", "0.723975", "0.6878008", "0.6869562", "0.669162", "0.6551662", "0.644308", "0.638963", "0.63646346", "0.63293976", "0.6323907", "0.63005215", "0.6296376", "0.627124", "0.6240032", "0.6220857", "0.6220402", "0.61838454", "0.6148436", "0.61368227", "0.60983664", "0.6067301", "0.60133636", "0.6006626", "0.59841263" ]
0.8336108
0
Test EventStreams filter with assignment of an int value.
def test_filter_value(self): self.es.register_filter(foo=10) self.assertFalse(self.es.streamfilter(self.data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_wea_zero_entry():\n pass", "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_false(self):\n self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_settings(self):\n self.es.register_filter(foo='bar')\n self.assertTrue(callable(self.es.filter['all'][0]))\n self.es.register_filter(bar='baz')\n self.assertLength(self.es.filter['all'], 2)", "def test_filter_sequence_false(self):\n 
self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_window_filter(self):\n test_window_scheme = WindowingScheme(self.window_test_filter, 5)\n filtered_value = test_window_scheme.filter(self.middle_value)\n self.assertEquals(filtered_value, self.middle_value)", "def test_filter_function_settings(self):\n def foo():\n \"\"\"Dummy function.\"\"\"\n return True\n\n self.es.register_filter(foo)\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'], [])\n\n self.es.register_filter(foo, ftype='none')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'], [])\n self.assertEqual(self.es.filter['none'][0], foo)\n\n self.es.register_filter(foo, ftype='any')\n self.assertEqual(self.es.filter['all'][0], foo)\n self.assertEqual(self.es.filter['any'][0], foo)\n self.assertEqual(self.es.filter['none'][0], foo)", "def test_filter_device1(self):\n pass", "def setFilter(self, type: int, filter: int) -> None:\n ...", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def test_process_filter_value():\n now = dt.utcnow()\n now_ts = now.timestamp()\n filter_ = {'column': \"ts_created_at\", 'value': now_ts, type: 'leq'}\n assert process_filter_value(filter_) == now\n\n filter_ = {'column': \"created_at\", 'value': now_ts, type: 'leq'}\n assert process_filter_value(filter_) == now_ts", "def test_source_with_int_value():\n source = festim.Source(2, volume=1, field=\"solute\")\n assert 
isinstance(source.value, f.Constant)", "def listener(event):\n if \"test\" in event.data:\n test_value.append(1)", "def test_filterestimator():\n raw = io.read_raw_fif(raw_fname)\n events = read_events(event_name)\n picks = pick_types(raw.info, meg=True, stim=False, ecg=False,\n eog=False, exclude='bads')\n picks = picks[1:13:3]\n epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,\n baseline=(None, 0), preload=True)\n epochs_data = epochs.get_data()\n\n # Add tests for different combinations of l_freq and h_freq\n filt = FilterEstimator(epochs.info, l_freq=40, h_freq=80,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n assert_true(X.shape == epochs_data.shape)\n assert_array_equal(filt.fit(epochs_data, y).transform(epochs_data), X)\n\n filt = FilterEstimator(epochs.info, l_freq=None, h_freq=40,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n\n filt = FilterEstimator(epochs.info, l_freq=1, h_freq=1)\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n assert_raises(ValueError, filt.fit_transform, epochs_data, y)\n\n filt = FilterEstimator(epochs.info, l_freq=40, h_freq=None,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n\n # Test init exception\n assert_raises(ValueError, filt.fit, epochs, y)\n assert_raises(ValueError, filt.transform, epochs, y)", "def filter(n='I'):\n if n=='':\n n = 'I'\n if type(n) == str:\n fid = filtid(n)\n fnum = filtnum(fid)\n opticalcoupler.SelectFilter(fnum)\n 
camera.status.filterid = fid\n camera.status.filter = fnum\n logger.info('Moved to filter '+`n`)\n else:\n if (n>=1) and (n<=8):\n opticalcoupler.SelectFilter(n)\n camera.status.filterid = filtid(filtname(n))\n camera.status.filter = n\n logger.info('Moved to filter '+`n`)\n else:\n logger.error(\"Error in filter value: \"+repr(n))", "def test_filter_function_settings_fail(self):\n with self.assertRaises(TypeError):\n self.es.register_filter('test')", "def set_FilterValue(self, value):\n super(GetCallbackDataInputSet, self)._set_input('FilterValue', value)", "def set_event_filter(fprime_test_api, severity, enabled):\n enabled = \"ENABLED\" if enabled else \"DISABLED\"\n if isinstance(severity, FilterSeverity):\n severity = severity.name\n else:\n severity = FilterSeverity[severity].name\n try:\n fprime_test_api.send_command(\n \"eventLogger.SET_EVENT_FILTER\",\n [severity, enabled],\n )\n return True\n except AssertionError:\n return False", "def test_filters_anonymous_filtering():\n event = {\"username\": \"john\"}\n anonymous_event = {\"username\": \"\"}\n assert filters.anonymous(event) == event\n assert filters.anonymous(anonymous_event) is None", "def test_filter_device(self):\n pass", "def testOnValueEventIgnoresIrrelevantOps(self):\n # Receive a DebugNumericSummary event.\n numeric_summary_event = self._create_event_with_float_tensor(\n \"MatMul\", 42, \"DebugNumericSummary\", list(range(1, 15)))\n self.stream_handler.on_value_event(numeric_summary_event)\n\n # Receive a non-DebugNumericSummary event.\n self.stream_handler.on_value_event(\n self._create_event_with_float_tensor(\"add\", 0, \"DebugIdentity\",\n list(range(1, 15))))\n\n # The stream handler should have only written the DebugNumericSummary event\n # to disk.\n self._verify_event_lists_have_same_tensor_values([numeric_summary_event],\n self.events_written)", "def test_filter(self):\n self.client.ensure_path(\"/services/db/1.1.1.1\")\n self.client.ensure_path(\"/services/db/2.2.2.2\")\n 
self.client.ensure_path(\"/services/db/3.3.3.3\")\n self.client.ensure_path(\"/services/db/4.4.4.4\")\n self.client.set(\"/services/db/1.1.1.1\",\n json.dumps({\"enabled\": \"0\", \"weight\": \"20\"}))\n self.client.set(\"/services/db/2.2.2.2\",\n json.dumps({\"enabled\": \"1\", \"weight\": \"20\"}))\n self.client.set(\"/services/db/3.3.3.3\",\n json.dumps({\"enabled\": \"1\", \"weight\": \"10\"}))\n self.client.set(\"/services/db/4.4.4.4\",\n json.dumps({\"enabled\": \"1\", \"weight\": \"30\"}))\n z = ZkFarmExporter(self.client, \"/services/db\", self.conf,\n filter_handler=create_filter(\"enabled=1,weight>15\"))\n z.loop(2, timeout=self.TIMEOUT)\n self.conf.write.assert_called_with({\"2.2.2.2\": {\"enabled\": \"1\", \"weight\": \"20\"},\n \"4.4.4.4\": {\"enabled\": \"1\", \"weight\": \"30\"}})", "def testUsingFilterTool(self):\n pass", "def test(ctx, filter=\"*\", verbose=False):\n test_python(ctx, filter, verbose)", "def test_prefilter_check(self):\r\n def handler(event):\r\n pass\r\n\r\n self.assertRaises(Exception, self.events.register, handler, PrefilterTest_1)\r\n self.assertRaises(Exception, self.events.register, handler, PrefilterTest_2)\r\n\r\n self.events.register(handler, PrefilterTest_1, require='foo')\r\n self.events.register(handler, PrefilterTest_2, require='foo')\r\n\r\n self.events.register(handler, PrefilterTest_1, require='foo', optional='bar')\r\n self.events.register(handler, PrefilterTest_2, require='foo', optional='bar')\r\n\r\n self.assertRaises(Exception, self.events.register, handler, PrefilterTest_1,\r\n require='foo', optional='bar', fooarg='excess argument')\r\n self.events.register(handler, PrefilterTest_2,\r\n require='foo', optional='bar', fooarg='excess argument')" ]
[ "0.6781209", "0.6612637", "0.6382851", "0.63438815", "0.62980855", "0.62586474", "0.61791056", "0.6133874", "0.6049616", "0.6016297", "0.5830464", "0.57639474", "0.5736903", "0.5674254", "0.5672789", "0.56098026", "0.5580512", "0.5575643", "0.55115247", "0.55025834", "0.54811674", "0.54595244", "0.5449152", "0.543572", "0.54348904", "0.5421934", "0.5389105", "0.5379215", "0.5314431", "0.5291496" ]
0.76163733
0
Test EventStreams filter with assignment of a sequence.
def test_filter_sequence_false(self): self.es.register_filter(bar=list('baz')) self.assertFalse(self.es.streamfilter(self.data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_sequence(seq_arg, seq_src, seq_dest):\n args = parser.parse_args(['-seq', *seq_arg])\n filters = renamer.initfilters(args)\n dest = renamer.get_renames(seq_src, filters, args.extension, args.raw)\n assert dest == seq_dest", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n 
session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He,b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He] FooEvent[b=Lo]\")\n\n # 
Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def test_multiple_streams(self, dummy_streamers, dummy_receivers):\n dummy_ids = [source_id for _, _, source_id, _ in dummy_streamers]\n source_ids = [receiver._source_id\n for _, receiver in dummy_receivers.items()]\n assert set(source_ids) == set(dummy_ids)", "def test_quality_filter_sequence_pass(self):\r\n header = \"990:2:4:11271:5323#1/1\"\r\n sequence = \\\r\n \"GCACTCACCGCCCGTCACACCACGAAAGTTGGTAACACCCGAAGCCGGTGAGATAACCTTTTAGGAGTCAGCTGTC\"\r\n quality = \\\r\n \"bbbbbbbbbbbbbbbbbbbbbbbbbY``\\`bbbbbbbbbbbbb`bbbbab`a`_[ba_aa]b^_bIWTTQ^YR^U`\"\r\n actual = 
quality_filter_sequence(header,\r\n sequence,\r\n quality,\r\n max_bad_run_length=0,\r\n phred_quality_threshold=2,\r\n min_per_read_length=75,\r\n seq_max_N=0,\r\n filter_bad_illumina_qual_digit=True)\r\n self.assertEqual(actual, (0,\r\n \"GCACTCACCGCCCGTCACACCACGAAAGTTGGTAACACCCGAAGCCGGTGAGATAACCTTTTAGGAGTCAGCTGTC\",\r\n \"bbbbbbbbbbbbbbbbbbbbbbbbbY``\\`bbbbbbbbbbbbb`bbbbab`a`_[ba_aa]b^_bIWTTQ^YR^U`\"))", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n 
call(fooEvent1),\n call(fooEvent2),\n ])", "def test_filter_false(self):\n self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_data_source_soaps_change_stream_get(self):\n pass", "def test_streamable_subset(self):\n test_streamable_subset = False\n self.encoder._streamable_subset = test_streamable_subset\n self.assertEqual(self.encoder._streamable_subset, test_streamable_subset)", "async def test_pipeline_from_audio_stream(\n hass: HomeAssistant, mock_stt_provider, init_components, snapshot: SnapshotAssertion\n) -> None:\n\n events = []\n\n async def audio_data():\n yield b\"part1\"\n yield b\"part2\"\n yield b\"\"\n\n await assist_pipeline.async_pipeline_from_audio_stream(\n hass,\n Context(),\n events.append,\n stt.SpeechMetadata(\n language=\"\",\n format=stt.AudioFormats.WAV,\n codec=stt.AudioCodecs.PCM,\n bit_rate=stt.AudioBitRates.BITRATE_16,\n sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,\n channel=stt.AudioChannels.CHANNEL_MONO,\n ),\n audio_data(),\n )\n\n processed = []\n for event in events:\n as_dict = asdict(event)\n as_dict.pop(\"timestamp\")\n processed.append(as_dict)\n\n assert processed == snapshot\n assert mock_stt_provider.received == [b\"part1\", b\"part2\"]", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n 
self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_attrib_loose_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~=u?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n self.assertTrue(d.called)\n\n # Each result from read() is 
another event\n for i in range(len(events)):\n if d is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_attrib_expr(self):\n\n eventFilter = EventFilter(\"FooEvent[a.'some'.'dict'=1]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a={'some': {'dict':1}}, b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should not be handled\n fooEvent2 = FooEvent(a={'some': {'other':1}}, b=\"Zo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])", "def test_filter_wea_zero_entry():\n pass", "def test_streamBufferedEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n resource.addEvents(events)\n\n response = self.render(resource)\n\n # Each result from read() is another event\n for i in range(len(events)):\n result = yield response.stream.read()\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n 
eventID=events[i][\"eventID\"]\n )\n )", "def test_get_future_events(self):\n events = list(get_future_events())\n self.assertFalse(self.event_show1 in events)\n self.assertTrue(self.event_show2 in events)", "def test_valid_function_emitted_events(self):\n\t\tsource = \"\"\"\n\t\t\tpragma solidity ^0.4.22;\n\t\t\tcontract testContract {\n\t\t\t\tevent TestEvent(uint t);\n\t\t\t\tfunction testFunction () public returns (string) {\n\t\t\t\t\tuint foo = 5;\n\t\t\t\t\temit TestEvent(foo);\n\t\t\t\t\tuint bar = uint8(foo);\n\t\t\t\t\trequire(foo > 5);\n\t\t\t\t\treturn 'helloWorld';\n\t\t\t\t}\n\t\t\t}\n\t\t\"\"\"\n\t\tevents, statements = self.before_test(source)\n\t\tself.assertTrue(len(statements) > 0)\n\t\tself.assertTrue(len(events) == 1)\n\t\tstatement_under_test = filter_statements(events, statements)\n\t\tself.assertTrue(len(statement_under_test) == 3)", "def test_data_source_soaps_change_stream_post(self):\n pass", "def test_attrib_exact_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~==^H.*?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])" ]
[ "0.7480587", "0.631979", "0.63094825", "0.630064", "0.6229207", "0.62180775", "0.59581566", "0.59251046", "0.59033144", "0.58696556", "0.57505023", "0.57444876", "0.5740344", "0.57033825", "0.570173", "0.56660205", "0.56613576", "0.56367373", "0.5618477", "0.5604387", "0.560303", "0.55743015", "0.55718607", "0.5543709", "0.55056775", "0.5490486", "0.54689324", "0.5467321", "0.54642177", "0.54609513" ]
0.68394476
1
Test EventStreams filter with assignment of a sequence.
def test_filter_sequence_true(self): self.es.register_filter(bar=('foo', 'bar', 'baz')) self.assertTrue(self.es.streamfilter(self.data))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def test_filter_sequence_false(self):\n self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_sequence(seq_arg, seq_src, seq_dest):\n args = parser.parse_args(['-seq', *seq_arg])\n filters = renamer.initfilters(args)\n dest = renamer.get_renames(seq_src, filters, args.extension, args.raw)\n assert dest == seq_dest", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n 
self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He,b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He] FooEvent[b=Lo]\")\n\n # Start a session\n traceids = 
['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_filter_multiple(self):\n self.es.register_filter(foo=False, bar='baz')\n self.assertFalse(self.es.streamfilter(self.data))\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(foo=True, bar='baz')\n self.assertTrue(self.es.streamfilter(self.data))\n # check whether filter functions are different\n f, g = self.es.filter['all']\n c = {'foo': True}\n self.assertNotEqual(f(c), g(c))\n c = {'bar': 'baz'}\n self.assertNotEqual(f(c), g(c))", "def test_multiple_streams(self, dummy_streamers, dummy_receivers):\n dummy_ids = [source_id for _, _, source_id, _ in dummy_streamers]\n source_ids = [receiver._source_id\n for _, receiver in dummy_receivers.items()]\n assert set(source_ids) == set(dummy_ids)", "def test_quality_filter_sequence_pass(self):\r\n header = \"990:2:4:11271:5323#1/1\"\r\n sequence = \\\r\n \"GCACTCACCGCCCGTCACACCACGAAAGTTGGTAACACCCGAAGCCGGTGAGATAACCTTTTAGGAGTCAGCTGTC\"\r\n quality = \\\r\n \"bbbbbbbbbbbbbbbbbbbbbbbbbY``\\`bbbbbbbbbbbbb`bbbbab`a`_[ba_aa]b^_bIWTTQ^YR^U`\"\r\n actual = quality_filter_sequence(header,\r\n sequence,\r\n 
quality,\r\n max_bad_run_length=0,\r\n phred_quality_threshold=2,\r\n min_per_read_length=75,\r\n seq_max_N=0,\r\n filter_bad_illumina_qual_digit=True)\r\n self.assertEqual(actual, (0,\r\n \"GCACTCACCGCCCGTCACACCACGAAAGTTGGTAACACCCGAAGCCGGTGAGATAACCTTTTAGGAGTCAGCTGTC\",\r\n \"bbbbbbbbbbbbbbbbbbbbbbbbbY``\\`bbbbbbbbbbbbb`bbbbab`a`_[ba_aa]b^_bIWTTQ^YR^U`\"))", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def 
test_filter_false(self):\n self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_data_source_soaps_change_stream_get(self):\n pass", "def test_streamable_subset(self):\n test_streamable_subset = False\n self.encoder._streamable_subset = test_streamable_subset\n self.assertEqual(self.encoder._streamable_subset, test_streamable_subset)", "async def test_pipeline_from_audio_stream(\n hass: HomeAssistant, mock_stt_provider, init_components, snapshot: SnapshotAssertion\n) -> None:\n\n events = []\n\n async def audio_data():\n yield b\"part1\"\n yield b\"part2\"\n yield b\"\"\n\n await assist_pipeline.async_pipeline_from_audio_stream(\n hass,\n Context(),\n events.append,\n stt.SpeechMetadata(\n language=\"\",\n format=stt.AudioFormats.WAV,\n codec=stt.AudioCodecs.PCM,\n bit_rate=stt.AudioBitRates.BITRATE_16,\n sample_rate=stt.AudioSampleRates.SAMPLERATE_16000,\n channel=stt.AudioChannels.CHANNEL_MONO,\n ),\n audio_data(),\n )\n\n processed = []\n for event in events:\n as_dict = asdict(event)\n as_dict.pop(\"timestamp\")\n processed.append(as_dict)\n\n assert processed == snapshot\n assert mock_stt_provider.received == [b\"part1\", b\"part2\"]", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n 
call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_attrib_loose_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~=u?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_streamNewEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n\n response = self.render(resource)\n\n # The first read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n # Add some events\n resource.addEvents(events)\n\n # We should now be unblocked\n self.assertTrue(d.called)\n\n # Each result from read() is another event\n for i in range(len(events)):\n if d 
is None:\n result = yield response.stream.read()\n else:\n result = yield d\n d = None\n\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=(events[i][\"eventID\"])\n )\n )\n\n # The next read should block on new events.\n d = response.stream.read()\n self.assertFalse(d.called)\n\n d.addErrback(lambda f: None)\n d.cancel()", "def test_attrib_expr(self):\n\n eventFilter = EventFilter(\"FooEvent[a.'some'.'dict'=1]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a={'some': {'dict':1}}, b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should not be handled\n fooEvent2 = FooEvent(a={'some': {'other':1}}, b=\"Zo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])", "def test_filter_wea_zero_entry():\n pass", "def test_streamBufferedEvents(self):\n events = (\n dict(eventID=u\"1\", eventText=u\"A\"),\n dict(eventID=u\"2\", eventText=u\"B\"),\n dict(eventID=u\"3\", eventText=u\"C\"),\n dict(eventID=u\"4\", eventText=u\"D\"),\n )\n\n resource = self.eventSourceResource()\n resource.addEvents(events)\n\n response = self.render(resource)\n\n # Each result from read() is another event\n for i in range(len(events)):\n result = yield response.stream.read()\n self.assertEquals(\n result,\n textAsEvent(\n text=events[i][\"eventText\"],\n eventID=events[i][\"eventID\"]\n )\n )", "def 
test_get_future_events(self):\n events = list(get_future_events())\n self.assertFalse(self.event_show1 in events)\n self.assertTrue(self.event_show2 in events)", "def test_valid_function_emitted_events(self):\n\t\tsource = \"\"\"\n\t\t\tpragma solidity ^0.4.22;\n\t\t\tcontract testContract {\n\t\t\t\tevent TestEvent(uint t);\n\t\t\t\tfunction testFunction () public returns (string) {\n\t\t\t\t\tuint foo = 5;\n\t\t\t\t\temit TestEvent(foo);\n\t\t\t\t\tuint bar = uint8(foo);\n\t\t\t\t\trequire(foo > 5);\n\t\t\t\t\treturn 'helloWorld';\n\t\t\t\t}\n\t\t\t}\n\t\t\"\"\"\n\t\tevents, statements = self.before_test(source)\n\t\tself.assertTrue(len(statements) > 0)\n\t\tself.assertTrue(len(events) == 1)\n\t\tstatement_under_test = filter_statements(events, statements)\n\t\tself.assertTrue(len(statement_under_test) == 3)", "def test_data_source_soaps_change_stream_post(self):\n pass", "def test_attrib_exact_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~==^H.*?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])" ]
[ "0.6839882", "0.63193744", "0.63094485", "0.6301365", "0.62288755", "0.6219979", "0.5957509", "0.5924198", "0.5902188", "0.58685464", "0.57498527", "0.5746132", "0.5741277", "0.5704606", "0.57019025", "0.5666215", "0.566152", "0.5637792", "0.56193954", "0.5604694", "0.56026226", "0.5573076", "0.55728596", "0.5542411", "0.55071187", "0.5491438", "0.54706997", "0.5468395", "0.54640496", "0.5460001" ]
0.7480586
0
Test EventStreams filter with multiple arguments.
def test_filter_multiple(self): self.es.register_filter(foo=False, bar='baz') self.assertFalse(self.es.streamfilter(self.data)) self.es.filter = {'all': [], 'any': [], 'none': []} self.es.register_filter(foo=True, bar='baz') self.assertTrue(self.es.streamfilter(self.data)) # check whether filter functions are different f, g = self.es.filter['all'] c = {'foo': True} self.assertNotEqual(f(c), g(c)) c = {'bar': 'baz'} self.assertNotEqual(f(c), g(c))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _test_filter(self, none_type, all_type, any_type, result):\n self.es.filter = {'all': [], 'any': [], 'none': []}\n self.es.register_filter(lambda x: none_type, ftype='none')\n self.es.register_filter(lambda x: all_type, ftype='all')\n if any_type is not None:\n self.es.register_filter(lambda x: any_type, ftype='any')\n self.assertEqual(self.es.streamfilter(self.data), result,\n 'Test EventStreams filter mixed function failed for\\n'\n \"'none': {}, 'all': {}, 'any': {}\\n\"\n '(expected {}, given {})'\n .format(none_type, all_type, any_type,\n result, not result))", "def test_filter_function_all(self):\n self.es.register_filter(lambda x: True)\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_filter_sequence_true(self):\n self.es.register_filter(bar=('foo', 'bar', 'baz'))\n self.assertTrue(self.es.streamfilter(self.data))", "def test_filter_function_any(self):\n self.es.register_filter(lambda x: True, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: False, ftype='any')\n self.assertTrue(self.es.streamfilter(self.data))", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He,b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No 
more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_filter_true(self):\n self.es.register_filter(foo=True)\n self.assertTrue(self.es.streamfilter(self.data))", "def test_multi_attrib_and(self):\n\n eventFilter = EventFilter(\"FooEvent[a=He] FooEvent[b=Lo]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"He\", b=\"Zo\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"He\", b=\"Lo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "def test_filter_value(self):\n self.es.register_filter(foo=10)\n self.assertFalse(self.es.streamfilter(self.data))", "def __stream__(myStream, **kwargs):\n print(kwargs)\n d = kwargs\n myStream.filter(**d)", "def test_filter_sequence_false(self):\n self.es.register_filter(bar=list('baz'))\n self.assertFalse(self.es.streamfilter(self.data))", "def test_or_operator(self):\n\n eventFilter = EventFilter(\"FooEvent BarEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n 
self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # The BazEvent should not be handled\n bazEvent1 = BazEvent(traceid=traceids)\n session.handle(bazEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_filter_function_none(self):\n self.es.register_filter(lambda x: False, ftype='none')\n self.assertTrue(self.es.streamfilter(self.data))\n self.es.register_filter(lambda x: True, ftype='none')\n self.assertFalse(self.es.streamfilter(self.data))", "def test_prefilter_check(self):\r\n def handler(event):\r\n pass\r\n\r\n self.assertRaises(Exception, self.events.register, handler, PrefilterTest_1)\r\n self.assertRaises(Exception, self.events.register, handler, PrefilterTest_2)\r\n\r\n self.events.register(handler, PrefilterTest_1, require='foo')\r\n self.events.register(handler, PrefilterTest_2, require='foo')\r\n\r\n self.events.register(handler, PrefilterTest_1, require='foo', optional='bar')\r\n self.events.register(handler, PrefilterTest_2, require='foo', optional='bar')\r\n\r\n self.assertRaises(Exception, self.events.register, handler, PrefilterTest_1,\r\n require='foo', optional='bar', fooarg='excess argument')\r\n self.events.register(handler, PrefilterTest_2,\r\n require='foo', optional='bar', fooarg='excess argument')", "def 
test_apply_filter_multiple(app):\n with app.app_context():\n filters = [{'column': 'id', 'type': 'geq',\n 'value': '1'}, {'column': 'last_seen', 'type': 'leq',\n 'value': 121212121}]\n users = User.query\n for filter_ in filters:\n users = apply_filter(users, User, filter_)\n\n assert str(users.whereclause) == \\\n 'users.id >= :id_1 AND users.last_seen <= :last_seen_1'", "def test_multiple_streams(self):\n streams = ('page-create', 'page-move', 'page-delete')\n e = EventStreams(streams=streams)\n combined_streams = ','.join(streams)\n self.assertEqual(\n e._url,\n 'https://stream.wikimedia.org/v2/stream/' + combined_streams)\n self.assertEqual(e._url, e.url)\n self.assertEqual(e._url, e.sse_kwargs.get('url'))\n self.assertEqual(e._streams, combined_streams)", "def test(ctx, filter=\"*\", verbose=False):\n test_python(ctx, filter, verbose)", "def filter(self, *args, **kwargs):", "def test_filter_settings(self):\n self.es.register_filter(foo='bar')\n self.assertTrue(callable(self.es.filter['all'][0]))\n self.es.register_filter(bar='baz')\n self.assertLength(self.es.filter['all'], 2)", "def test_filters_anonymous_filtering():\n event = {\"username\": \"john\"}\n anonymous_event = {\"username\": \"\"}\n assert filters.anonymous(event) == event\n assert filters.anonymous(anonymous_event) is None", "def test_attrib_exact_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~==^H.*?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n 
session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_filters_anonymous_with_empty_events():\n event = {}\n with pytest.raises(EventKeyError):\n filters.anonymous(event)", "def test_filter_false(self):\n self.es.register_filter(foo=False)\n self.assertFalse(self.es.streamfilter(self.data))", "def test_any(self):\n\n eventFilter = EventFilter(\"*\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should also be handled\n barEvent1 = BarEvent(traceid=traceids)\n session.handle(barEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n call(barEvent1),\n ])", "def test_filterestimator():\n raw = io.read_raw_fif(raw_fname)\n events = read_events(event_name)\n picks = pick_types(raw.info, meg=True, stim=False, ecg=False,\n eog=False, exclude='bads')\n picks = picks[1:13:3]\n epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,\n baseline=(None, 0), preload=True)\n epochs_data = epochs.get_data()\n\n # Add tests for different combinations of l_freq and h_freq\n filt = FilterEstimator(epochs.info, l_freq=40, h_freq=80,\n 
filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n assert_true(X.shape == epochs_data.shape)\n assert_array_equal(filt.fit(epochs_data, y).transform(epochs_data), X)\n\n filt = FilterEstimator(epochs.info, l_freq=None, h_freq=40,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n\n filt = FilterEstimator(epochs.info, l_freq=1, h_freq=1)\n y = epochs.events[:, -1]\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n assert_raises(ValueError, filt.fit_transform, epochs_data, y)\n\n filt = FilterEstimator(epochs.info, l_freq=40, h_freq=None,\n filter_length='auto',\n l_trans_bandwidth='auto', h_trans_bandwidth='auto')\n with warnings.catch_warnings(record=True): # stop freq attenuation warning\n X = filt.fit_transform(epochs_data, y)\n\n # Test init exception\n assert_raises(ValueError, filt.fit, epochs, y)\n assert_raises(ValueError, filt.transform, epochs, y)", "def testUsingFilterTool(self):\n pass", "def test_simple(self):\n\n eventFilter = EventFilter(\"FooEvent\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should be handled\n fooEvent1 = FooEvent(traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n ])\n\n # The second FooEvent should also be handled\n fooEvent2 = FooEvent(traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n 
self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent1),\n call(fooEvent2),\n ])", "async def test_stream_with_restricted(\n hass: HomeAssistant, mock_api_client: TestClient\n) -> None:\n listen_count = _listen_count(hass)\n\n async with mock_api_client.get(\n f\"{const.URL_API_STREAM}?restrict=test_event1,test_event3\"\n ) as resp:\n assert resp.status == HTTPStatus.OK\n assert listen_count + 1 == _listen_count(hass)\n\n hass.bus.async_fire(\"test_event1\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event1\"\n\n hass.bus.async_fire(\"test_event2\")\n hass.bus.async_fire(\"test_event3\")\n data = await _stream_next_event(resp.content)\n assert data[\"event_type\"] == \"test_event3\"", "def test_attrib_loose_regex(self):\n\n eventFilter = EventFilter(\"FooEvent[a~=u?lo+]\")\n\n # Start a session\n traceids = ['foobar']\n eventCallback = Mock()\n session = eventFilter.start(traceids, eventCallback)\n\n # The first FooEvent should not be handled\n fooEvent1 = FooEvent(a=\"Helllll\", traceid=traceids)\n session.handle(fooEvent1)\n self.assertEqual(eventCallback.mock_calls, [\n ])\n\n # The second FooEvent should be handled\n fooEvent2 = FooEvent(a=\"Heloooo\", traceid=traceids)\n session.handle(fooEvent2)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # The BarEvent should not be handled\n barEvent = BarEvent(traceid=traceids)\n session.handle(barEvent)\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])\n\n # No more events should be added when the session is finalized\n session.finalize()\n self.assertEqual(eventCallback.mock_calls, [\n call(fooEvent2),\n ])", "def test_filter_device1(self):\n pass", "def test_filter_mixed_function(self):\n for none_type in (False, True):\n for all_type in 
(False, True):\n for any_type in (False, True, None):\n result = none_type is False and all_type is True \\\n and (any_type is None or any_type is True)\n self._test_filter(none_type, all_type, any_type, result)" ]
[ "0.73427314", "0.72020984", "0.7167585", "0.70557195", "0.6797048", "0.6734244", "0.6645825", "0.66414756", "0.6572019", "0.653977", "0.6431853", "0.6312299", "0.6298566", "0.6192424", "0.6158068", "0.6146278", "0.6141409", "0.61395323", "0.61179775", "0.60769707", "0.6067759", "0.6051345", "0.6035407", "0.6017033", "0.6012635", "0.60020065", "0.59938765", "0.5981598", "0.5979214", "0.58932316" ]
0.7342077
1
This method selects the game type, the standard game or a default version where the user can select lenght of code and what numbers to include
def select_game_type(self): print('Please secect game type.\n' + 'NOTE! only standard game saves highscore') print('1. Standard game') print('2. Custom game, set your rules') print('0. Back to main menu') answer = InputHandler.input_integer_range('Your choice: ', 0, 2) if answer == 1: self.run_game() if answer == 2: self.run_game(True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def choose_game():\r\n game = input('choose a game :\\n *TicTacToe (1)\\n\\n *Nim (2)\\n\\n *Othello (3)\\n\\n *Puissance4 (4)\\n\\n *chess (5)')\r\n if game == '1':\r\n return tictactoe\r\n elif game == '2':\r\n return nim_game\r\n elif game == '3':\r\n return othello\r\n elif game == '4':\r\n return puissance\r\n elif game == '5':\r\n return chess\r\n else:\r\n return choose_game()", "def determine_game_type():\n\tkind_of_game = int(input(\"\"\"There are two games:\nWould you like to choose the number of total players, the number of human players, and the humans' names? (type 1)\nOr would you like to see some quick play with 5 computer players? (type 1)\nWhat'll it be, (1) or (2)?\n\"\"\"))\n\twhile kind_of_game != 1 and kind_of_game != 2:\n\t\tkind_of_game = int(input(\"\"\"What'll it be, (1) or (2)?\"\"\"))\n\tif kind_of_game == 1:\n\t\tnames = get_game_ready()\n\telse:\n\t\tnames = NAMES\n\treturn names", "def choose_option():\n print(\"1. title of most played game\"\n \"\\n2. how many copies have been sold in total\"\n \"\\n3. average selling\"\n \"\\n4. how many characters long is the longest title\"\n \"\\n5. average of the release dates\"\n \"\\n6. properties of the game\"\n \"\\n7. how many games are grouped by genre\"\n \"\\n8. ordered titles of games by date and alphabet\"\n \"\\n9. 
Exit\")\n\n option = input(\"\\nDisplay: \")\n return option", "def defineGameType(self, gametype_int):\n gametype = str(gametype_int)\n\n if gametype_int == '0':\n gametype = 'ffa'\n elif gametype_int == '1': # Last Man Standing\n gametype = 'lms'\n elif gametype_int == '2': # Quake 3 Arena single player\n gametype = 'dm'\n elif gametype_int == '3':\n gametype = 'tdm'\n elif gametype_int == '4':\n gametype = 'ts'\n elif gametype_int == '5':\n gametype = 'ftl'\n elif gametype_int == '6':\n gametype = 'cah'\n elif gametype_int == '7':\n gametype = 'ctf'\n elif gametype_int == '8':\n gametype = 'bm'\n elif gametype_int == '9':\n gametype = 'jump'\n elif gametype_int == '10':\n gametype = 'freeze'\n\n return gametype", "def parse_gamemode(self, gamemode: str):\n\n gamemode = gamemode.strip()\n\n # for users who input 'gem-grab' or 'gem_grab'\n gamemode = gamemode.replace(\"-\", \" \")\n gamemode = gamemode.replace(\"_\", \" \")\n\n if gamemode.lower() == \"showdown\":\n raise AmbiguityError(\"Please select one between Solo and Duo Showdown.\")\n\n possible_names = {\n \"Gem Grab\": [\"gem grab\", \"gemgrab\", \"gg\", \"gem\"],\n \"Brawl Ball\": [\"brawl ball\", \"brawlball\", \"bb\", \"bball\", \"ball\"],\n \"Solo Showdown\": [\n \"solo showdown\", \"ssd\", \"solo sd\",\n \"soloshowdown\", \"solo\", \"s sd\"\n ],\n \"Duo Showdown\": [\n \"duo showdown\", \"dsd\", \"duo sd\", \"duoshowdown\", \"duo\", \"d sd\"\n ],\n \"Bounty\": [\"bounty\", \"bonty\", \"bunty\"],\n \"Heist\": [\"heist\", \"heis\"],\n \"Lone Star\": [\"lone star\", \"lonestar\", \"ls\", \"lone\"],\n \"Takedown\": [\"takedown\", \"take down\", \"td\"],\n \"Robo Rumble\": [\n \"robo rumble\", \"rr\", \"roborumble\", \"robo\", \"rumble\"\n ],\n \"Big Game\": [\"big game\", \"biggame\", \"bg\", \"big\"],\n \"Boss Fight\": [\"boss fight\", \"bossfight\", \"bf\", \"boss\"]\n }\n\n for gmtype in possible_names:\n modes = possible_names[gmtype]\n if gamemode.lower() in modes:\n return gmtype\n else:\n return 
None", "def select_program(self):\r\n\r\n path_data = self.json_data # Deserialize json\r\n print(' '+path_data['title'] + '\\n\\n ' + path_data['desc'] + '\\n\\n' +\r\n path_data['path']['desc']) # Print title\r\n for option in path_data['path']['options']: # Print list of functions\r\n print(str(option['num'])+'. '+option['desc'])\r\n scenario = input()\r\n if scenario == '0':\r\n self.install_vk_api_for_python()\r\n elif scenario == '1':\r\n self.vk_sign_in()\r\n self.download_pics_from_dialogs()\r\n elif scenario == '2':\r\n self.vk_sign_in()\r\n self.tag = input('По какому тегу будем искать людей? (0 для отмены)\\n')\r\n if self.tag != '0':\r\n self.get_users_pool()\r\n self.get_friends_numbers()\r\n else:\r\n self.tag = None\r\n elif scenario == '3':\r\n counter = self.count_ffn()\r\n self.check_for_build_plot(counter)\r\n elif scenario == '4':\r\n self.vk_token_sing_in()\r\n self.find_most_popular()\r\n else:\r\n print('Ошибка ввода, попробуйте еще раз')\r\n time.sleep(0.5)\r\n self.select_program()\r\n print('Возвращаемся в главное меню...')\r\n time.sleep(1)\r\n self.select_program()", "def match_game(self):\n # TODO: split this out so hold'em is separate\n return ['Settings arenaVersion 1.0',\n 'Settings gameType NLHE',\n 'Settings gameMode tournament', ]", "def main_menu() -> None:\n option_list = (\"1\", \"quest\", \"2\", \"inventory\", \"3\", \"shop\", \"4\", \"stats\", \"5\", \"load\", \"save\",\n \"6\", *exit_list, \"code\")\n\n print(MenuSprites.main_menu)\n\n while (selection := input(\">\").lower()) not in option_list:\n print(f\"Invalid selection: {selection}\")\n\n with suppress(ValueError):\n selection = int(selection)\n\n if selection in [1, \"quest\"]:\n start_game(_inv=inv)\n return main_menu()\n\n elif selection in [2, \"inventory\"]:\n inv.display.inventory_display()\n return main_menu()\n\n elif selection in [3, \"shop\"]:\n ShopMenu(inv)\n\n elif selection in [4, \"stats\"]:\n inv.display.stats_display(in_loop=False)\n return 
main_menu()\n\n elif selection in [5, \"save\", \"load\"]:\n\n if selection not in [\"save\", \"load\"]:\n while selection := input(\"Load or save a character file?:\\n\").lower() not in (\"save\", \"load\"):\n print(\"Invalid selection\")\n\n if selection == \"save\":\n inv.save()\n return main_menu()\n\n elif selection == \"load\":\n inv.load()\n return main_menu()\n\n elif selection in [6, *exit_list]:\n quit()\n\n elif selection == \"code\":\n with open(\"DevCode.txt\", 'r') as f:\n code = str(f.read())\n\n inp = input(\"Enter code\")\n\n if inp == code:\n inv.state.dev_mode = True\n\n return main_menu()", "def optionSet(self):\n choice = self.optionVar.get()\n \n #if custom game is chosen\n if choice == 4:\n msg = \"Invalid Input!\"\n valid = True\n nums = []\n \n #make sure all inputs are integers\n for i in range(3):\n try:\n value = int(self.entry[i].get())\n nums.append(value)\n except ValueError:\n valid = False\n if i == 0: msg += \"\\nHeight \"\n elif i == 1: msg += \"\\nWidth \"\n elif i == 2: msg += \"\\nMines \"\n msg += \"input must be an integer.\"\n \n #check for other invalid inputs\n #(negative input, not wide enough, too many mines)\n if valid:\n if nums[0]<=0 or nums[1]<=0 or nums[2]<=0:\n valid = False\n msg += \"\\nInputs must be integers greater than zero\"\n elif nums[1] < 8 :\n valid = False\n msg += \"\\nMinimum width allowed is 8\"\n if nums[0]*nums[1] <= nums[2]:\n valid = False\n msg += \"\\nToo many mines to fit on the board!\"\n\n #start game according to specs if input was valid\n if valid: \n self.menuVar.set(choice)\n self.checkVar.set(4)\n self.resize(nums[0],nums[1],nums[2])\n self.optionsWindow.destroy()\n #otherwise popup error and keep options window open\n else:\n messagebox.showinfo('Custom Game Error', msg)\n\n #start game according to difficulty chosen \n else:\n self.menuVar.set(choice)\n if choice == 1: self.resize(8,8,10)\n elif choice == 2: self.resize(16,16,40)\n else: self.resize(16,30,99)\n 
self.optionsWindow.destroy()", "def get_choice(list_of_games, num_games, num_pages=None, current_page=None):\n\tif current_page == 0:\n\t\ttext = Fore.WHITE + 'Options: Display (' + Fore.GREEN + 'N' + Fore.WHITE + ')ext page, (' + Fore.MAGENTA + \\\n\t\t 'C' + Fore.WHITE + ')urrent page, (' + Fore.RED + 'Q' + Fore.WHITE + ')uit or enter the ' + Fore.CYAN + \\\n\t\t 'Number' + Fore.WHITE + ' of the game to play'\n\telse:\n\t\ttext = Fore.WHITE + 'Options: Display (' + Fore.BLUE + 'P' + Fore.WHITE + ')revious page, (' + Fore.GREEN + \\\n\t\t 'N' + Fore.WHITE + ')ext page, (' + Fore.MAGENTA + 'C' + Fore.WHITE + ')urrent page, (' + \\\n\t\t Fore.RED + 'Q' + Fore.WHITE + ')uit or enter the ' + Fore.CYAN + 'Number' + Fore.WHITE + ' of the game to play'\n\n\tprint '\\n' + text\n\tindex = raw_input(Fore.WHITE + Style.BRIGHT + 'What would you like to do?: ').lower()\n\twhile index != 'p' or index != 'n' or index != 'd' or index.isdigit():\n\t\tif index == 'c':\n\t\t\tos.system('clear')\n\t\t\tif num_pages:\n\t\t\t\tlist_columns(list_of_games)\n\t\t\t\tprint '\\nDisplaying page {} of {}'.format(current_page, num_pages)\n\t\t\telse:\n\t\t\t\tlist_columns(list_of_games)\n\t\t\tprint text\n\t\telif index == 'p':\n\t\t\tbreak\n\t\telif index == 'n':\n\t\t\tbreak\n\t\telif index == 'q':\n\t\t\tsys.exit()\n\t\telif index.isdigit():\n\t\t\tif 0 < int(index) < num_games:\n\t\t\t\tbreak\n\t\t\telse:\n\t\t\t\tprint Fore.RED + '\\nSorry that is not a valid choice!'\n\t\t\tprint text\n\t\tindex = raw_input(Fore.WHITE + Style.BRIGHT + 'What would you like to do?: ')\n\n\treturn index", "def SelectPlayer(self):\n\n player = input(data['player'])\n if player == \"1\":\n return 0\n elif player == \"2\":\n return 1\n else:\n return 'invalid'", "def mainMenuText():\n print(\"\"\" 1. New Game\n 2. Load Game\n 3. Authors\n 4. Exit\"\"\")\n global choice\n choice = input(\"What to do? 
[Choose the number]:\")\n return(choice)", "def chooseGamemode(self):\n\n # Set the gamemode when user clicks a radio button\n self.GAME_MODE = self.gamemode_var.get()", "def get_player_mode(mode=None):\n if mode == \"1\":\n print(\"You've chosen Solo Mode! Can you beat a computer?\")\n return mode\n elif mode == \"2\":\n print(\"You've chosen Multiplayer Mode! Can you beat a human?\")\n return mode\n else:\n if mode is not None:\n print(\"Unrecognized input. Please enter 1 or 2\\n\")\n mode = input(\"1 or 2 Players? \")\n return get_player_mode(mode)", "def main_menu(): \r\n \r\n print(\"Please enter in the major for the class you need to study for: \")\r\n print(\"1. for Electrical Engineering\")\r\n print(\"2. for Bioengineering\")\r\n print(\"3. for Chemcial Engineering\")\r\n print(\"4. for Mechanical Engineering\")\r\n print(\"5. for Civil Engineering\")\r\n print(\"6. for Biology\")\r\n print(\"7. for Data Analytics\")\r\n print(\"8. for Chemistry\")\r\n choice = input()\r\n return choice", "def select_game(games):\n\n Ngames = len(games)\n if Ngames == 0: ### no games available\n print('I\\'m sorry, but there are no games currently available. Please design a game soon so we can get playing!')\n sys.exit(0)\n\n elif Ngames==1:\n print('There is only a single game available!')\n return games.items()[0]\n\n else:\n print('Please tell me which of the following games you would like to play!')\n print_available_games(games)\n selected = raw_input('')\n\n while selected not in games: ### make sure the specified game is available\n print('I\\'m sorry, but I did not understand. 
Please specify one of the following, or specify \"exit\" to quit')\n print_available_games(games)\n selected = raw_input('')\n\n if selected == 'exit': ### quit\n sys.exit(0)\n\n return selected, games[selected]", "def select_win_game(self):\r\n win_game_anims =['anim_speedtap_winround_intensity01_01',\r\n 'anim_speedtap_winround_intensity02_02',\r\n 'anim_speedtap_winround_intensity02_01',\r\n 'anim_speedtap_wingame_intensity02_01',\r\n 'anim_speedtap_wingame_intensity02_02',\r\n 'anim_speedtap_wingame_intensity03_01']\r\n cozmo.logger.info(\"Cozmo win game reacion\")\r\n return win_game_anims[randint(0,5)]", "def InputMenuChoice():\r\n choice = str(input(\"Which option would you like to perform? [1 to 4] - \")).strip()\r\n print() # Add an extra line for looks\r\n return choice", "def generate_game_code() -> int:\n while True:\n # code will only contain digits\n code_options = string.digits\n generated_game_code = ''.join(secrets.choice(code_options) for i in range(7))\n if Game.objects.filter(game_code=generated_game_code).count() == 0:\n break\n return int(generated_game_code)", "def start_menu():\n\n # Print main menu.\n print \"-\" * 8 + \"Main Menu\" + \"-\" * 8\n print \"1. Start Game\"\n print \"2. Instructions\"\n print \"3. Credits\"\n print \"4. Extensions\"\n print \"5. Quit\"\n choice = valid(\"\\nWhat would you like to do? \", 1, 5)\n\n if choice == 1: # Choose game settings.\n\n # Setup the game.\n print \"\\n\" + \"-\" * 8 + \"Select Game Type\" + \"-\" * 8\n print \"1. Quick Game\"\n print \"2. Manual\"\n game_type = valid(\"\\nSelect your game type: \", 1, 2)\n\n if game_type == 1:\n size = 10\n elif game_type == 2:\n size = valid(\"\\nSelect a board size from 2 to 100000: \", 2, 100000)\n\n # Select Player 1.\n print \"\\n\" + \"-\" * 8 + \"Select Player\" + \"-\" * 8\n print \"1. Human\"\n print \"2. 
Computer\"\n hero_type = valid(\"\\nSelect player type: \", 1, 2)\n\n if hero_type == 1: # Input human player name.\n name1 = raw_input(\"\\nWhat is Player 1's name: \")\n player1 = player.Player(name1, size)\n elif hero_type == 2: # Select AI type.\n print \"\\n\" + \"-\" * 8 + \"Select AI\" + \"-\" * 8\n print \"1. Random\"\n print \"2. Random+\"\n print \"3. SmartRandom\"\n name1 = valid(\"\\nChoose the AI: \", 1, 3)\n player1 = player.Ai(name1, size)\n\n # Select Player 2.\n print \"\\n\" + \"-\" * 8 + \"Select Opponent\" + \"-\" * 8\n print \"1. Human\"\n print \"2. Computer\"\n opp_type = valid(\"\\nSelect your oppenent: \", 1, 2)\n\n if opp_type == 1: # Input human player name.\n name2 = raw_input(\"\\nWhat is Player 2's name: \")\n player2 = player.Player(name2, size)\n elif opp_type == 2: # Select AI type.\n print \"\\n\" + \"-\" * 8 + \"Select AI\" + \"-\" * 8\n print \"1. Random\"\n print \"2. Random+\"\n print \"3. SmartRandom\"\n name2 = valid(\"\\nChoose the AI: \", 1, 3)\n player2 = player.Ai(name2, size)\n\n if game_type == 1: # Basic game with random placement.\n player1.board.random_board(1, 2, 2, 1)\n player2.board.random_board(1, 2, 2, 1)\n elif game_type == 2:\n n = _fleet(size) # Helper function asks for the size of the fleet.\n\n # Select type of placement.\n print \"\\n\" + \"-\" * 8 + \"Select Type of Placement\" + \"-\" * 8\n print \"1. Random\"\n print \"2. 
Manual\"\n place = valid(\"\\nChoose placement: \", 1, 2)\n\n if place == 1 or hero_type == 2: # Random placement for all.\n player1.board.random_board(n[0], n[1], n[2], n[3])\n player2.board.random_board(n[0], n[1], n[2], n[3])\n elif place == 2: # Manual placement.\n if opp_type == 2: # Random placement of computer ships.\n player2.board.random_board(n[0], n[1], n[2], n[3])\n else:\n\n # Ask for manual placement of Player 1\"s ships.\n print \"*\" * 50\n print \"\\n%s, please place your ships.\\n\" % name1\n print \"*\" * 50\n _place(player1, size, 2, n[0])\n _place(player1, size, 3, n[1])\n _place(player1, size, 4, n[2])\n _place(player1, size, 5, n[3])\n\n # Switch players for placement.\n print \"\\n%s and %s, please switch.\" % (name1, name2)\n print \"Press Enter to continue. \"\n cont()\n\n # Asks for manual placement of Player 2\"s ships.\n print \"*\" * 50\n print \"\\n%s, please place your ships.\\n\" % name2\n print \"*\" * 50\n _place(player2, size, 2, n[0])\n _place(player2, size, 3, n[1])\n _place(player2, size, 4, n[2])\n _place(player2, size, 5, n[3])\n\n run_game(player1, player2) # Play the game!\n\n elif choice == 2: # Read the instructions.\n print_file(\"./media/instructions.txt\", True)\n start_menu()\n elif choice == 3: # Read the credits.\n print_file(\"./media/credits.txt\", True)\n start_menu()\n elif choice == 4: # Read about the extensions.\n print_file(\"extensions.txt\", True)\n start_menu()\n elif choice == 5: # Quit the game.\n quit()\n print \"\"\n start_menu()", "def load_game(self):\n # Show error message if any of the toggles are not picked \n if not self.diff_choice or not self.game_choice or not self.match_style:\n content = Button(text=\"Dismiss\")\n error = Popup(title=\"Select one of each option\", content=content, size_hint=(.6, .3))\n content.bind(on_press=error.dismiss)\n error.open()\n return\n \n # load game settings and swap screens\n game_screen = self.manager.get_screen(self.game_choice)\n 
game_screen.load_settings(self.diff_choice, self.match_style)\n self.manager.transition = SlideTransition(direction=\"left\")\n self.manager.current = self.game_choice", "def ChooseNumOption(nameList, element, type, message0, message1, message2, pick, dictionary = {}):\n if nameList:\n print( message0, '\\x1b[0;31;43m'+str(type)+'\\x1b[0m', message1)\n for i, element in enumerate(nameList, 1):\n if dictionary:\n print( str(i)+str(')'), element, dictionary.get(element))\n else:\n print( str(i)+str(')'), element)\n\n if pick == True:\n number = PickNumber(len(nameList))\n for i, element in enumerate(nameList, 1):\n if i == number:\n print( '\\n ---> The', '\\x1b[0;31;43m'+str(element)+'\\x1b[0m', message2) # This file is the pdbFile \n globals().update({type+str(\"Name\") : element})\n return element\n else:\n return\n#in case of files \n else:\n if element == \"file\":\n print( '\\n No '+'\\x1b[6;30;42m'+ '.'+type +'\\x1b[0m', 'files found. Please put a ' +'\\x1b[6;30;42m'+ '.'+type +'\\x1b[0m', 'file in the current folder and start again. \\n Good bye! \\n')\n sys.exit()\n if element != \"file\" and element != \"chromophore\" and element != \"chain\":\n print( '\\n No '+'\\x1b[6;30;42m'+ type +'s'+'\\x1b[0m', 'found in the ', '\\x1b[0;31;43m'+pdbName+'\\x1b[0m', \"file\")\n else:\n print( '\\n No '+'\\x1b[6;30;42m'+ type +'s'+'\\x1b[0m', 'found in the ', '\\x1b[0;31;43m'+pdbName+'\\x1b[0m', \"file. The file is corrupt, verify your PDB and start again! \\n Good bye! \\n\")\n sys.exit()", "def input_menu_choice():\r\n choice = str(input(\"Which option would you like to perform? 
[1 to 4] - \")).strip()\r\n return choice", "def game_choice(game):\n global set_game\n set_game = game\n return set_game", "def select_game_difficulty():\n prompt = \"Please select a game difficulty by typing it in!\\n\"\n prompt += \"Possible choices include easy, medium and hard.\\n\"\n equivalents_difficulty = {x: \"easy\" for x in (\"easy\", \"e\", \"1\", \"1.\")}\n equivalents_difficulty.update(\n {y: \"medium\" for y in (\"medium\", \"m\", \"2\", \"2.\")}\n )\n equivalents_difficulty.update(\n {z: \"hard\" for z in (\"hard\", \"h\", \"3\", \"3.\")}\n )\n chosen_difficulty = input(prompt).lower()\n while chosen_difficulty not in equivalents_difficulty:\n print(\"That's not an option!\")\n chosen_difficulty = input(prompt).lower()\n print(\n \"You've chosen \" +\n str(equivalents_difficulty[chosen_difficulty]) +\n \"!\\n\"\n )\n return equivalents_difficulty[chosen_difficulty]", "def _choose_best_option(self):", "def _get_difficulty_ui(self, difficulty):\n if self.DIFFICULTY == Missions._DIFFICULTY_4:\n if difficulty == 1:\n return Missions._DIFFICULTY_4.STAGE_1\n if difficulty == 2:\n return Missions._DIFFICULTY_4.STAGE_2\n if difficulty == 3:\n return Missions._DIFFICULTY_4.STAGE_3\n if difficulty == 4:\n return Missions._DIFFICULTY_4.STAGE_4\n if self.DIFFICULTY == Missions._DIFFICULTY_6:\n if difficulty == 1:\n return Missions._DIFFICULTY_6.STAGE_1\n if difficulty == 2:\n return Missions._DIFFICULTY_6.STAGE_2\n if difficulty == 3:\n return Missions._DIFFICULTY_6.STAGE_3\n if difficulty == 4:\n return Missions._DIFFICULTY_6.STAGE_4\n if difficulty == 5:\n return Missions._DIFFICULTY_6.STAGE_5\n if difficulty == 6:\n return Missions._DIFFICULTY_6.STAGE_6\n logger.warning(f\"Got wrong difficulty or class setup: class = {self.DIFFICULTY.__name__}, \"\n f\"difficulty={difficulty}. 
Trying to use max difficulty.\")\n return Missions._DIFFICULTY_6.STAGE_6", "def select_player(n):\n pygame.display.set_caption(\"You selected: \" + PROF[n])", "def get_computer_play():\r\n return random.choice(['Ailurophile', 'Assemblage', 'Becoming', 'Beleaguer', \r\n 'Brood', 'Bucolic', 'Bungalow', 'Chatoyant', 'Comely', \r\n 'Conflate', 'Cynosure', 'Dalliance', 'Demesne', 'Demure', \r\n 'Denouement', 'Desuetude', 'Desultory', 'Diaphanous', \r\n 'Dissemble', 'Dulcet', 'Ebullience', 'Effervescent', \r\n 'Efflorescence', 'Elision', 'Elixir', 'Eloquence', \r\n 'Embrocation', 'Emollient', 'Ephemeral', 'Epiphany', \r\n 'Erstwhile', 'Ethereal', 'Evanescent', 'Evocative', \r\n 'Fetching', 'Felicity', 'Forbearance', 'Fugacious', \r\n 'Furtive', 'Gambol', 'Glamour', 'Gossamer', 'Halcyon', \r\n 'Harbinger', 'Imbrication', 'Imbroglio', 'Imbue', \r\n 'Incipient', 'Ineffable', 'Ingenue', 'Inglenook', \r\n 'Insouciance', 'Inure', 'Kayak', 'Labyrinthine', \r\n 'Lagniappe', 'Lagoon', 'Languor', 'Lassitude', 'Leisure', \r\n 'Lilt', 'Lissome', 'Lithe', 'Love', 'Mellifluous', \r\n 'Moiety', 'Mondegreen', 'Murmurous', 'Nemesis', 'Numbered',\r\n 'Offing', 'Onomatopoeia', 'Opulent', 'Palimpsest', \r\n 'Panacea', 'Panoply', 'Pastiche', 'Penumbra', 'Petrichor', \r\n 'Plethora', 'Propinquity', 'Pyrrhic', 'Python', \r\n 'Quintessential', 'Ratatouille', 'Ravel', 'Redolent', \r\n 'Riparian', 'Ripple', 'Scintilla', 'Sempiternal', 'Seraglio', \r\n 'Serendipity', 'Summery', 'Sumptuous', 'Surreptitious', \r\n 'Susquehanna', 'Susurrous', 'Talisman', 'Tintinnabulation', \r\n 'Umbrella', 'Untoward', 'Vestigial', 'Wafture', \r\n 'Wherewithal', 'Woebegone'])", "def menu():\n print('Selecciona el tipo de problema',end='\\n') \n print('-----------------------------------------')\n print('1) Conduccion de calor estacionaria, conductividad constante ',end='\\n')\n print('2) Conduccion de calor, ecuacion de Poisson condicion tipo Dirichlet ',end='\\n')\n print('3) Conduccion de calor, ecuacion de Poisson 
condicion tipo Neumman',end='\\n')\n print('4) Conduccion de calor, ecuacion de Poisson conductividad no constante',end='\\n')\n try:\n seleccion=int(input())\n except:\n print(\"No seas ñero\")\n sys.exit()\n return seleccion" ]
[ "0.6393351", "0.6387377", "0.63826907", "0.6039702", "0.59779155", "0.589456", "0.5878415", "0.58534694", "0.58445454", "0.5796118", "0.5719777", "0.57154363", "0.56996566", "0.56939584", "0.5680214", "0.566861", "0.56489444", "0.5643704", "0.5640862", "0.5637051", "0.56139827", "0.5611497", "0.5593302", "0.55372745", "0.552757", "0.5523836", "0.5523688", "0.5509724", "0.54827744", "0.54726136" ]
0.7425328
0
This method runs the game using the bool custom to determing game type. It will create an object of Mastermind based on value of custom. It will then update scores, ask for another game and call itself using custom for same gametype in loop
def run_game(self, custom=False): # used for determining saving highscore or not self.custom = custom game = None if not custom: game = Mastermind() else: # The user gets to set custom rules for the game correct_range = False while not correct_range: message_low = 'Please select the lowest number: ' message_high = 'Please select the highest number: ' low = InputHandler.input_integer(message_low) high = InputHandler.input_integer(message_high) if high - low > 0: correct_range = True else: print('Lowest number must be lower than highest number\n') length = InputHandler.input_integer('Please select a lenght: ') game = Mastermind(low, high, length) score = game.play() self.update_scores(score) message = 'Would you like to play another round? Y(es) or N(no): ' play_again = InputHandler.input_bool(message) if play_again: self.run_game(custom)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run_game(self):\n game = Poker()\n AI_win = game.play_round(self.name)\n self.update_scores(AI_win)\n message = 'Would you like to play another round? Y(es) or N(o): '\n answer = InputHandler.input_bool(message)\n if answer:\n self.run_game()", "def run_game_logic(self):\n pass", "def start_game(self):\n self.code = code.get_random_num()\n self.Player1 = self.get_player(1)\n self.Player2 = self.get_player(2)\n attempt = self.Player1.make_guess()\n guess.guess_lists(attempt, self.code)\n right_answer_list = guess.return_answer()\n num_guessed_list = guess.return_player_guess()\n check.check(num_guessed_list, right_answer_list)\n attempt = self.Player2.make_guess()\n guess.guess_lists(attempt, self.code)\n right_answer_list = guess.return_answer()\n num_guessed_list = guess.return_player_guess()\n output = check.check(num_guessed_list, right_answer_list)\n play = end_game.end_game(output)\n if play == True:\n self.keep_playing()", "def run_game(self) -> None:\n decision = 0\n if self._initial:\n self._initial = False\n while decision != 1:\n try:\n display_no_combat_init(self.hero)\n decision = get_user_input([1, 2, -1])\n if decision == -1:\n self._quit()\n elif decision == 2:\n self._show_bag()\n else:\n break\n except KeyboardInterrupt:\n print(\"[!] If you want to quit, use the provided user interface\")\n\n while not self.hero.is_dead:\n try:\n self._load_map()\n except KeyboardInterrupt:\n print(\"[!] 
If you want to quit, use the provided user interface\")", "def run():\n \n # Enter player name\n #player_name = raw_input(\"Put your Name: \\n \")\n player1 = Player(raw_input(\"Put Player 1 name: \\n \"))\n player2 = Player(raw_input(\"Put Player 2 name: \\n \")) \n \n # Generate Deck\n cards = gen_deck()\n \n game_on = True\n start_pl = 0\n while game_on == True :\n deck = copy(cards) # Cards being played this hand\n deal_cards(deck, player1, player2)\n \n play_set(player1, player2, start_pl) \n\n game_on = check_score(player1, player2, game_on)", "def select_game_type(self):\n print('Please secect game type.\\n' +\n 'NOTE! only standard game saves highscore')\n print('1. Standard game')\n print('2. Custom game, set your rules')\n print('0. Back to main menu')\n answer = InputHandler.input_integer_range('Your choice: ', 0, 2)\n if answer == 1:\n self.run_game()\n if answer == 2:\n self.run_game(True)", "def main():\n game = Game(TIMES, HARDNESS)\n game.start()\n game.print_score()", "def main():\r\n # Initialize words from specific file\r\n words_list = hangman_helper.load_words()\r\n # Run single game with given word list to choose from\r\n run_single_game(words_list)\r\n # Ask the user if he would like to play again\r\n request = hangman_helper.get_input()\r\n if request[INPUT_TYPE] == hangman_helper.PLAY_AGAIN:\r\n if request[INPUT_VALUE]:\r\n run_single_game(words_list)", "def main(player):\n saved_score = 0\n rat_array = [\"reset\"]\n current_fight = \"\"\n while player.hp >= 1:\n\n system.clear_screen()\n if player.location == route_list[0]:\n pass\n else:\n rat_array = []\n rat_chance = randint(1, 100)\n if rat_chance >= 50:\n rat_array = system.npc_swarm_spawn()\n else:\n # must reset here, or a sub 50 roll crashes with no rat_array found\n rat_array = [\"reset\"]\n pass\n if player.location == current_fight:\n rat_array = [\"reset\"]\n else:\n pass\n\n # encounter spawn gotta go somewhere how bout here\n system.encounter_chance(player)\n\n status_array = 
system.status_message(route_list, player, rat_array)\n print(f\"{status_array[0]}\\n{status_array[1]}\")\n\n movement_options = system.movement_options(route_list, player)\n print(\"\\nAdjacent systems to your current location are:\")\n for movement_option in movement_options:\n print(movement_option)\n if len(movement_options) == 1:\n print(\n f\"\\nWhat is your decision? \\n\\nAvailable commands are {movement_options[0]}, \"\n + \"or type 'rat' to shoot rats.\"\n )\n else:\n print(\n f\"\\nWhat is your decision? \\n\\nAvailable commands are {movement_options[0]}, \"\n + f\"{movement_options[1]} or type 'rat' to shoot rats.\"\n )\n try:\n player_action = str(input())\n except ValueError:\n print(\"You spin your ship.\")\n\n action = system.parse_input(player_action, movement_options, player)\n # print(rat_array)\n if action.lower() == \"rat\":\n if rat_array[0] != \"reset\":\n # print('fightin')\n system.rat_fight(rat_array, player)\n # system.clear_screen()\n try:\n for rat_item in rat_array:\n rat_array[rat_item].remove()\n rat_array = [\"reset\"]\n current_fight = player.location\n except:\n rat_array = [\"reset\"]\n current_fight = player.location\n\n if player.location == destination_system:\n print(\n f\"\\n\\nCongratulations, you have arrived at {player.location}. \"\n + \"\\nYou may now set a new destination, or dock up and use your points you've gained to reship. \"\n + \"\\nOr you may choose to either hold onto your points, in which case they might be lost on death \"\n + \"or save them to buy bigger and better ships\"\n + \"\\no7 capsuleer the system is clear. \"\n + f\"\\n\\nYour final score from this trip was {player.score}\")\n saved_score += player.score\n\n if(player.hp < 1):\n print(\n f\"\\n\\nYour ship explodes in to tiny pieces at the stargate in {player.location}. \"\n + \"\\nYour capsule containing your body shatters from the force of the explosion. \"\n + \"\\nYou are dead. 
You wake up in your hangar where your death clone is set to and \"\n + \"prepare to voyage out once again. \"\n + \"\\no7 capsuleer the cyno is now lit. \"\n + f\"\\n\\nYour final score was {player.score}\"\n )", "def main(**kwargs):\n print('Start')\n agent = initAgent(**kwargs)\n kwargs['agent'] = agent\n result = []\n\n def mainsub(*args):\n game = Game(**kwargs)\n game.display(kwargs['noshow'])\n while True:\n # get_input = getch(\"Enter direction (w/a/s/d): \")\n get_input = game.action()\n if get_input in keypad:\n game.move(keypad.index(get_input))\n game.update()\n # elif get_input == \"q\":\n # break\n # else:\n # print(\"\\nInvalid choice.\")\n # continue\n if game.end:\n game.savegame()\n game.display(kwargs['noshow'])\n print(\"Result:\", game.nturn, game.score)\n break\n game.display(kwargs['noshow'])\n result.append((game.score, game.nturn))\n game.agent.replay()\n if kwargs['train']:\n game.agent.save()\n game.reset()\n if kwargs['train']:\n np.save('result.%s' % game.agent.algo, np.array(result))\n\n map(mainsub, range(kwargs['n']))\n print(\"Thanks for playing.\")", "def start_game(self):\n\n\t\tpass", "def process_game_logic(self):\n self.dmod = -1\n self.smod = -1\n if self.state == self.STATE_SETUP:\n if self.current_game < self.config['General']['games_per_session']:\n self.current_game += 1\n self.state = self.STATE_GAMENO\n self.game_title = \"Game: %d of %d\" % (self.current_game, self.config['General']['games_per_session'])\n self.game_title = pygl2d.font.RenderText(self.game_title, (255, 255, 0), self.f36)\n self.game_title_rect = self.game_title.get_rect()\n self.game_title_rect.center = (self.SCREEN_WIDTH / 2, self.SCREEN_HEIGHT / 16 * 7)\n self.gameevents.add(\"display_game\", self.current_game)\n else:\n self.state = self.STATE_DONE\n self.ret = 0\n self.lc.stop()\n elif self.state == self.STATE_SETUP_IFF:\n self.mine_list.generate_foes()\n self.gameevents.add(\"display_foes\", \" \".join(self.mine_list.foe_letters), \"player\")\n 
self.foe_top = pygl2d.font.RenderText(\"The Type-2 mines for this session are:\", (255, 255, 0), self.f24)\n self.foe_top_rect = self.foe_top.get_rect()\n self.foe_top_rect.center = (self.SCREEN_WIDTH / 2, 270 * self.aspect_ratio)\n self.foe_middle = pygl2d.font.RenderText(\", \".join(self.mine_list.foe_letters), (255, 255, 255), self.f96)\n self.foe_middle_rect = self.foe_middle.get_rect()\n self.foe_middle_rect.center = (self.SCREEN_WIDTH / 2, self.SCREEN_HEIGHT / 2)\n self.foe_midbot = pygl2d.font.RenderText(\"Try to memorize them before proceeding\", (255, 255, 0), self.f24)\n self.foe_midbot_rect = self.foe_midbot.get_rect()\n self.foe_midbot_rect.center = (self.SCREEN_WIDTH / 2, 500 * self.aspect_ratio)\n self.foe_bottom = pygl2d.font.RenderText(\"Press return to begin\", (255, 255, 0), self.f24)\n self.foe_bottom_rect = self.foe_bottom.get_rect()\n self.foe_bottom_rect.center = (self.SCREEN_WIDTH / 2, 600 * self.aspect_ratio)\n self.state = self.STATE_IFF\n elif self.state == self.STATE_PREPARE:\n self.gameevents.add(\"game\", \"ready\", type='EVENT_SYSTEM')\n self.setup_world()\n self.state = self.STATE_PLAY\n self.gameevents.add(\"game\", \"start\", type='EVENT_SYSTEM')\n elif self.state == self.STATE_PLAY:\n self.ship.compute()\n if not self.ship.jumped:\n\n overlay = np.zeros((int(self.world.bottom-self.world.top),int(self.world.right-self.world.left),3),np.uint8)\n\n\n cv2.line(overlay,(int(self.ship.position.x-self.world.left),int(self.ship.position.y-self.world.top)),(int(self.ship.oldPosx-self.world.left),int(self.ship.oldPosy-self.world.top)),(255,255,255),self.config['Playback']['line_width'])\n\n cv2.addWeighted(overlay,self.config['Playback']['intensity']/100.0,self.img,1.0,0,self.img)\n\n distance = self.ship.get_distance_to_point(self.WORLD_WIDTH / 2, self.WORLD_HEIGHT / 2)\n flight_max_inc = self.config['Score']['flight_max_increment']\n dmod = 1 - (distance - self.smallhex.radius * 1.125) / (self.WORLD_WIDTH / 2)\n if dmod > 1.0: dmod = 1.0\n 
if dmod < 0.0: dmod = 0.0\n smod = max([abs(self.ship.velocity.x), abs(self.ship.velocity.y)]) / self.ship.max_vel\n self.dmod = dmod\n self.smod = smod\n for missile in self.missile_list:\n missile.compute()\n if self.fortress_exists == True:\n self.fortress.compute()\n for shell in self.shell_list:\n shell.compute()\n if self.config['Hexagon']['hex_shrink']:\n self.bighex.compute()\n if self.mine_exists:\n if self.mine_list.flag == False and self.mine_list.timer.elapsed() > self.mine_list.spawn_time:\n self.gameevents.add(\"spawn\", \"mine\")\n elif self.mine_list.flag and self.mine_list.timer.elapsed() > self.mine_list.timeout:\n self.gameevents.add(\"timeout\", \"mine\")\n self.mine_list.compute()\n self.check_bounds()\n #test collisions to generate game events\n self.test_collisions()\n if self.flighttimer.elapsed() > self.config['Score']['update_timer']:\n self.flighttimer.reset()\n def pointspace (a0, a1, a2, b0, b1, b2): return math.exp(a1 ** (a0 * a2)) * math.exp(b1 ** (b0 * b2))\n points = flight_max_inc * pointspace(self.dmod, 2, 1, self.smod, 2, 1.75) / pointspace(1, 2, 1, 1, 2, 1.75)\n self.gameevents.add(\"score+\", \"flight\", points)\n self.flight2 += flight_max_inc * pointspace(self.dmod, 2, .45, self.smod, 2, 1) / pointspace(1, 2, .45, 1, 2, 1)\n if (self.ship.velocity.x ** 2 + self.ship.velocity.y ** 2) ** 0.5 < self.config['Score']['speed_threshold']:\n self.gameevents.add(\"score+\", \"vlcty\", self.config['Score']['VLCTY_increment'])\n #self.gameevents.add(\"score+\", \"flight\", self.config['Score']['VLCTY_increment'])\n else:\n self.gameevents.add(\"score-\", \"vlcty\", self.config['Score']['VLCTY_increment'])\n #self.gameevents.add(\"score-\", \"flight\", self.config['Score']['VLCTY_increment'])\n if self.bighex.collide(self.ship):\n self.gameevents.add(\"score+\", \"cntrl\", self.config['Score']['CNTRL_increment'])\n #self.gameevents.add(\"score+\", \"flight\", self.config['Score']['CNTRL_increment'])\n else:\n 
self.gameevents.add(\"score+\", \"cntrl\", self.config['Score']['CNTRL_increment'] / 2)\n #self.gameevents.add(\"score+\", \"flight\", self.config['Score']['CNTRL_increment']/2)\n if self.bonus_exists:\n if self.config['General']['bonus_system'] == \"AX-CPT\":\n self.bonus.axcpt_update()\n else:\n if self.bonus.visible == False and self.bonus.timer.elapsed() > self.config['Bonus']['symbol_down_time']:\n self.gameevents.add(\"activate\", \"bonus\")\n elif self.bonus.visible == True and self.bonus.timer.elapsed() >= self.config['Bonus']['symbol_up_time']:\n self.gameevents.add(\"deactivate\", \"bonus\", self.bonus.current_symbol)\n #update scores\n self.score.pnts = self.score.__getattribute__(\"pnts\")\n self.score.vlcty = self.score.__getattribute__(\"vlcty\")\n self.score.cntrl = self.score.__getattribute__(\"cntrl\")\n self.score.speed = self.score.__getattribute__(\"speed\")\n self.score.flight = self.score.__getattribute__(\"flight\")\n self.score.fortress = self.score.__getattribute__(\"fortress\")\n self.score.mines = self.score.__getattribute__(\"mines\")\n self.score.bonus = self.score.__getattribute__(\"bonus\")\n\n if self.gametimer.elapsed() > self.config['General']['game_time']:\n self.gameevents.add(\"game\", \"over\", type='EVENT_SYSTEM')\n self.state = self.STATE_SCORES\n self.img = 255-self.img\n now = datetime.datetime.now()\n cv2.imwrite('../Recordings/' + str(now.year) + \"_\"+str(now.month)+\"_\" + str(now.day) + \"_\" + str(now.hour) + \"_\" + str(now.minute)+\"_\" + 'lines.jpg',(self.img),[int(cv2.IMWRITE_JPEG_QUALITY), 100])", "def start_new_game(self):\r\n\r\n self.initialize_game_params()\r\n self.timer = Timer(self.screen)\r\n self.mine_counter = MineCounter(self.num_of_mines, self.screen)\r\n self.reset_button = ResetButton(self.screen)\r\n self.high_score = HighScore(self.rows, self.cols, self.num_of_mines, self.screen)\r\n self.board = Board(self.rows, self.cols, self.num_of_mines, self.screen)\r\n self.play_game()", "def play_game():\n 
pass", "def main():\n game = Blackjack()\n game.play()", "def main():\n\n print_header()\n statistic = {}\n\n\n while True:\n\n \"\"\"\n System take input for opponent like friend or computer (computer is segregated into two types 'c1' (EasyAi) and 'c2' HarderAi)\n System also take input for player icon and provide only two options 'X' or 'O'\n \"\"\"\n\n opponent = input(\n \"Would you like to play against a friend or the computer? \\n\\t-friend (f)\\n\\t-computer level 1 (c1)\\n\\t-computer level 2 (c2)\")\n icon_coice = input(\"Would you like to play as (X) or (O)? \").upper()\n players = [EasyAi(icon_coice), HarderAi(flip_icon(icon_coice))]\n if opponent.lower() == \"f\":\n players = [Human(icon_coice), Human(flip_icon(icon_coice))]\n # start a game with friend\n if opponent.lower() == \"c1\":\n players = [Human(icon_coice), EasyAi(flip_icon(icon_coice))]\n # start a game with computer\n if opponent.lower() == \"c2\":\n players = [Human(icon_coice), HarderAi(flip_icon(icon_coice))]\n\n start_time = time.time()\n\n \"\"\"\n Load the Game by creating game class object and it takes the Players list\n call its play_game method to start game and return final results\n \"\"\"\n\n game = Game(players=players)\n result = game.play_game()\n ending_time = time.time()\n\n statistic[result] = statistic.get(result, 0) + 1\n\n # calculate game duration\n duration = int(ending_time - start_time)\n duration_string = get_duration_string(duration)\n\n # pass the Game states and it duration to below method\n write_result_to_file(duration_string, statistic)\n\n user_choice = input(\"Would you like to play a game again? 
[y/n]\")\n if user_choice.lower().startswith(\"n\"):\n break", "def main():\n\n # Setup arguments\n parser = argparse.ArgumentParser()\n parser.add_argument('--player1',\n help='Player 1 type, computer or human.',\n type=str\n )\n parser.add_argument('--player2',\n help='Player 2 type, computer or human.',\n type=str\n )\n parser.add_argument('--timed',\n action='store_true',\n help='Whichever player has the most points after one minute wins the game.'\n )\n args = parser.parse_args()\n\n # Check for required arguments and correct values\n if not args.player1 and not args.player2:\n print(\"The --player1 and --player2 arguments are required. Valid types are computer or human. Please try again.\")\n sys.exit()\n\n if not args.player1.lower() == \"computer\" and not args.player1.lower() == \"human\":\n print(\"You entered an invalid player type for player1. Valid types are computer or human. Please try again.\")\n sys.exit()\n\n if not args.player2.lower() == \"computer\" and not args.player2.lower() == \"human\":\n print(\"You entered an invalid player type for player2. Valid types are computer or human. Please try again.\")\n sys.exit()\n\n # Create a queue for the players\n players = Queue()\n\n # Ask for player names if they are human\n player1_name = \"Computer [Player 1]\" if args.player1.lower() == \"computer\" \\\n else input(\"What is Player 1's name? \")\n\n player2_name = \"Computer [Player 2]\" if args.player2.lower() == \"computer\" \\\n else input(\"What is Player 2's name? 
\")\n\n # Use PlayerFactory to get correct player classes and add to players queue\n players.put(PlayerFactory().get_player(player1_name, args.player1.lower()))\n players.put(PlayerFactory().get_player(player2_name, args.player2.lower()))\n\n # Use GameFactory to get correct game class and start the game\n TimedGameProxy(players).start(args.timed)\n\n # Exit the program after the game is over\n sys.exit()", "def main():\n ans = random_word()\n run_game(ans, N_TURNS)", "def main():\n play_game(progression)", "def run_ai():\n print(\"Vivian\") # First line is the name of this AI \n color = int(input()) # Then we read the color: 1 for dark (goes first), \n # 2 for light. \n\n while True: # This is the main loop \n # Read in the current game status, for example:\n # \"SCORE 2 2\" or \"FINAL 33 31\" if the game is over.\n # The first number is the score for player 1 (dark), the second for player 2 (light)\n next_input = input() \n status, dark_score_s, light_score_s = next_input.strip().split()\n dark_score = int(dark_score_s)\n light_score = int(light_score_s)\n\n if status == \"FINAL\": # Game is over. \n print \n else: \n board = eval(input()) # Read in the input and turn it into a Python\n # object. The format is a list of rows. The \n # squares in each row are represented by \n # 0 : empty square\n # 1 : dark disk (player 1)\n # 2 : light disk (player 2)\n \n # Select the move and send it to the manager \n# movei, movej = select_move_minimax(board, color)\n movei, movej = select_move_alphabeta(board, color)\n print(\"{} {}\".format(movei, movej))", "async def main(self):\n\n # Fetching the game memory\n memory = await self._read_memory()\n\n if not memory:\n log.warn(\"Could not find GD. 
Program will wait until it is found.\")\n gdsb.wait_for_gd()\n return\n\n if memory.is_in_level():\n\n if memory.get_level_id() != self.previous_level_id:\n await self.seal_embed()\n\n # Fetch gamestate data\n self.current_level = await self._fetch_level_info()\n \n # Gets wether or not the level being played is the exact same level that was played just previously, so that it will continue to use the same embed\n if self.previous_level_id != self.current_level.id:\n\n self.embed_message = None\n self.previous_embed_message = None\n\n self.session.start_attempts = self.current_level.attempts\n self.session.old_best = self.current_level.best_percent\n self.session.best = 0\n\n else:\n\n if self.previous_embed_message is not None:\n self.embed_message = self.previous_embed_message\n\n self.previous_level_id = self.current_level.id\n\n # So for some reason, the main levels all have their creator blank, so we just set it to RobTop\n if self.current_level.creator == \"\":\n self.current_level.creator = \"RobTop\"\n\n # Getting if you are playing a main level or not. If so, we have to manually set the difficulty using the list I made earlier\n if self.current_level.id in range(1,22):\n \n self.current_level.difficulty = const.MAIN_LEVEL_DIFFICULTIES[self.current_level.id-1]\n\n # Checks if the player is in practice mode or not. 
If they are, it will display a different color\n if self.current_level.is_practice_mode():\n title = \"Practicing: {0}\"\n color = discord.Color.from_rgb(59, 223, 245)\n else:\n title = \"Playing: {0}\"\n color = discord.Color.from_rgb(18, 219, 31)\n\n # A few little extra texts that go next to the title\n extra_text = \"\"\n if self.current_level.percent == 100:\n\n if self.current_level.is_practice_mode():\n extra_text=\" - PRACTICE COMPLETE!\"\n else:\n extra_text=\" - LEVEL COMPLETE!\"\n color = discord.Color.from_rgb(237, 220, 28)\n\n elif self.current_level.best_percent > self.session.old_best:\n extra_text = \" - New Best!\"\n self.session.old_best = self.current_level.best_percent\n\n # Saving the best percent of the session\n if self.current_level.percent > self.session.best and not self.current_level.is_practice_mode():\n self.session.best = self.current_level.percent\n\n # Calculating the current attempts on a level\n self.current_level.attempts = (self.current_level.attempts - self.session.start_attempts) + 1\n\n rating_text = self._get_rating_text()\n category = self._get_category_text()\n\n self.embed.title = title.format(self.current_level.name)\n self.embed.description = f\"By {' | '.join((self.current_level.creator, rating_text, category))}\"\n self.embed.color = color\n\n self.embed.set_thumbnail(url=const.FACES[const.DIFFICULTIES.index(self.current_level.difficulty)])\n\n # Getting user\n user = self.bot.get_user(conf.user)\n\n self.embed.set_author(name=user.display_name, icon_url=user.avatar_url)\n\n progress_bar_state = self._get_progress_bar(self.current_level.percent)\n\n fields = (\n {\"name\": \"Attempt:\", \"value\": self.current_level.attempts, \"inline\": True},\n {\"name\": \"Best %:\", \"value\": f\"{self.current_level.best_percent}%\", \"inline\": True},\n {\"name\": \"Current Progress:\", \"value\": f\"{self.current_level.percent}%{extra_text}\\n{progress_bar_state}\", \"inline\": False}\n )\n\n for i, field in 
enumerate(fields):\n\n if len(self.embed.fields) < len(fields):\n self.embed.add_field(**field)\n else:\n self.embed.set_field_at(i, **field)\n \n self.embed.set_footer(text=\"Level ID: {0}\".format(self.current_level.id))\n \n # Sending embed\n\n channel = self.bot.get_channel(conf.channel)\n\n if not channel:\n log.error(f\"Could not find channel with id: {conf.channel}. Use '{conf.prefix}set_channel' to set the channel.\")\n else:\n #If the channel is found, edit the message the embed has been sent to, and if it dosent exist, create it.\n if self.embed_message is None:\n self.embed_message = await channel.send(embed=self.embed)\n else:\n await self.embed_message.edit(embed=self.embed)\n \n else:\n\n if memory:\n await self.seal_embed()\n\n #Sets some globals so that the embed can be reused if the same level is played again\n self.previous_embed_message = self.embed_message\n self.embed_message = None", "def startGame():\n\n\tprint(\"\\nOK! Let's play!\")\n\tprint(\"--------------------------------------------------------------------------------------\")\n\tprint(\"Note:\")\n\tprint(\"\\tNow you must be kept in your mind a random integer from specific range and I must be guessing that number!\")\n\tprint(\"\\tIf you answer honestly all of my questions I certainly will guess that number!\")\n\tprint(\"--------------------------------------------------------------------------------------\\n\")\n\tgameLogic()", "def main(self):\n _age = info.getInfo(self)\n _flag = game.check_age(self, _age)\n if _flag == False:\n exit()\n game.wants_to_play(0)", "def main():\n global levels\n difficulty = select_difficulty()\n start_game(difficulty)", "def main():\n winning_score = 100\n counter = 1\n game_state_list = []\n\n # Enable command-line arguments\n parser = argparse.ArgumentParser()\n # Add command-line argmuemnt\n parser.add_argument('--numPlayers', type=int)\n args = parser.parse_args()\n\n # Get number of games from user input\n num_of_games = input_int(\"How many 
games do you want to play?: \")\n\n # Get number of players in each game\n for x in range(num_of_games):\n # Note. Use this commented code below if you want to also let the user define the number of players in each game\n #game_state_list.append((Game(6) ,(input_int(\"How many players in Game {}?: \".format((x + 1))))))\n\n # list of tuples (Game class instnace, num_of_plauyers)\n game_state_list.append((Game(6) ,args.numPlayers))\n\n # Play all games. Note that the games are not aware of each other\n for game_state, num_users in game_state_list:\n print \"\\nStarting Game\",counter\n game_loop(game_state,num_users,winning_score)\n counter += 1\n\n print \"Completed all the games!\"", "def main():\n name = input('Enter your Name: ')\n playagain = \"yes\"\n if playagain == \"yes\":\n intro()\n intro_end()\n choice1_end()\n part_1()\n choice2 = attack_or_run()\n part_1_1(choice2)\n scorex = encounter_1(choice2)\n part_1_2(choice2)\n part_1_3()\n print('Do you want to view the Leader Board??: ')\n lb_input = input().lower()\n if lb_input == 'yes' or 'y':\n if str(scorex)[0] == '-':\n leaderboard(score=0, username=name)\n else:\n leaderboard(score=scorex, username=name)\n play()\n else:\n play()", "async def run_game(self):\n await self.run_betting()\n self.force_bet()\n await self.print_players_with_bet()\n time.sleep(self.MESSAGE_GAP)\n cards_msg = await self.send_message(self.channel, \"Retrieving a new deck, shuffling, and dealing cards! 
Please hold!\")\n self.deal_cards()\n time.sleep(self.MESSAGE_GAP)\n await self.edit_message(cards_msg, cards_msg.content + \"\\n\\n\" + self.str_players_with_hand())\n time.sleep(self.MESSAGE_GAP)\n while self.still_playing_game():\n await self.run_round()\n self.ready_new_round_players()\n await self.send_message(self.channel, \"There are no more players eligible to play, so the game is over!\"\n \" Here evaluation to see who won!\\n\" + self.evaluate_game())\n time.sleep(self.MESSAGE_GAP)\n await self.send_message(self.channel, \"Resetting players for next game...\")\n time.sleep(self.MESSAGE_GAP)\n self.reset_players()", "def main() -> None:\n # the current game is initialized with 1, 3, 5, 7 matches on the 4 rows.\n game: List[int] = [1, 3, 5, 7]\n\n print(\"\\nGame of Nim\")\n print( \"===========\")\n display_game(game)\n start = input(\"Do you want to start? (y/n) \")\n print()\n if start==\"y\" or start==\"Y\":\n print(\"Your turn\")\n user_turn(game)\n display_game(game)\n while True:\n print(\"My turn\")\n computer_turn(game)\n display_game(game)\n if is_finished(game):\n print(\"I WON\\n\")\n break\n print(\"Your turn\")\n user_turn(game)\n display_game(game)\n if is_finished(game):\n print(\"YOU WON\\n\")\n break", "def run(self):\n print \"Welcome to the BlackJack game ......\" # print help function if needed\n deckObj = Deck()\n deckObj.shuffle()\n while(not self.checkGameComplete()):\n self.displayGame()\n card = deckObj.deal()\n # ask user for move\n position = raw_input('Please input a number [1-16] for table, or [17-20] for discard list\\n')\n isPass = self.errorChecking(position)\n while(not isPass):\n position = raw_input('Please input a number [1-16] for table, or [17-20] for discard list\\n')\n isPass = self.errorChecking(position)\n # update table\n self.updateTableAndDiscardLs(position,card)\n ### Score Game\n self.displayGame()\n score = self.scoreGame()\n print 'Congratulations! Your final score is:'\n print score\n print 'Game is done... 
Thank you!'", "def new_game():\n # Prints the welcome message to the terminal\n welcome_message()\n # Gets the players name\n player_name = name_input()\n # Creates the players game board\n player_board = GameBoard(player_name, 'player')\n # Creates the players guess board\n user_guess = GameBoard('GUESS', 'user guess')\n # Creates the computers board\n computer_board = GameBoard(\"COMPUTER's\", 'computer')\n # Creates the computers guess board\n computer_guess = GameBoard('COMPUTER GUESS', 'computer guess')\n # Randomly places the computers ships on their board\n computer_board.place_ships()\n # Prints the players board to the terminal for reference\n player_board.print_board()\n # Allows the player to place their ships\n player_board.place_ships()\n time.sleep(2)\n # Prints the players guess board to terminal for reference\n print(PHASE)\n print(' ')\n # Takes turns attacking until winner\n run_game(player_board, user_guess, computer_board, computer_guess)\n # Asks the player if they want to play again or quit\n play_again()" ]
[ "0.6436216", "0.6383936", "0.6263446", "0.6183765", "0.61579436", "0.6104631", "0.6084379", "0.593151", "0.5889294", "0.58867204", "0.5882432", "0.5864209", "0.58618486", "0.582254", "0.58143973", "0.58041996", "0.57793134", "0.57723457", "0.5766427", "0.5762397", "0.5761544", "0.57584363", "0.57583076", "0.5751981", "0.57262063", "0.57093424", "0.569002", "0.5683398", "0.56830204", "0.5662061" ]
0.7992603
0
Checks if the current user has permission to edit the given dataset.
def has_edit_permissions(ps_or_token, selected_dataset_id): try: role = pennsieve_get_current_user_permissions(selected_dataset_id, ps_or_token)["role"] except Exception as e: abort(500, "Could not get permissions for this dataset.") return role in ["owner", "manager"]
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def checkPermissionEditUsers(self):\n user = self.REQUEST.AUTHENTICATED_USER\n\n return bool(user.has_permission(eionet_edit_users, self))", "def edit_allowed(self):\n account = Account.current_user_account\n if account is None:\n return False\n return self.user_can_edit(account.user)", "def can_edit_or_403(self, user):\n if self.get_permission_level(user) < self.OWNER_PERMISSION:\n raise PermissionDenied\n return True", "def can_edit(self, user):\n return self.author_id == user.id or user.is_staff", "def user_can_edit(self, user):\n return user == self.owner", "def can_edit_user(user):\n\tu = current_user._get_current_object()\n\treturn u==user or u.is_admin()", "def can_user_edit(self, user):\n\n return user.is_authenticated and (\n user.has_role('admin') or\n unicode(self.user_id) == user.get_id()\n )", "def can_edit(self, user):\n if user.has_perm('funding.make_application_decisions'):\n # Funding manager can update things later, if required\n return True\n # Applicants can only edit the application before the final review step\n if self.status in ('S', 'U'):\n if self.applicant == user:\n return True\n return False", "def can_edit_or_403(self, user):\n if user.id != self.game_master.id:\n raise PermissionDenied\n return True", "def can_be_edited(self, user):\n return (self.is_public or user == self.owner or\n user in list(self.auth_users.all()))", "def has_object_permission(self, request, view, obj):\n if request.method in permissions.SAFE_METHODS:\n return True\n \n \"\"\"Check if the user has the permission to edit their profile. 
If True it will allow PUT, PATCH & DELETE operations\"\"\"\n return obj.id == request.user.id # returns True or False", "def test_has_perm_post_edit(self):\n self.context['request'].user = User.objects.get(pk=47963)\n allowed = has_perm(self.context, 'forums_forum.post_edit_forum',\n self.forum_1)\n eq_(allowed, True)\n allowed = has_perm(self.context, 'forums_forum.post_edit_forum',\n self.forum_2)\n eq_(allowed, False)", "def has_permission(self, request, view):\n return request.user.group == 'admin'", "def has_write_permission(request):\n user = request.user\n return user.is_superuser", "def test_has_perm_thread_edit(self):\n self.context['request'].user = User.objects.get(pk=47963)\n allowed = has_perm(self.context, 'forums_forum.thread_edit_forum',\n self.forum_1)\n eq_(allowed, True)\n allowed = has_perm(self.context, 'forums_forum.thread_edit_forum',\n self.forum_2)\n eq_(allowed, False)", "def has_object_permission(self, request, view, obj):\n if request.method in permissions.SAFE_METHODS:\n return True\n \n \"\"\"Check if the user has the permission to edit their profile. 
If True it will allow PUT, PATCH & DELETE operations\"\"\"\n return obj.user_profile.id == request.user.id # returns True or False", "def has_object_permission(self, request, view, obj):\n\n #check if method is get i.e user only want to view\n if request.method in permissions.SAFE_METHODS:\n return True\n\n #if method is not get then will check if user wants to edit own profile\n return obj.id == request.user.ids", "def can_edit(self):\n return self._can_edit", "def has_permission(self, request, view):\n user = request.user\n if (\n isinstance(user, TokenUser)\n and LTI_ROLES[self.__class__.role]\n & set(user.token.payload.get(\"roles\", []))\n and user.token.payload.get(\"permissions\", {}).get(\"can_update\", False)\n is True\n ):\n return True\n\n return False", "async def check_can_edit_user(\n authorization_client: AuthorizationClient, req_user_id: str, user_id: str\n):\n admin_tuple, req_admin_tuple = await asyncio.gather(\n authorization_client.get_administrator(user_id),\n authorization_client.get_administrator(req_user_id),\n )\n\n if admin_tuple[1] is None:\n return True\n\n if req_admin_tuple[1] == AdministratorRole.FULL:\n return True\n\n return False", "def CAN_CHANGE_PERMISSIONS(article, user): # pylint: disable=invalid-name\r\n return _is_staff_for_article(article, user)", "def allow_to_edit(user):\n return allow_to_edit_well(user)", "def is_mutable_by(self, user, perm='site.change_localsite'):\n return user.has_perm(perm) or self.admins.filter(pk=user.pk).exists()", "def test_has_perm_or_owns_thread_edit(self):\n me = User.objects.get(pk=118533)\n my_t = Thread.objects.filter(creator=me)[0]\n other_t = Thread.objects.exclude(creator=me)[0]\n self.context['request'].user = me\n perm = 'forums_forum.thread_edit_forum'\n allowed = has_perm_or_owns(self.context, perm, my_t, self.forum_1)\n eq_(allowed, True)\n allowed = has_perm_or_owns(self.context, perm, other_t, self.forum_1)\n eq_(allowed, False)", "def test_only_edit_perm(self):\n 
self.assertStatusCode(self.url, 403)", "def can_edit_dag(self, dag_id: str, user=None) -> bool:\n root_dag_id = self._get_root_dag_id(dag_id)\n dag_resource_name = permissions.resource_name_for_dag(root_dag_id)\n return self.has_access(permissions.ACTION_CAN_EDIT, dag_resource_name, user=user)", "def has_object_update_permission(self, request):\n user = request.user\n if self == user:\n return True\n return user.is_superuser", "def can_retrieve(self, user):\n return user.has_perm('agenda.can_see')", "def has_permission(self, request, view):\n return request.user.group != 'patient'", "def has_object_permission(self, request, view, obj):\n\n # Users can always see and edit their own comments\n if obj.create_user == request.user:\n return True\n\n # And see but not edit those from their others in their own\n # organization\n if obj.create_user.organization == request.user.organization and \\\n request.method in permissions.SAFE_METHODS:\n return True\n\n # Government roles can always view comments\n # and can view or edit privileged comments with correct permission\n if request.user.is_government_user:\n # read\n if request.method in permissions.SAFE_METHODS:\n if obj.privileged_access:\n return request.user.has_perm('DOCUMENTS_VIEW')\n return True\n\n # write\n if request.method not in permissions.SAFE_METHODS:\n if obj.privileged_access:\n return request.user.has_perm('DOCUMENTS_GOVERNMENT_REVIEW')\n return True\n\n # not authorized\n return False" ]
[ "0.73684084", "0.7314556", "0.7221785", "0.718398", "0.71801096", "0.7136714", "0.703597", "0.7032112", "0.69825906", "0.69658035", "0.6953812", "0.69154143", "0.68954784", "0.68173236", "0.6797478", "0.67713076", "0.6757528", "0.6735634", "0.66979504", "0.66878027", "0.667325", "0.6661474", "0.6648876", "0.6559331", "0.655892", "0.65388185", "0.6529599", "0.65161157", "0.6488344", "0.6470639" ]
0.84489244
0
Check if exist bluetooth pairing failed window in current page
def handle_pairing_window(self, wait_time=5): logger.info("Handling bluetooth pairing prompt") sleep(1) try: self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_pair_button, wait_time) except Exception as e: logger.debug('No bluetooth pairing prompt appears in given time') return False else: logger.info('Bluetooth pairing prompt appears, click OK/Pair button to approve pairing') permission_checkbox = self.find_element(self.driver.appium_driver, self.do_not_ask_again_checkbox, 0) if permission_checkbox is not None: permission_checkbox.click() else: logger.info("There is no permission checkbox.") ok_pair_button = self.find_element(self.driver.appium_driver, self.bluetooth_pair_button, 0) if ok_pair_button is not None: ok_pair_button.click() else: logger.info("Cannot find OK/Pair button.") return True
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bt_unpair(self, target):\n is_target_unpaired = False\n try:\n # if phone is not paired with any device, return True\n any_paired_device = self.find_element(self.driver.appium_driver, self.paired_device_list, 0)\n wait(2)\n if any_paired_device is None:\n logger.info(\"There is no paired device.\")\n return True\n\n logger.debug('Unpair {} Bluetooth device'.format(\n target))\n\n # enter paired bluetooth device page\n target_detail_tab_xpath = '//android.widget.TextView[@text=\"{}\"]' \\\n '/../../descendant-or-self::android.widget.LinearLayout/android.widget.LinearLayout/' \\\n 'android.widget.ImageView[@resource-id=\"com.coloros.wirelesssettings:id/deviceDetails\"]'\\\n .format(target)\n try:\n self.driver.appium_driver.find_element_by_xpath(target_detail_tab_xpath).click()\n except:\n logger.error(\"Cannot find the target detail tab.\")\n return False\n else:\n logger.error(\"Found target detail tab and cliked it.\")\n\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n\n fail_cnt = 0\n while bt_unpair_button is None and fail_cnt < 5:\n wait(2)\n fail_cnt += 1\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n\n if fail_cnt == 5:\n logger.error(\"Cannot find bt_unpair_button.\")\n return False\n else:\n logger.info('Found unpair button and clicking it.')\n bt_unpair_button.click()\n is_target_unpaired = True\n\n except Exception as e:\n self.take_screenshot(self.driver.appium_driver, 'bt_unpair')\n logger.warning(\"Need to attempt pair before unpair\")\n logger.warning(repr(e))\n return is_target_unpaired", "def bt_unpair_all(self):\n try:\n detail_tab = self.find_element(self.driver.appium_driver, self.paired_device_list, 0)\n\n while detail_tab is not None:\n detail_tab.click()\n wait(1)\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n if bt_unpair_button is not None:\n bt_unpair_button.click()\n\n wait(1)\n detail_tab = 
self.find_element(self.driver.appium_driver, self.paired_device_list, 0)\n except Exception as e:\n logger.exception(repr(e))\n return False\n else:\n return True", "def bt_is_paired(self):\n is_paired = False\n try:\n self.show_more_for_paired_devices()\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.paired_device_list, 5)\n pair_device_list = self.find_elements(self.driver.appium_driver,\n self.paired_device_list, 0)\n\n logger.debug('Checks if the phone is paired with the any devices')\n if len(pair_device_list) > 0:\n if pair_device_list[0].text.upper() == \"PAIR NEW DEVICE\":\n return False\n\n logger.debug(\n \"phone {} paired with some bluetooth device\".format(\n self.phone_info.bluetooth_name))\n is_paired = True\n\n except Exception as e:\n logger.warning(\"Need to attempt pair before is_paired\")\n return is_paired", "def _bluetooth_check_profile_connection(self):\n profiles = dict()\n output = self.dut.get_conn_devices()\n # need to strip all whitespaces.\n conn_devs = {}\n\n for key in output:\n conn_devs[key.strip()] = output[key].strip()\n for key in conn_devs:\n self.logger.info('%s:%s' % (key, conn_devs[key]))\n if 'XXXXXXXX' in conn_devs[key]:\n profiles[key] = conn_devs[key]\n else:\n profiles[key] = False\n return profiles", "def is_window_exist(self) -> bool:\n self.app.log.info(f\"Checking the page exists {self.name}\")\n windows = self.app.driver.window_handles\n for window in windows:\n try:\n self.app.driver.switch_to.window(window)\n if self.window_anchor is not None:\n raise NotImplementedError\n elif self.title_text is not None and self.title_text in self.app.driver.title:\n self.window = window\n self.app.log.info('Window handle found by title')\n break\n elif self.sub_url is not None and self.sub_url in self.app.driver.current_url:\n self.window = window\n self.app.log.info('Window handle found by url')\n break\n except exceptions.NoSuchWindowException:\n self.app.log.info('Some window was closed. 
Mb its missing notify')\n if self.window is None:\n self.app.log.info('Unable to find window handler')\n return False\n else:\n return True", "def skip_connect_your_computer_screen(self):\n if self.driver.wait_for_object(\"connect_your_computer_title\", timeout=10, raise_e=False):\n self.driver.click(\"connect_your_computer_not_now_btn\")", "def bt_is_connected(self):\n try:\n is_bluetooth_on = self.bt_enabled()\n\n # if bluetooth is OFF then throw Exception\n if not is_bluetooth_on:\n logger.error(\"The bluetooth is disabled on {}\".format(self.phone_info.bluetooth_name))\n\n self.bt_radio(\"on\")\n # return False\n # sys.exit(0)\n\n # displays all paired devices\n self.show_more_for_paired_devices()\n\n connected_devices = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_indicator, 0)\n\n time.sleep(1)\n if len(connected_devices) > 0:\n logger.debug(\n \"phone {} is connected with some bluetooth device\".format(\n self.phone_info.bluetooth_name))\n return True\n except Exception as e:\n logger.warning(\n \"Need to attempt connect before checking connection status.\")\n\n logger.warning(repr(e))\n # raise\n return False", "def card_failure(self): \n handles = self.driver.window_handles\n while len(handles) != 3:\n handles = self.driver.window_handles\n self.driver.switch_to_window(handles[2]) \n WebDriverWait(self.driver, 30).until(EC.visibility_of_element_located((By.CSS_SELECTOR,'.success')))\n self.driver.find_element_by_class_name(\"danger\").click()\n self.driver.switch_to_window(handles[0])", "def bt_is_paired_to(self, paired_bluetooth_device):\n is_paired_with_device = False\n try:\n bt_is_paired = self.bt_is_paired()\n if not bt_is_paired:\n return is_paired_with_device\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.paired_device_list, 10)\n pair_element = self.find_elements(self.driver.appium_driver,\n self.paired_device_list, 0)\n\n\n for index in range(len(pair_element)):\n if 
self.is_same_bluetooth_name(pair_element[index],\n paired_bluetooth_device):\n is_paired_with_device = True\n break\n except Exception as e:\n logger.warning(\"Need to attempt pair or unpair before is_paired.\")\n logger.warning(repr(e))\n # raise\n return is_paired_with_device", "def bt_try_connect(self, bluetooth_device_name_to_connect,\n contact_sharing=False): # TODO: Need to update to\n # use the new/refactored bt_connect() design from above.\n is_bluetooth_connect = False\n try:\n is_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n if is_already_connected is True:\n is_bluetooth_connect = True\n else:\n is_bt_paired = self.bt_is_paired_to(\n bluetooth_device_name_to_connect)\n if contact_sharing:\n if is_bt_paired:\n if self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.previously_paired_device_button, 5)\n self.find_element(self.driver.appium_driver,\n self.previously_paired_device_button,\n 0).click()\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver, self.paired_device_list,\n 10)\n pair_element = self.find_elements(\n self.driver.appium_driver, self.paired_device_list,\n 1)\n for index in range(len(pair_element)):\n if self.is_same_bluetooth_name(pair_element[index],\n bluetooth_device_name_to_connect):\n pair_element[index].click()\n # self._bt_swipe_and_connect(pair_element,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... 
(So far)\n is_bluetooth_connect = True\n self._go_to_connected_device_screen(\n no_of_back_click=1)\n return is_bluetooth_connect\n else:\n if is_bt_paired:\n self.bt_unpair(bluetooth_device_name_to_connect)\n self.bt_radio('off')\n self.bt_radio('on')\n\n try:\n if '8.1' in self.phone_info.os_version or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n sleep(10)\n except:\n logger.debug(\"Pair new device option is not available\")\n is_device_found = False\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 5)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 1)\n # Wait till bluetooth device found in list and click when it\n # is visible in list\n for retry in range(1):\n if retry == 0:\n for index in range(len(element_list)):\n element_text = element_list[index].text\n # For some reason my iPhone 6 (iOS 11.1.1) is\n # getting stuck here because one of the\n # element's text is None.\n # So adding bit to ignore that.\n if type(element_text) is not str:\n logger.warn(\n \"Found pairing list element's text was \"\n \"None! Ignoring for now.\")\n continue\n if self.is_same_bluetooth_name(element_list[index],\n bluetooth_device_name_to_connect):\n element_list[index].click()\n # self._bt_swipe_and_connect(element_list,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... 
(So far)\n logger.debug(\"Connecting to \" +\n bluetooth_device_name_to_connect)\n is_device_found = True\n # NOTE: Removed a bunch of stuff after this...\n break\n else:\n is_device_found = self._bt_retry_to_connect(\n bluetooth_device_name_to_connect)\n if is_device_found == False:\n if '8.1' in self.phone_info.os_version \\\n or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.bt_radio('off')\n self.bt_radio('on')\n self.perform_bottom_to_up_swipe(\n self.driver.appium_driver)\n logger.debug(\"Retries count : \" + str(retry))\n sleep(1)\n else:\n # The below can become strangely slow (take ~12\n # seconds) randomly, so skipping it...\n # is_bt_button_visible = self.__verify_current_screen()\n # logger.debug(\"The BT button is visible? {\n # }\".format(is_bt_button_visible))\n # if not is_bt_button_visible:\n # self.__retry_to_bt_connect(\n # bluetooth_device_name_to_connect)\n break\n if is_device_found:\n is_bluetooth_connect = True\n else:\n self.take_screenshot(self.driver.appium_driver,\n 'bt_connect')\n logger.error(\"Not connecting to given mobile Device\")\n except Exception as e:\n if '8.1' in self.phone_info.os_version or \\\n self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.take_screenshot(self.driver.appium_driver, 'bt_connect')\n logger.error(\n \"Connection is not successfully with bluetooth device\")\n logger.error(repr(e))\n return is_bluetooth_connect", "def dnt_share_app():\r\n msg, status = \"\", True\r\n try:\r\n\r\n 'Click on Do not share button'\r\n flag1 = ui_controls.button(get_obj_identifier('a'))\r\n #flag2 = ui_controls.button(get_obj_identifier('share_dontShare_btn'))\r\n \r\n \r\n\r\n status = False if not(flag1) else True\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n status = False\r\n return status, msg", "def 
MacSecurityhistory(window,refrenceid):\n try:\n appbuttons = getAppButtons(window)\n atomacclick(appbuttons[1])\n time.sleep(5)\n appbuttonsnew = getAppButtons(window)\n atomacclick(appbuttonsnew[18])\n History_window = getChildwindows(refrenceid)\n History_window_buttons = getAppButtons(History_window)\n atomacclick(History_window_buttons[1])\n ldtp.wait(3)\n atomacclick(History_window_buttons[2])\n ldtp.wait(3)\n atomacclick(History_window_buttons[3])\n ldtp.wait(3)\n except Exception as er:\n print \"Not able to click on MacSecurityhistory\"\n return False", "def protectMoreDevices(button):\n try:\n atomacclick(button)\n except Exception as er:\n return False\n print \"Not able to click on protectMoreDevices button\"", "def the_user_should_not_be_able_to_connect_to_another_device():\n print(\"Trying to connect 2 devices at once\")\n bln_result = web_app.connect_to_device2()\n assert(bln_result, False)", "def bt_start_discovery(self):\n is_start_discovery = False\n try:\n is_bluetooth_on = self.bt_radio('on')\n if '8.1' in self.phone_info.os_version:\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n is_bluetooth_on = True\n if is_bluetooth_on:\n logger.debug(\"Bluetooth discovery Stared on {}\".format(\n self.phone_info.bluetooth_name))\n is_start_discovery = True\n else:\n logger.debug(\"Bluetooth discovery not Stared on {}\".format(\n self.phone_info.bluetooth_name))\n except Exception as e:\n logger.error(\"Trun on Bluetooth Button is not Visible\")\n logger.error(repr(e))\n return is_start_discovery", "def check_for_activate(self):\n try:\n # Attempt to activate. If the user has completed pairing on the,\n # backend, this will succeed. 
Otherwise it throws and HTTPError()\n\n token = self.data.get(\"token\")\n login = self.api.activate(self.state, token) # HTTPError() thrown\n\n # When we get here, the pairing code has been entered on the\n # backend and pairing can now be saved.\n # The following is kinda ugly, but it is really critical that we\n # get this saved successfully or we need to let the user know that\n # they have to perform pairing all over again at the website.\n try:\n IdentityManager.save(login)\n except Exception as e:\n self.log.debug(\"First save attempt failed: \" + repr(e))\n time.sleep(2)\n try:\n IdentityManager.save(login)\n except Exception as e2:\n # Something must be seriously wrong\n self.log.debug(\"Second save attempt failed: \" + repr(e2))\n self.abort_and_restart()\n\n if mycroft.audio.is_speaking():\n # Assume speaking is the pairing code. Stop TTS of that.\n mycroft.audio.stop_speaking()\n\n self.enclosure.activate_mouth_events() # clears the display\n\n # Notify the system it is paired\n self.gui.show_page(\"pairing_done.qml\", override_idle=False)\n self.bus.emit(Message(\"mycroft.paired\", login))\n\n self.pairing_performed = True\n with self.pair_dialog_lock:\n if self.mycroft_ready:\n # Tell user they are now paired\n self.speak_dialog(self.paired_dialog)\n mycroft.audio.wait_while_speaking()\n else:\n self.speak_dialog(\"wait.for.startup\")\n mycroft.audio.wait_while_speaking()\n\n # Un-mute. 
Would have been muted during onboarding for a new\n # unit, and not dangerous to do if pairing was started\n # independently.\n self.bus.emit(Message(\"mycroft.mic.unmute\", None))\n\n # Send signal to update configuration\n self.bus.emit(Message(\"configuration.updated\"))\n\n # Allow this skill to auto-update again\n self.reload_skill = True\n except HTTPError:\n # speak pairing code every 60th second\n with self.counter_lock:\n if self.count == 0:\n self.speak_code()\n self.count = (self.count + 1) % 6\n\n if time.monotonic() > self.time_code_expires:\n # After 20 hours the token times out. Restart\n # the pairing process.\n with self.counter_lock:\n self.count = -1\n self.data = None\n self.handle_pairing()\n else:\n # trigger another check in 10 seconds\n self.__create_activator()\n except Exception as e:\n self.log.debug(\"Unexpected error: \" + repr(e))\n self.abort_and_restart()", "def isconnected(self) -> bool:", "def detect_obscuring_windows(self):\n try:\n self.driver.find_element_by_class_name(\"selected\").click()\n except ElementClickInterceptedException:\n logger.info(\"Detecting element obscuring the window\")\n return True\n return False", "def check_remote_pairing(ignore_errors):\n try:\n DeviceApi().get()\n return True\n except HTTPError as e:\n if e.response.status_code == 401:\n return False\n error = e\n except Exception as e:\n error = e\n\n LOG.warning('Could not get device info: {}'.format(repr(error)))\n\n if ignore_errors:\n return False\n\n if isinstance(error, HTTPError):\n if connected():\n raise BackendDown from error\n else:\n raise InternetDown from error\n else:\n raise error", "def check_connection():\n if connected():\n ws.emit(Message('mycroft.internet.connected'))\n # check for pairing, if not automatically start pairing\n if not is_paired():\n # begin the process\n payload = {\n 'utterances': [\"pair my device\"],\n 'lang': \"en-us\"\n }\n ws.emit(Message(\"recognizer_loop:utterance\", payload))\n else:\n thread = Timer(1, 
check_connection)\n thread.daemon = True\n thread.start()", "def test_bond_buttons_inactive(self):\n self.assertFalse(PageObject.find_element\n (InterfacesSettings(), 'bond_interfaces').\n is_enabled())\n self.assertFalse(PageObject.find_element\n (InterfacesSettings(), 'unbond_interfaces').\n is_enabled())", "def homeCustomScan(window,referenceid):\n try:\n allbuttons = getAppButtons(window)\n atomacclick(allbuttons[0])\n atomacclick(allbuttons[19])\n time.sleep(5)\n Runwindow = getChildwindows(referenceid)\n buttons = getAppButtons(Runwindow)\n atomacclick(buttons[1])\n newb = getAllObjects(Runwindow)\n time.sleep(4)\n atomacclick(newb[2])\n except Exception as er:\n print(\"Not able to click on HomeCustomScan\")\n return False", "def is_dialing(self) -> bool:", "def MacSecurityqurantine(window,refrenceid):\n try:\n appbuttons = getAppButtons(window)\n atomacclick(appbuttons[1])\n time.sleep(5)\n appbuttonsnew = getAppButtons(window)\n print appbuttonsnew[17]\n time.sleep(5)\n atomacclick(appbuttonsnew[17])\n Quarantine_window = getChildwindows(refrenceid)\n Quarantine_window_buttons = getAppButtons(Quarantine_window)\n print \"Quarantine_window_buttons\", Quarantine_window_buttons\n time.sleep(3)\n atomacclick(Quarantine_window_buttons[0])\n time.sleep(3)\n '''atomacclick(Quarantine_window_buttons[1])\n time.sleep(3)\n atomacclick(Quarantine_window_buttons[5])\n time.sleep(3)\n atomacclick(Quarantine_window_buttons[6])'''\n except Exception as er:\n print \"Not able to click on MacSecurityqurantine\"\n return False", "def check_websockets(window_info):\n logger.info(\"websocket OK\")\n scall(window_info, \"df.monitoring.checked_ws\", to=[WINDOW])", "def IsOk(self):\r\n \r\n return self.window != None", "def has_been_paired():\n # This forces a load from the identity file in case the pairing state\n # has recently changed\n id = IdentityManager.load()\n return id.uuid is not None and id.uuid != \"\"", "def is_incall_connected(self) -> bool:", "def 
is_incall_dialing(self) -> bool:", "def verify_dialogue_with_wrong_password():\r\n status = False\r\n if g.platform == 'android':\r\n \r\n expected_dialogue_title = g.popup_title_error\r\n expected_dialogue_message = g.popup_message_password_blank\r\n flag1=verify_wrong_popup_dialogue(title=expected_dialogue_title, message=expected_dialogue_message, name_of_control_to_click='popup_default_button')\r\n \r\n \r\n \r\n\r\n status = False if not (flag1) else True\r\n else:\r\n \r\n expected_dialogue_title = g.ios_wrong_passwrd_error \r\n expected_dialogue_message = g.ios_wrong_password_message\r\n verify_wrong_popup_dialogue(title=expected_dialogue_title, message=expected_dialogue_message, name_of_control_to_click='popup_default_button')\r\n \r\n\r\n status = False if not ( ) else True\r\n\r\n return status" ]
[ "0.5990399", "0.59855914", "0.58938396", "0.58894676", "0.578745", "0.5731907", "0.5639497", "0.56267965", "0.5602063", "0.5560094", "0.5515805", "0.55017823", "0.5442884", "0.5395704", "0.53537095", "0.53509915", "0.5342693", "0.5328098", "0.5326035", "0.5306663", "0.5292037", "0.5290287", "0.528964", "0.52783287", "0.52753866", "0.5262606", "0.52466875", "0.5242742", "0.52244127", "0.52054304" ]
0.66436934
0
Checks if the phone is paired with the last address we attempted a pairing operation with. To be used directly after pair(mac) or unpair(mac).
def bt_is_paired(self): is_paired = False try: self.show_more_for_paired_devices() self.wait_till_element_to_be_visible(self.driver.appium_driver, self.paired_device_list, 5) pair_device_list = self.find_elements(self.driver.appium_driver, self.paired_device_list, 0) logger.debug('Checks if the phone is paired with the any devices') if len(pair_device_list) > 0: if pair_device_list[0].text.upper() == "PAIR NEW DEVICE": return False logger.debug( "phone {} paired with some bluetooth device".format( self.phone_info.bluetooth_name)) is_paired = True except Exception as e: logger.warning("Need to attempt pair before is_paired") return is_paired
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def is_paired(self, phone):\n bt_util = BTUtils()\n target_addr = self.dut.bluetooth_address\n return bt_util.android_device_in_paired_state(phone, target_addr)", "def has_been_paired():\n # This forces a load from the identity file in case the pairing state\n # has recently changed\n id = IdentityManager.load()\n return id.uuid is not None and id.uuid != \"\"", "def pair(self, mac_address):\n try:\n self.send(f\"pair {mac_address}\", 4)\n except Exception as e:\n logger.error(e)\n return False\n else:\n res = self.process.expect(\n [\"Failed to pair\", \"Pairing successful\", pexpect.EOF]\n )\n return res == 1", "def pair(self, mac_address):\n try:\n out = self.get_output(\"pair \" + mac_address, 4)\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n res = self.child.expect([\"Failed to pair\", \"Pairing successful\", pexpect.EOF])\n success = True if res == 1 else False\n return success", "def is_paired(ignore_errors=True):\n global _paired_cache\n if _paired_cache:\n # NOTE: This assumes once paired, the unit remains paired. 
So\n # un-pairing must restart the system (or clear this value).\n # The Mark 1 does perform a restart on RESET.\n return True\n\n api = DeviceApi()\n _paired_cache = api.identity.uuid and check_remote_pairing(ignore_errors)\n\n return _paired_cache", "def bt_is_paired_to(self, paired_bluetooth_device):\n is_paired_with_device = False\n try:\n bt_is_paired = self.bt_is_paired()\n if not bt_is_paired:\n return is_paired_with_device\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.paired_device_list, 10)\n pair_element = self.find_elements(self.driver.appium_driver,\n self.paired_device_list, 0)\n\n\n for index in range(len(pair_element)):\n if self.is_same_bluetooth_name(pair_element[index],\n paired_bluetooth_device):\n is_paired_with_device = True\n break\n except Exception as e:\n logger.warning(\"Need to attempt pair or unpair before is_paired.\")\n logger.warning(repr(e))\n # raise\n return is_paired_with_device", "def is_valid_pair(self, pair, exchange):\n pairs = self.ccxt.get_pairs(exchange)\n print(pairs)\n return pair in pairs", "def pair(self, phone, companion_app=True):\n bt_util = BTUtils()\n target_addr = self.dut.bluetooth_address\n if bt_util.android_device_in_connected_state(phone, target_addr):\n self.logger.info('Already paired and connected, skipping pairing.')\n else:\n if bt_util.android_device_in_paired_state(phone, target_addr):\n self.logger.info(\n 'Device is paired but not connected, unpair first.')\n if not bt_util.bt_unpair(phone, self.dut):\n raise TestActsError('Unable to unpair the device')\n bt_util.bt_pair_and_connect(phone, self.dut)\n self.logger.info('DEVICE PAIRED')\n if companion_app:\n profiles = PROFILES_CONNECTED.copy()\n profiles.update(COMP_PROFILE_CONNECTED)\n else:\n profiles = PROFILES_CONNECTED\n self.logger.info(profiles)\n if not bt_util.check_device_bt(device=self.dut, profiles=profiles):\n raise TestActsError('Dut BT status check failed.')\n else:\n return True", "def 
_get_pairing_status(self):\n try:\n self.is_paired = is_paired(ignore_errors=False)\n except BackendDown:\n LOG.error('Cannot complete device updates due to backend issues.')\n self.backend_down = True\n\n if self.is_paired:\n LOG.info('Device is paired')", "def _ensure_device_is_paired(self):\n if not self.is_paired and not self.backend_down:\n LOG.info('Device not paired, invoking the pairing skill')\n payload = dict(utterances=[\"pair my device\"], lang=\"en-us\")\n self.bus.emit(Message(\"recognizer_loop:utterance\", payload))", "def bt_unpair(self, target):\n is_target_unpaired = False\n try:\n # if phone is not paired with any device, return True\n any_paired_device = self.find_element(self.driver.appium_driver, self.paired_device_list, 0)\n wait(2)\n if any_paired_device is None:\n logger.info(\"There is no paired device.\")\n return True\n\n logger.debug('Unpair {} Bluetooth device'.format(\n target))\n\n # enter paired bluetooth device page\n target_detail_tab_xpath = '//android.widget.TextView[@text=\"{}\"]' \\\n '/../../descendant-or-self::android.widget.LinearLayout/android.widget.LinearLayout/' \\\n 'android.widget.ImageView[@resource-id=\"com.coloros.wirelesssettings:id/deviceDetails\"]'\\\n .format(target)\n try:\n self.driver.appium_driver.find_element_by_xpath(target_detail_tab_xpath).click()\n except:\n logger.error(\"Cannot find the target detail tab.\")\n return False\n else:\n logger.error(\"Found target detail tab and cliked it.\")\n\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n\n fail_cnt = 0\n while bt_unpair_button is None and fail_cnt < 5:\n wait(2)\n fail_cnt += 1\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n\n if fail_cnt == 5:\n logger.error(\"Cannot find bt_unpair_button.\")\n return False\n else:\n logger.info('Found unpair button and clicking it.')\n bt_unpair_button.click()\n is_target_unpaired = True\n\n except Exception as e:\n 
self.take_screenshot(self.driver.appium_driver, 'bt_unpair')\n logger.warning(\"Need to attempt pair before unpair\")\n logger.warning(repr(e))\n return is_target_unpaired", "async def unpair(self) -> bool:\n return await self._backend.unpair()", "def is_pair(hand):\n\tis_a_pair = False\n\ti = 0\n\twhile i < 13:\n\t\tif hand[i] == 2:\n\t\t\tis_a_pair = True\n\t\ti += 1 \n\thigh_card = 0\n\tj = 0\n\twhile j < 13 and is_a_pair == True:\n\t\tif hand[j] == 2 and j >= high_card:\n\t\t\thigh_card = j\n\t\tj += 1\n\tif is_a_pair:\n\t\treturn True, high_card\n\telse:\n\t\treturn False", "def has_pair(self):\n self.suit_hist()\n for val in self.ranks.values():\n if val == 2:\n self.rank_per_hand['0'] = \"pair\"\n return True\n return False", "def requires_pairing(cls) -> bool:\n return False", "def check_pairs(self, all_pr, curr):\n flag = True\n for pair_ox in all_pr:\n if (curr[0] == pair_ox or curr[1] == pair_ox):\n flag = False\n return flag", "def is_paired_list(self, key):\n if key in ('pattern','points'):\n return True\n else:\n return False", "def hasConflicts(self):\n partners = {}\n for first, second in self:\n #print >>sys.stderr, \"first:\", first, \"second:\", second\n if first is None:\n if second is None:\n continue #no pairing info\n else:\n first, second = second, first #swap order so None is 2nd\n if second is None: #check first isn't paired\n if partners.get(first, None) is not None:\n print >>sys.stderr, \"here1\"\n print >>sys.stderr, \"first:\", first, \"second:\", second\n return True\n else:\n partners[first] = None\n else: #first and second were both non-empty: check partners\n if first in partners:\n if partners[first] != second:\n print >>sys.stderr, \"here2\"\n print >>sys.stderr, \"first:\", first, \"second:\", second, \"partners[first]\", partners[first]\n print \"partners:\", partners\n return True\n if second in partners:\n if partners[second] != first:\n print >>sys.stderr, \"here3\"\n print >>sys.stderr, \"first:\", first, \"second:\", 
second, \"partners[second]:\", partners[second]\n return True\n #add current pair to the list of constraints\n partners[first] = second\n partners[second] = first\n #can only get here if there weren't conflicts\n return False", "def is_router(self):\n # @todo: Rewrite\n return self.address_set.count() > 1", "def check_address(intent, session):\n user_data = database.get_user_data(session['user']['userId'])\n if not user_data:\n return reply.build(\"I don't remember any of your addresses.\",\n is_end=True)\n\n # Standardize the input address requester\n which_raw = intent.get('slots', {}).get('which_address', {}).get('value')\n if not which_raw:\n # We might not have gotten anything in the slot.\n which = None\n else:\n which = difflib.get_close_matches(which_raw.lower(),\n ORIGIN_NAMES + DEST_NAMES, n=1)\n which = which[0] if which else None\n\n if which in ORIGIN_NAMES:\n which_lab = 'origin'\n elif which in DEST_NAMES:\n which_lab = 'destination'\n else:\n # If nothing was filled in the slot,\n # give the user both addresses.\n both = [_speak_address(wh, user_data)\n for wh in ['origin', 'destination']\n if wh in user_data]\n return reply.build(\" \".join(both), is_end=True)\n return reply.build(_speak_address(which_lab, user_data), is_end=True)", "def ExclusiveAddressUse(self) -> bool:", "def ExclusiveAddressUse(self) -> bool:", "def ExclusiveAddressUse(self) -> bool:", "def ExclusiveAddressUse(self) -> bool:", "def is_incall_dialing(self) -> bool:", "def bt_try_connect(self, bluetooth_device_name_to_connect,\n contact_sharing=False): # TODO: Need to update to\n # use the new/refactored bt_connect() design from above.\n is_bluetooth_connect = False\n try:\n is_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n if is_already_connected is True:\n is_bluetooth_connect = True\n else:\n is_bt_paired = self.bt_is_paired_to(\n bluetooth_device_name_to_connect)\n if contact_sharing:\n if is_bt_paired:\n if 
self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.previously_paired_device_button, 5)\n self.find_element(self.driver.appium_driver,\n self.previously_paired_device_button,\n 0).click()\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver, self.paired_device_list,\n 10)\n pair_element = self.find_elements(\n self.driver.appium_driver, self.paired_device_list,\n 1)\n for index in range(len(pair_element)):\n if self.is_same_bluetooth_name(pair_element[index],\n bluetooth_device_name_to_connect):\n pair_element[index].click()\n # self._bt_swipe_and_connect(pair_element,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... (So far)\n is_bluetooth_connect = True\n self._go_to_connected_device_screen(\n no_of_back_click=1)\n return is_bluetooth_connect\n else:\n if is_bt_paired:\n self.bt_unpair(bluetooth_device_name_to_connect)\n self.bt_radio('off')\n self.bt_radio('on')\n\n try:\n if '8.1' in self.phone_info.os_version or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n sleep(10)\n except:\n logger.debug(\"Pair new device option is not available\")\n is_device_found = False\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 5)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 1)\n # Wait till bluetooth device found in list and click when it\n # is visible in list\n for retry in range(1):\n if retry == 0:\n for index in range(len(element_list)):\n 
element_text = element_list[index].text\n # For some reason my iPhone 6 (iOS 11.1.1) is\n # getting stuck here because one of the\n # element's text is None.\n # So adding bit to ignore that.\n if type(element_text) is not str:\n logger.warn(\n \"Found pairing list element's text was \"\n \"None! Ignoring for now.\")\n continue\n if self.is_same_bluetooth_name(element_list[index],\n bluetooth_device_name_to_connect):\n element_list[index].click()\n # self._bt_swipe_and_connect(element_list,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... (So far)\n logger.debug(\"Connecting to \" +\n bluetooth_device_name_to_connect)\n is_device_found = True\n # NOTE: Removed a bunch of stuff after this...\n break\n else:\n is_device_found = self._bt_retry_to_connect(\n bluetooth_device_name_to_connect)\n if is_device_found == False:\n if '8.1' in self.phone_info.os_version \\\n or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.bt_radio('off')\n self.bt_radio('on')\n self.perform_bottom_to_up_swipe(\n self.driver.appium_driver)\n logger.debug(\"Retries count : \" + str(retry))\n sleep(1)\n else:\n # The below can become strangely slow (take ~12\n # seconds) randomly, so skipping it...\n # is_bt_button_visible = self.__verify_current_screen()\n # logger.debug(\"The BT button is visible? 
{\n # }\".format(is_bt_button_visible))\n # if not is_bt_button_visible:\n # self.__retry_to_bt_connect(\n # bluetooth_device_name_to_connect)\n break\n if is_device_found:\n is_bluetooth_connect = True\n else:\n self.take_screenshot(self.driver.appium_driver,\n 'bt_connect')\n logger.error(\"Not connecting to given mobile Device\")\n except Exception as e:\n if '8.1' in self.phone_info.os_version or \\\n self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.take_screenshot(self.driver.appium_driver, 'bt_connect')\n logger.error(\n \"Connection is not successfully with bluetooth device\")\n logger.error(repr(e))\n return is_bluetooth_connect", "def is_dialing(self) -> bool:", "def get_address(self,device_list):\n for device in device_list:\n if device.name == self.NAME:\n self.ADDR = device.address\n return True\n return False", "async def pair(self, *args, **kwargs) -> bool:\n return await self._backend.pair(*args, **kwargs)", "def validate_pairs(pairs, historical_pairs):\n if pairs is None:\n return False\n for p in pairs:\n if p in historical_pairs:\n return False\n return True" ]
[ "0.7128904", "0.70029426", "0.6926195", "0.6679933", "0.6630855", "0.63567245", "0.6187539", "0.60254407", "0.59765", "0.5807887", "0.5801837", "0.56981874", "0.560043", "0.55862856", "0.55387974", "0.5527718", "0.5525297", "0.5495494", "0.5479396", "0.53446454", "0.53356194", "0.53356194", "0.53356194", "0.53356194", "0.5334393", "0.5325869", "0.53070945", "0.5289", "0.52738637", "0.5269067" ]
0.7010961
1
Get list of discovered blutooth devices on mobile device
def bt_get_discovered_devices(self): discovered_bluetooth_device = [] try: self.bt_radio('on') if '8.1' in self.phone_info.os_version: self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_pair_new_device_in_android_8_1_button, 10) self.find_element(self.driver.appium_driver, self.bluetooth_pair_new_device_in_android_8_1_button, 2).click() time.sleep(10) self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_discovered_device_list, 10) element_list = self.find_elements(self.driver.appium_driver, self.bluetooth_discovered_device_list, 1) # To add connected bluetooth device name in list for index in range(len(element_list)): discovered_bluetooth_device.append( str(element_list[index].text.replace('\u200e', ''))) logger.debug("List of Discovered Devices:" + str( discovered_bluetooth_device)) except Exception as e: self.take_screenshot(self.driver.appium_driver, '__retry_to_bt_connect') logger.error("No device are discoverable .") logger.error(repr(e)) return discovered_bluetooth_device
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def list_mobile_global_beacons():\n return GlobalBeacon.list_mobile()", "def get_devices(self):\n return self.api_request('GET', self.url + '/device', {})", "def list_devices():\r\n DeviceManagerCLI.BuildDeviceList()\r\n return DeviceManagerCLI.GetDeviceList()", "async def find_devices() -> List[DeviceInfo]:\n return await Discovery.search_devices()", "def get_devices():\n devices = []\n for device_id in range(pm.lib.Pm_CountDevices()):\n devices.append(DeviceInfo(device_id))\n\n return devices", "def list_devices(self):\n response = self.oauth.get(url=f'{self.base_url}/json/devices/list')\n\n result = response.json()['device']\n for device in result:\n print(device)", "def get_devices(mac=None):\n wemo_devices = discover_wemo()\n\n if mac:\n dev = get_device(mac, wemo_devices)\n if not dev:\n return []\n return [dev]\n\n return wemo_devices", "def list_devices(self):\n return [x for x in self.devices.keys()]", "def list_bridges(self):\n return [x for x,y in self.devices.items() if y.device_type == \"Bridge\"]", "def listDevices(self):\n count = 0\n for device in self:\n count += 1\n printLog(\"Device \" + str(count) + \": '%s %s (%s, %s, %s)'\" % (\n device.make, device.model, device.deviceId, device.androidVersion, device.operator))\n if device.idle:\n printLog(\"[Idle]\")\n else:\n printLog(\"[Busy]\")", "def getDevices():\n \n scannedDevices = list()\n \n proc = subprocess.Popen('bluetoothctl scan on', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=8192, universal_newlines=True)\n \n time.sleep(10)\n \n proc.stdin.write('scan off')\n \n try:\n stdout, stderr = proc.communicate()\n except subprocess.TimeoutExpired:\n proc.kill()\n stdout, stderr = proc.communicate()\n\n ansiEscapePattern = re.compile(r'\\x1B[@-_][0-?]*[ -/]*[@-~]')\n stdout = ansiEscapePattern.sub('', stdout)\n \n #deviceNamePattern = re.compile('^\\[NEW\\] Device [A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2} ')\n \n for 
line in stdout.split('\\n'):\n if '[NEW] Device' in line:\n device = list()\n device.append(line[13:31])\n device.append(line[31:])\n scannedDevices.append(device)\n \n return scannedDevices", "def list_devices():\n return _lib.SeaTeaseAPI().list_devices()", "async def async_find_available_devices(hass: HomeAssistant, username: str, password: str):\n\n result = []\n\n devices = await DeviceScanner.async_find_devices(hass)\n\n _LOGGER.debug(\"Found %d AwoX devices\" % (len(devices)))\n\n for mac, dev in devices.items():\n _LOGGER.debug(\"Device %s [%s]\" % (dev['name'], dev['mac']))\n try:\n mylight = DeviceScanner._connect(dev['mac'], username, password)\n if mylight.session_key:\n result.append({\n 'mac': dev['mac'],\n 'name': mylight.getModelNumber()\n })\n mylight.disconnect()\n except:\n _LOGGER.debug('Failed to connect [%s]' % dev['mac'])", "def get_devices(self):\n devices = self.get(\"event/device\")", "def user_sends_get_call_to_the_devices():\n web_app.list_devices()", "def get_available_devices(self):\n try:\n out = self.get_output(\"devices\")\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n available_devices = []\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n\n return available_devices", "def list_devices(cls):\n # get all matching devices\n return usb.core.find(\n find_all=True,\n custom_match=lambda dev: (\n dev.idVendor == cls.vendor_id and dev.idProduct in cls.product_ids\n ),\n )", "def get_discoverable_devices(self):\n available = self.get_available_devices()\n paired = self.get_paired_devices()\n return [d for d in available if d not in paired]", "def scan_chip_ble_devices(devCtrl):\n devices = []\n bleMgr = BleManager(devCtrl)\n bleMgr.scan(\"-t 10\")\n\n for device in bleMgr.peripheral_list:\n devIdInfo = bleMgr.get_peripheral_devIdInfo(device)\n if devIdInfo:\n devInfo = devIdInfo.__dict__\n devInfo[\"name\"] = device.Name\n devices.append(devInfo)\n\n return 
devices", "def get_devices():\n global managed_objects\n global devices_by_adr\n \n devices_by_adr = {}\n \n r = re.compile(\"\\/org\\/bluez\\/hci\\d*\\/dev\\_(.*)\")\n # e.g., match a string like this:\n # /org/bluez/hci0/dev_58_C9_35_2F_A1_EF\n \n for key, value in managed_objects.items():\n # print(\"key=\", key)\n m = r.match(key)\n if m is not None:\n dev_str = m.group(1) # we have a device string!\n # print(\"dev_str=\", dev_str)\n # let's flatten that dict a bit\n devices_by_adr[dev_str] = value[\"org.bluez.Device1\"]", "def scan_devices(self):\n self._update_info()\n return [client[\"mac\"] for client in self.last_results]", "def GetAllDevices(self):\n\n return list(self.YieldAllDevices())", "def devices(self):\n return self.enumerate_devices()", "def scan_devices(self):\n self._update_info()\n\n return [client['mac'] for client in self.last_results]", "def list_devices(self):\n xml = str(self._server.listDevices())\n return self._parse_cabling_xml(xml)", "def getDevices(i):\n devices = Account['KTFLR'].devices('monpressprod')\n device = devices[i]\n return device", "def get_available_devices(self):\n available_devices = []\n try:\n out = self.get_output(\"devices\")\n except Exception as e:\n logger.error(e)\n else:\n for line in out:\n device = self.parse_device_info(line)\n if device:\n available_devices.append(device)\n return available_devices", "def get_bt_smarthub_data(self):\n import btsmarthub_devicelist\n\n data = btsmarthub_devicelist.get_devicelist(router_ip=self.host, only_active_devices=True)\n devices = {}\n for device in data:\n try:\n devices[device['UserHostName']] = {\n 'ip': device['IPAddress'],\n 'mac': device['PhysAddress'],\n 'host': device['UserHostName'],\n 'status': device['Active']\n }\n except (KeyError, 'no'):\n pass\n return devices", "def get_connected_devices(self):\n all_devices = []\n if self.vid_list:\n for vid in self.vid_list:\n all_devices += UsbDriver.usb_list_devices(vid)\n\n if self.pid_ignore_list:\n return [device 
for device in all_devices\n if not (device.product_id in self.pid_ignore_list)]\n else:\n return all_devices", "def get_discoverable_devices(self):\n available = self.get_available_devices()\n paired = self.get_paired_devices()\n\n return [d for d in available if d not in paired]" ]
[ "0.6909028", "0.6836376", "0.6818698", "0.68010455", "0.6787019", "0.67741495", "0.67020565", "0.669887", "0.6642634", "0.6622597", "0.66008425", "0.6589596", "0.6583874", "0.6577444", "0.65751064", "0.6569571", "0.6566964", "0.65413487", "0.651726", "0.65085953", "0.6506465", "0.64924186", "0.64831716", "0.64662415", "0.64329183", "0.64226925", "0.63929", "0.6374559", "0.6373403", "0.636461" ]
0.6911494
0
Start Bluetooth discovery on phone
def bt_start_discovery(self): is_start_discovery = False try: is_bluetooth_on = self.bt_radio('on') if '8.1' in self.phone_info.os_version: self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_pair_new_device_in_android_8_1_button, 10) self.find_element(self.driver.appium_driver, self.bluetooth_pair_new_device_in_android_8_1_button, 2).click() is_bluetooth_on = True if is_bluetooth_on: logger.debug("Bluetooth discovery Stared on {}".format( self.phone_info.bluetooth_name)) is_start_discovery = True else: logger.debug("Bluetooth discovery not Stared on {}".format( self.phone_info.bluetooth_name)) except Exception as e: logger.error("Trun on Bluetooth Button is not Visible") logger.error(repr(e)) return is_start_discovery
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def discover(bt_addr):\n print \"performing inquiry...\"\n nearby_devices = bluetooth.discover_devices(lookup_names = True)\n print \"Found %d devices\" % len(nearby_devices)\n \n for addr, name in neaby_devices:\n print \" %s - %s\" % (addr, name)", "def device_discovery(endless):\r\n click.echo(\"start device discovery ...\")\r\n _device_discovery(endless)", "def start_scan(self):\n try:\n out = self.get_output(\"scan on\")\n except BluetoothctlError, e:\n print(e)\n return None", "def connect_magic():\n nearby_devices = bluetooth.discover_devices(lookup_names = True, duration=5)\n\n for addr, name in nearby_devices:\n print(name)\n if name == \"MindWave Mobile\":\n print \"found\"\n return (connect_bluetooth_addr(addr), addr)\n return (None, \"\")", "async def discover(*args):\n # Since discovery needs to connect to all discovered bluetooth devices, and\n # only rules out devices after a timeout, it can potentially take a long\n # time. If there's already a discovery running, just skip this poll.\n if hass.data[DOMAIN][\"discovery\"].locked():\n return\n\n async with hass.data[DOMAIN][\"discovery\"]:\n bluetooth_devices = await hass.async_add_executor_job(\n pykulersky.discover_bluetooth_devices\n )\n\n # Filter out already connected lights\n new_devices = [\n device\n for device in bluetooth_devices\n if device[\"address\"] not in hass.data[DOMAIN][\"devices\"]\n ]\n\n for device in new_devices:\n light = pykulersky.Light(device[\"address\"], device[\"name\"])\n try:\n # If the connection fails, either this is not a Kuler Sky\n # light, or it's bluetooth connection is currently locked\n # by another device. 
If the vendor's app is connected to\n # the light when home assistant tries to connect, this\n # connection will fail.\n await hass.async_add_executor_job(check_light, light)\n except pykulersky.PykulerskyException:\n continue\n # The light has successfully connected\n hass.data[DOMAIN][\"devices\"].add(device[\"address\"])\n async_add_entities([KulerskyLight(light)], update_before_add=True)", "def scan_bluetooth(self):\n nearby_devices = bluetooth.discover_devices(lookup_names=True)\n print(\"Found {} devices at {}\".format(len(nearby_devices), datetime.now()))\n timestamp = datetime.now().strftime('%m/%d/%Y %H:%M:%S')\n self.capture = self.MonitorCapture(timestamp=timestamp, structure=nearby_devices, ip_addr=self.ip_addr,\n location=self.location)\n for name, addr in nearby_devices:\n print(\" %s - %s\" % (addr, name))\n\n self.capture = json.dumps(self.capture.__dict__)", "async def async_step_bluetooth(\n self, discovery_info: BluetoothServiceInfoBleak\n ) -> FlowResult:\n _LOGGER.debug(\"Discovered bluetooth device: %s\", discovery_info)\n await self.async_set_unique_id(format_unique_id(discovery_info.address))\n self._abort_if_unique_id_configured()\n parsed = parse_advertisement_data(\n discovery_info.device, discovery_info.advertisement\n )\n if not parsed or parsed.data.get(\"modelName\") not in SUPPORTED_MODEL_TYPES:\n return self.async_abort(reason=\"not_supported\")\n self._discovered_adv = parsed\n data = parsed.data\n self.context[\"title_placeholders\"] = {\n \"name\": data[\"modelName\"],\n \"address\": discovery_info.address,\n }\n return await self.async_step_user()", "async def connect(self,\n search_name=None,\n address=None,\n port=None,\n bluetooth_interface=None,\n use_ble=False,\n num_retry_attempts=1):\n # Create the bluetooth interface\n global HAS_PYBLUEZ\n global USE_PYBLUEZ\n global HAS_PYGATT\n global USE_PYGATT\n global HAS_WINBLE\n global USE_WINBLE\n if bluetooth_interface is None:\n if use_ble:\n if (HAS_PYGATT and USE_PYGATT) or 
(HAS_WINBLE and USE_WINBLE):\n self._bluetooth_interface = BleInterface(\n search_name=search_name, address=address, port=port)\n else:\n raise RuntimeError(\n 'Could not import a bluetooth LE Library.')\n else:\n if HAS_PYBLUEZ and USE_PYBLUEZ:\n self._bluetooth_interface = BluetoothInterface(\n search_name=search_name, address=address, port=port)\n else:\n raise RuntimeError(\n 'Could not import a bluetooth (non-BLE) library.')\n else:\n self._bluetooth_interface = bluetooth_interface\n\n self._bluetooth_interface.data_received_handler = self._handle_data_received\n self._bluetooth_interface.connect(\n num_retry_attempts=num_retry_attempts)\n print('Connected to Sphero.')", "def connect(self):\n print_msg(self.name, \"Connecting...\")\n self.client_sock = BluetoothSocket( RFCOMM )\n uuid = \"00001101-0000-1000-8000-00805F9B34FB\"\n btaddr = \"08:60:6E:A5:90:50\"\n service_match = find_service(uuid = uuid, address = btaddr)\n while len(service_match) == 0:\n service_match = find_service(uuid = uuid, address = btaddr)\n\n first_match = service_match[0]\n port = first_match[\"port\"]\n host = first_match[\"host\"]\n self.client_sock.connect((host,port))\n print_msg(self.name, \"Connected to \"+str(host))\n self.is_connected = True", "def discover(self):\n self.ola_thread.run_discovery(self.universe.get(), self._upon_discover)\n if self.auto_disc.get():\n self.ola_thread.add_event(5000, self.discover)\n else: \n print \"auto_disc is off\"", "def advertise_and_connect(self):\n self.server_sock = bluetooth.BluetoothSocket(bluetooth.RFCOMM)\n self.server_sock.bind((\"\", self.PORT))\n self.server_sock.listen(1)\n bluetooth.advertise_service(\n self.server_sock,\n \"Sensor Service\",\n self.UUID\n )\n self.client_sock, client_address = self.server_sock.accept()", "def bt_get_discovered_devices(self):\n discovered_bluetooth_device = []\n try:\n self.bt_radio('on')\n if '8.1' in self.phone_info.os_version:\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n 
self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n time.sleep(10)\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_discovered_device_list,\n 10)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_discovered_device_list,\n 1)\n\n # To add connected bluetooth device name in list\n for index in range(len(element_list)):\n discovered_bluetooth_device.append(\n str(element_list[index].text.replace('\\u200e', '')))\n logger.debug(\"List of Discovered Devices:\" + str(\n discovered_bluetooth_device))\n except Exception as e:\n self.take_screenshot(self.driver.appium_driver,\n '__retry_to_bt_connect')\n logger.error(\"No device are discoverable .\")\n logger.error(repr(e))\n return discovered_bluetooth_device", "def run():\n adapter = pygatt.GATTToolBackend()\n ADDRESS_TYPE = pygatt.BLEAddressType.random\n\n # TODO if a thread is killed then this will never reestablish a new one since connections never has elements removed\n while True:\n try:\n for device in adapter.scan():\n if device[\"name\"] == \"DYP Hydrometer\":\n print(\"NEW HYDROMETER FOUND\")\n t = threading.Thread(target=handle_connection, args=(device[\"address\"],))\n t.start()\n except:\n pass\n ptime.sleep(5)", "def _device_discovery(endless=True):\r\n conf = Config.Config().get_config()\r\n\r\n msg = json.dumps({\"name\": conf.get('mqtt', 'server_name'),\r\n \"ip\": conf.get('mqtt', 'ip'),\r\n \"port\": conf.getint('mqtt', 'port')})\r\n\r\n run_event = threading.Event()\r\n run_event.set()\r\n\r\n esp_discovery = Discovery.EspDiscovery(conf.get('discovery', 'broadcast'),\r\n conf.getint('discovery', 'port'),\r\n msg,\r\n conf.getint('discovery', 'interval'),\r\n run_event)\r\n try:\r\n esp_discovery.start()\r\n\r\n if endless:\r\n while True:\r\n time.sleep(0.5)\r\n\r\n except KeyboardInterrupt or SystemExit:\r\n 
run_event.clear()\r\n esp_discovery.join()\r\n click.echo(\"Process terminated\")", "def bluetooth_listen(\n addr, threshold, callback, sleep=1, daily=True, debug=False):\n b = BluetoothRSSI(addr=addr)\n while True:\n rssi = b.request_rssi()\n if debug:\n print(\"---\")\n print(\"addr: {}, rssi: {}\".format(addr, rssi))\n # Sleep and then skip to next iteration if device not found\n if rssi is None:\n time.sleep(sleep)\n continue\n # Trigger if RSSI value is within threshold\n if threshold[0] < rssi < threshold[1]:\n callback()\n if daily:\n # Calculate the time remaining until next day\n now = datetime.datetime.now()\n tomorrow = datetime.datetime(\n now.year, now.month, now.day, 0, 0, 0, 0) + \\\n datetime.timedelta(days=1)\n until_tomorrow = (tomorrow - now).seconds\n if debug:\n print(\"Seconds until tomorrow: {}\".format(until_tomorrow))\n else:\n time.sleep(until_tomorrow)\n # Delay between iterations\n time.sleep(sleep)", "def start_scan(self):\n try:\n self.send(\"scan on\")\n except Exception as e:\n logger.error(e)", "def discover(self, seconds=3):\n log.info(\"Discovering devices\")\n self.upnp.broadcast(seconds)", "def broadcast(loopstate):\n cmdstring = 'sudo hcitool -i hci0 cmd ' # Send cmd to hci0\n cmdstring += '0x08 ' # Set group to BLE\n cmdstring += '0x0008 ' # Set command to HCI_LE_Set_Advertising_Data\n cmdstring += '0D ' # Length of entire following data, in bytes\n cmdstring += '02 ' # Length of flag info\n cmdstring += '01 ' # Use AD flags\n cmdstring += '02 ' # Flag value:\n # bit 0 (OFF) LE Limited Discoverable Mode\n # bit 1 (ON) LE General Discoverable Mode\n # bit 2 (OFF) BR/EDR Not Supported\n # bit 3 (ON) Simultaneous LE and BR/EDR to Same Device Capable (controller)\n # bit 4 (ON) Simultaneous LE and BR/EDR to Same Device Capable (Host)\n cmdstring += '09 ' # Length of following message, in bytes\n cmdstring += '07 ' # GAP value (07 = 128 Bit Complete Service UUID List)\n cmdstring += '42 69 63 79 63 6c 65 ' # Header to identify 
beacon message-\n # - and it's also is Bicycle in ASCII!\n if loopstate:\n cmdstring = cmdstring + LOOP_ON\n else:\n cmdstring = cmdstring + LOOP_OFF + ' >/dev/null 2>&1'\n subprocess.call(cmdstring, shell=True)\n subprocess.call('sudo hciconfig hci0 leadv 3 >/dev/null 2>&1', shell=True)", "def start(self):\n \r\n # Fetch the XBee Manager name from the Settings Manager:\r\n xbee_manager_name = SettingsBase.get_setting(self, \"xbee_device_manager\")\r\n dm = self.__core.get_service(\"device_driver_manager\")\r\n self.__xbee_manager = dm.instance_get(xbee_manager_name)\r\n\r\n # Register ourselves with the XBee Device Manager instance:\r\n self.__xbee_manager.xbee_device_register(self)\r\n\r\n # Get the extended address of the device:\r\n extended_address = SettingsBase.get_setting(self, \"extended_address\")\r\n\r\n # Create a callback specification for our device address, endpoint\r\n # Digi XBee profile and sample cluster id:\r\n xbdm_rx_event_spec = XBeeDeviceManagerRxEventSpec()\r\n xbdm_rx_event_spec.cb_set(self.sample_indication)\r\n xbdm_rx_event_spec.match_spec_set(\r\n (extended_address, 0xe8, 0xc105, 0x92),\r\n (True, True, True, True))\r\n self.__xbee_manager.xbee_device_event_spec_add(self,\r\n xbdm_rx_event_spec)\r\n\r\n # Create a callback specification that calls back this driver when\r\n # our device has left the configuring state and has transitioned\r\n # to the running state:\r\n xbdm_running_event_spec = XBeeDeviceManagerRunningEventSpec()\r\n xbdm_running_event_spec.cb_set(self.running_indication)\r\n self.__xbee_manager.xbee_device_event_spec_add(self,\r\n xbdm_running_event_spec)\r\n\r\n # Create a DDO configuration block for this device:\r\n xbee_ddo_cfg = XBeeConfigBlockDDO(extended_address)\r\n\r\n # Get the gateway's extended address:\r\n gw_xbee_sh, gw_xbee_sl = gw_extended_address_tuple()\r\n\r\n # Set the destination for I/O samples to be the gateway:\r\n xbee_ddo_cfg.add_parameter('DH', gw_xbee_sh)\r\n xbee_ddo_cfg.add_parameter('DL', 
gw_xbee_sl)\r\n\r\n # TODO: Configure the XBee pins to be Digital/Analog IO\r\n #\r\n # I.E.: Configure pins DI0 .. DI3 for digital input and \r\n # enable line monitoring on pins DIO0 .. DIO3:\r\n #for io_pin in [ 'D0', 'D1', 'D2', 'D3' ]:\r\n # xbee_ddo_cfg.add_parameter(io_pin, 3)\r\n # Enable I/O line monitoring on pins DIO0 .. DIO3:\r\n #xbee_ddo_cfg.add_parameter('IC', 0xf)\r\n #\r\n # I.E.: Configure pins DI1 .. DI3 for analog input:\r\n #for io_pin in [ 'D1', 'D2', 'D3' ]:\r\n # xbee_ddo_cfg.add_parameter(io_pin, 2)\r\n\r\n # Configure node sleep behavior:\r\n sleep_ms = SettingsBase.get_setting(self, \"sleep_ms\")\r\n awake_time_ms = SettingsBase.get_setting(self, \"awake_time_ms\")\r\n xbee_sleep_cfg = XBeeConfigBlockSleep(extended_address)\r\n if sleep_ms > 0:\r\n # Configure node to sleep for the specified interval:\r\n xbee_sleep_cfg.sleep_cycle_set(awake_time_ms, sleep_ms)\r\n else:\r\n # If sleep_ms is 0, disable sleeping on the node altogether:\r\n xbee_sleep_cfg.sleep_mode_set(SM_DISABLED)\r\n\r\n # Register the Sleep configuration block with the XBee Device Manager:\r\n self.__xbee_manager.xbee_device_config_block_add(self, xbee_sleep_cfg)\r\n\r\n # Register the DDO configuration block with the XBee Device Manager:\r\n self.__xbee_manager.xbee_device_config_block_add(self, xbee_ddo_cfg)\r\n\r\n # Indicate that we have no more configuration to add:\r\n self.__xbee_manager.xbee_device_configure(self)\r\n\r\n # Start the thread\r\n threading.Thread.start(self)\r\n\n return True", "async def start(self) -> None:\n for broadcast_port in self._broadcast_ports:\n logger.info(\"starting the udp bridge on port %s\", broadcast_port)\n protocol_factory = UdpClientProtocol(\n partial(_parse_device_from_datagram, self._on_device)\n )\n transport, protocol = await get_running_loop().create_datagram_endpoint(\n lambda: protocol_factory,\n local_addr=(\"0.0.0.0\", broadcast_port), # nosec\n family=AF_INET,\n )\n self._transports[broadcast_port] = transport\n 
logger.debug(\"udp bridge on port %s started\", broadcast_port)\n\n self._is_running = True", "def changeBluetoothService(enable=True):\n \n #blueServiceStatus = os.popen('systemctl status bluetooth.service').read()\n ServStatStdout = execCommand('systemctl status bluetooth.service')\n \n if enable:\n if not 'active (running)' in ServStatStdout:\n checkRoot()\n #blueServiceStatus = os.popen('sudo systemctl start bluetooth.service').read()\n blueServStartStdout = execCommand('sudo systemctl start bluetooth.service')\n return\n \n if not enable:\n if not 'inactive (dead)' in ServStatStdout:\n checkRoot()\n #blueServiceStatus = os.popen('sudo systemctl stop bluetooth.service').read()\n blueServStopStdout = execCommand('sudo systemctl stop bluetooth.service')\n return", "def create_bond(device_address=None, adapter_address=None):\n con = pexpect.spawn('sudo bluetoothctl')\n con.expect(\"bluetooth\", timeout=1)\n \n print(\"selecting adapter ...\")\n con.sendline(\"select \" + adapter_address.upper())\n\n #check to see if already paired\n print(\"checking if bond exists already ...\")\n no_bond=False\n try:\n con.sendline(\"paired-devices\")\n con.expect(device_address.upper(), timeout=1)\n except(pexpect.TIMEOUT):\n no_bond = True\n else:\n print(\"bond already exists for %s\" % (device_address.upper()))\n print(\"successfully quiting bluetoothctl since bond is already formed\")\n con.sendline(\"quit\") \n return(0) \n \n con.sendline(\"select \" + adapter_address.upper())\n \n print(\"registering agent ...\")\n try:\n con.sendline(\"agent NoInputNoOutput\")\n con.expect(['Agent registered', 'Agent is already registered'], timeout=1)\n con.sendline(\"default-agent\")\n con.expect(\"Default agent request successful\", timeout=1)\n except(pexpect.TIMEOUT):\n print(\"unable to register agent\")\n return(1)\n\n print(\"enabling pairing ...\")\n try:\n con.sendline(\"pairable on\")\n con.expect(\"Changing pairable on succeeded\", timeout=1)\n except(pexpect.TIMEOUT):\n 
print(\"unable to turn pairing on\")\n return(1)\n\n print(\"starting scan ...\")\n try:\n con.sendline(\"scan on\")\n devfound = con.expect(device_address.upper(), timeout=5)\n if devfound == 0:\n try:\n con.sendline(\"scan off\")\n print (\"Found device. connecting to %s\" % (device_address.upper()))\n con.sendline(\"connect \" + device_address.upper())\n con.expect(\"Connection successful\", timeout=10)\n #sleep(10) #need extra time here to finish pairing\n except(pexpect.TIMEOUT):\n print(\"could not connect to %s\" % (device_address.upper()))\n return(1)\n try:\n #explicitly pair with the device\n con.sendline(\"pair \" + device_address.upper())\n con.expect(\"Pairing successful\", timeout=5)\n except(pexpect.TIMEOUT):\n print(\"pairing not successful\")\n try:\n con.sendline(\"info \" + device_address.upper()) \n con.expect(\"Paired: yes\", timeout=1)\n except(pexpect.TIMEOUT):\n print(\"could not pair with %s\" % (device_address.upper()))\n return(1)\n else:\n con.sendline(\"trust \" + device_address.upper())\n print(\"Connection and pairing successful!\")\n #try:\n #con.sendline(\"list-attributes\")\n #con.expect(\"6e400003-b5a3-f393-e0a9-e50e24dcca9e\", timeout=2)\n #print(con.before)\n #for line in con.before:\n # read_characteristics = line\n #print(read_characteristics)\n #except(pexpect.TIMEOUT):\n #print(\"could not list the attributes\")\n #return(1)\n try:\n print(\"disconnecting temporarily ...\")\n con.sendline(\"disconnect \" + device_address.upper())\n con.expect(\"Connected: no\", timeout=5)\n except(pexpect.TIMEOUT):\n print(\"could not disconnect.. 
\")\n con.sendline(\"quit\")\n return(1)\n else:\n print(\"successfully quiting bluetoothctl after forming bond\")\n con.sendline(\"quit\")\n return(0)\n except(pexpect.TIMEOUT):\n con.sendline(\"scan off\")\n print(\"unable to find device %s\" % (device_address))\n return(1)", "def bt_scan():\n print(\"Searching for nearby devices...\")\n explore_devices = []\n if explorepy._bt_interface == 'sdk':\n device_manager = explorepy.exploresdk.ExploreSDK_Create()\n nearby_devices = device_manager.PerformDeviceSearch()\n for bt_device in nearby_devices:\n if \"Explore\" in bt_device.name:\n print(\"Device found: %s - %s\" % (bt_device.name, bt_device.address))\n explore_devices.append((bt_device.name, bt_device.address))\n else:\n import bluetooth\n nearby_devices = bluetooth.discover_devices(lookup_names=True)\n for address, name in nearby_devices:\n if \"Explore\" in name:\n print(\"Device found: %s - %s\" % (name, address))\n explore_devices.append((address, name))\n\n if not nearby_devices:\n print(\"No Devices found\")\n\n return explore_devices", "def getDevices():\n \n scannedDevices = list()\n \n proc = subprocess.Popen('bluetoothctl scan on', shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=8192, universal_newlines=True)\n \n time.sleep(10)\n \n proc.stdin.write('scan off')\n \n try:\n stdout, stderr = proc.communicate()\n except subprocess.TimeoutExpired:\n proc.kill()\n stdout, stderr = proc.communicate()\n\n ansiEscapePattern = re.compile(r'\\x1B[@-_][0-?]*[ -/]*[@-~]')\n stdout = ansiEscapePattern.sub('', stdout)\n \n #deviceNamePattern = re.compile('^\\[NEW\\] Device [A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2}:[A-F0-9]{2} ')\n \n for line in stdout.split('\\n'):\n if '[NEW] Device' in line:\n device = list()\n device.append(line[13:31])\n device.append(line[31:])\n scannedDevices.append(device)\n \n return scannedDevices", "def __init_bluetooth_if_required(self):\n\n if self.__bluetooth_adapter and 
((self.__bluetooth_adapter._running and not self.__bluetooth_adapter._running.is_set()) or not self.__bluetooth_adapter._running):\n try:\n self.__bluetooth_adapter.start()\n return self.__bluetooth_adapter._running.is_set()\n except Exception:\n self.on_error(self, None, 'Unable to initialize Bluetooth adapter', traceback.format_exc())\n return False\n if self.__bluetooth_adapter and self.__bluetooth_adapter._running:\n return self.__bluetooth_adapter._running.is_set()\n else:\n return False", "def start(self):\n print \"Starting BluetoothController\"\n self.request_handler = RequestHandler(self.request_queue, self.send_queue,\n self.camera, self.interval, self.pulse_length)\n self.request_handler.register_stop(self.stop)\n self.request_handler.start()\n #self.request_handler.start_timelapse()\n while not self.finished:\n try:\n self.conn, self.addr, self.sock = find_connections()\n self.conn.settimeout(1)\n self.send_receive()\n except KeyboardInterrupt:\n print \"Keyboard interrupt, stopping\"\n self.stop()\n except socket_mod.error, e:\n print \"Yelp! 
stopping\"\n print e\n self.close_conn()\n\n print \"BluetoothController closing\"", "def start_thread(addr, callback, threshold=THRESHOLD, sleep=SLEEP,\n daily=DAILY, debug=DEBUG):\n thread = threading.Thread(\n target=bluetooth_listen,\n args=(),\n kwargs={\n 'addr': addr,\n 'threshold': threshold,\n 'callback': callback,\n 'sleep': sleep,\n 'daily': daily,\n 'debug': debug\n }\n )\n # Daemonize\n thread.daemon = True\n # Start the thread\n thread.start()\n return thread", "def start(self):\n # Fetch the XBee Manager name from the Settings Manager:\n xbee_manager_name = SettingsBase.get_setting(self, \"xbee_device_manager\")\n dm = self.__core.get_service(\"device_driver_manager\")\n self.__xbee_manager = dm.instance_get(xbee_manager_name)\n\n # Register ourselves with the XBee Device Manager instance:\n self.__xbee_manager.xbee_device_register(self)\n\n # Get the extended address of the device:\n extended_address = SettingsBase.get_setting(self, \"extended_address\")\n\n # Create a callback specification for our device address, endpoint\n # Digi XBee profile and sample cluster id:\n xbdm_rx_event_spec = XBeeDeviceManagerRxEventSpec()\n xbdm_rx_event_spec.cb_set(self._sample_indication)\n xbdm_rx_event_spec.match_spec_set(\n (extended_address, 0xe8, 0xc105, 0x92),\n (True, True, True, True))\n self.__xbee_manager.xbee_device_event_spec_add(self,\n xbdm_rx_event_spec)\n\n # Create a DDO configuration block for this device:\n xbee_ddo_cfg = XBeeConfigBlockDDO(extended_address)\n\n # Get the gateway's extended address:\n gw_xbee_sh, gw_xbee_sl = gw_extended_address_tuple()\n\n # Set the destination for I/O samples to be the gateway:\n xbee_ddo_cfg.add_parameter('DH', gw_xbee_sh)\n xbee_ddo_cfg.add_parameter('DL', gw_xbee_sl)\n \n #\"\"\" IF YOUR XBEE DEVICE DON'N SLEEP AND YOU SEND DATA FROM XBEE DEVICE TO ConnectPort X manually then uncoment the start of that line.\n # Configure the IO Sample Rate:\n # Clip sample_rate_ms to the max value of IR:\n sample_rate_ms = 
SettingsBase.get_setting(self, \"sample_rate_ms\")\n sample_rate_ms = min(sample_rate_ms, 0xffff)\n xbee_ddo_cfg.add_parameter('IR', sample_rate_ms)\n\n # Register this configuration block with the XBee Device Manager:\n self.__xbee_manager.xbee_device_config_block_add(self, xbee_ddo_cfg)\n\n # Setup the sleep parameters on this device:\n will_sleep = SettingsBase.get_setting(self, \"sleep\")\n sample_predelay = SettingsBase.get_setting(self, \"sample_predelay\")\n awake_time_ms = (SettingsBase.get_setting(self, \"awake_time_ms\") +\n sample_predelay)\n \n if will_sleep:\n # Sample time pre-delay, allow the circuitry to power up and\n # settle before we allow the XBee to send us a sample: \n xbee_ddo_wh_block = XBeeConfigBlockDDO(extended_address)\n xbee_ddo_wh_block.apply_only_to_modules((MOD_XB_ZB, MOD_XB_S2C_ZB,))\n xbee_ddo_wh_block.add_parameter('WH', sample_predelay)\n self.__xbee_manager.xbee_device_config_block_add(self,\n xbee_ddo_wh_block)\n\n # The original sample rate is used as the sleep rate:\n sleep_rate_ms = SettingsBase.get_setting(self, \"sample_rate_ms\")\n xbee_sleep_cfg = XBeeConfigBlockSleep(extended_address)\n if will_sleep:\n xbee_sleep_cfg.sleep_cycle_set(awake_time_ms, sleep_rate_ms)\n else:\n xbee_sleep_cfg.sleep_mode_set(SM_DISABLED)\n self.__xbee_manager.xbee_device_config_block_add(self, xbee_sleep_cfg)\n #\"\"\"\n # Register this configuration block with the XBee Device Manager:\n self.__xbee_manager.xbee_device_config_block_add(self, xbee_ddo_cfg)\n\n # Indicate that we have no more configuration to add:\n self.__xbee_manager.xbee_device_configure(self)\n \n #threading.Thread.start(self)\n \n return True", "def _device_discovered(self, device: Device) -> None:\n if device.action != \"add\":\n return\n _LOGGER.debug(\n \"Discovered Device at path: %s, triggering scan serial\",\n device.device_path,\n )\n self.hass.create_task(self._async_scan())", "def connect(self,address):\n\t\tself._sock = 
BluetoothSocket(RFCOMM)\n\t\tself._sock.connect((address,1))\n\t\t#If the socket connect fails, it will raise an error and terminate\n\t\tself._connected = True" ]
[ "0.66967946", "0.6670762", "0.6263368", "0.618207", "0.61527777", "0.6101039", "0.59816295", "0.5966792", "0.5943673", "0.5851195", "0.583917", "0.5830259", "0.57935345", "0.5790345", "0.5767885", "0.56893414", "0.5688346", "0.5644812", "0.56335336", "0.5622003", "0.56168526", "0.55882585", "0.55780655", "0.5550642", "0.5543339", "0.55223054", "0.5515036", "0.5509457", "0.55051744", "0.55041426" ]
0.72633237
0
Stop Bluetooth discovery on phone
def bt_stop_discovery(self): is_stop_discovery = False try: is_bluetooth_off = self.bt_radio('off') if is_bluetooth_off: logger.debug("Bluetooth discovery Stoped {}".format( self.phone_info.bluetooth_name)) is_stop_discovery = True else: logger.debug("Bluetooth discovery completed {}".format( self.phone_info.bluetooth_name)) is_stop_discovery = False except Exception as e: logger.error("Turn OFF Bluetooth Button is not Visible") logger.error(repr(e)) return is_stop_discovery
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def stop(self):\n # Unregister ourselves with the XBee Device Manager instance:\n self.__xbee_manager.xbee_device_unregister(self)\n return True", "def stopAll():\n \n # Get paired and connected devices\n pairedDevices, connectedDevices = getPairConDevices()\n \n print('\\nStoping bluetooth profiles\\n')\n \n if connectedDevices:\n \n # Power on bluetooth controller\n bluePoweronStdout = execCommand('bluetoothctl power on')\n \n for device in connectedDevices:\n\n # Disconnect bluetooth device\n blueDisconnectStdout = execCommand('bluetoothctl disconnect {}'.format(device[1]))\n\n if not 'Successful disconnected' in blueDisconnectStdout:\n print(blueDisconnectStdout)\n print('Is device connected?\\n')\n else:\n print('Device {} was successfully stopped\\n'.format(device[0]))\n \n return", "def disconnect(self):\n if self._bluetooth_interface:\n self._bluetooth_interface.disconnect()", "async def callback_homeassistant_stop(self, event: \"Event\") -> NoReturn:\n _LOGGER.debug(\"Hekr system is shutting down\")\n for device_id, device in self.devices.items():\n connector = device.connector\n listener = connector.listener\n if listener is not None and listener.is_running:\n _LOGGER.debug('Shutting down listener for device ID \"%s\"' % device_id)\n listener.stop()\n\n if connector.is_connected:\n _LOGGER.debug('Shutting down connector for device ID \"%s\"' % device_id)\n await connector.close_connection()", "async def stop(self) -> None:\n for broadcast_port in self._broadcast_ports:\n transport = self._transports.get(broadcast_port)\n\n if transport and not transport.is_closing():\n logger.info(\"stopping the udp bridge on port %s\", broadcast_port)\n transport.close()\n else:\n logger.info(\"udp bridge on port %s not started\", broadcast_port)\n\n self._is_running = False", "def stop_advertising(self):\n self._periph.stop_advertising()", "def cleanup():\n broadcast_proc.terminate()\n subprocess.call('sudo hciconfig hci0 noleadv', shell=True)\n if CELL:\n 
ser_command('Cell off', cell_ser)\n cell_ser.close()\n grovepi.digitalWrite(LED, 0)", "def phone_kill(self) -> None:", "def _plugin_stop(handle):\n GPIO.cleanup()\n _LOGGER.info('MAX31865 (async) Disconnected.')", "def device_disconnect(self):\n pass", "def stop_scanner():\n\n if 'SCAN' not in autorx.task_list:\n # No scanner thread running!\n # This means we likely have a SDR free already.\n return\n else:\n logging.info(\"Halting Scanner to decode detected radiosonde.\")\n _scan_sdr = autorx.task_list['SCAN']['device_idx']\n # Stop the scanner.\n autorx.task_list['SCAN']['task'].stop()\n # Relase the SDR.\n autorx.sdr_list[_scan_sdr]['in_use'] = False\n autorx.sdr_list[_scan_sdr]['task'] = None\n # Remove the scanner task from the task list\n autorx.task_list.pop('SCAN')", "def stop_device(self):\n\n self.state = 'stopped'", "def close(self):\n self.controller.DisableDevice()\n self.controller.StopPolling()\n self.controller.Disconnect(False)", "def stop(self):\n self.stopped = True\n self.broadcast('host down')", "def stop_alarm_ringtone(self):\n mixer.stop()\n mixer.quit()", "def stop_notify(self, bus):\n chrc = bus.get(BLUEZ_SVC_NAME, self.chrc_path)\n try:\n chrc.StopNotify()\n except Exception as err:\n print(\"Unable to stop notifying\")", "def stop(self):\n self.running = False\n self.hop_channel(\"auto\")", "def deactivate():\n deactivate_connection_with_mainloop(get_uuid())", "def disconnect(self):\n self.arduino.close()\n self.arduino = None", "def stopcomm(self,handle):\r\n # Convert handle back to tuple, unpack it\r\n (ip, port) = deserialize(handle)\r\n \r\n # Deletes the callback function on the specified port\r\n if ip in self.callbackFunction and port in self.callbackFunction[ip]:\r\n del self.callbackFunction[ip][port]", "def on_disconnect(self, butterfly: Butterfly):\n s = \"{}:{}\".format(butterfly.ip, butterfly.client_port)\n if s in self.butterflies:\n bf = self.butterflies.pop(s)\n # These are here by default - don't call super() if you 
modify the butterfly dict!\n assert isinstance(bf, tuple)\n assert len(bf) == 2\n bf[1].cancel()", "def _async_device_unavailable(\n _service_info: bluetooth.BluetoothServiceInfoBleak,\n ) -> None:\n push_lock.reset_advertisement_state()", "def _stop(self):\n def process_response(future):\n response = future.result()\n self._window.qtlog.append(response.ErrorResponse.Name(response.error_response)[14:])\n self.scanning = False\n self._window.qtlog.append(\"Scanner Stop\")\n\n response_future = self.client.StopScan.future(scanner_commands_pb2.ScannerRequest(request=1))\n response_future.add_done_callback(process_response)", "def _telegram_stop_callback(self, update: Update, _: CallbackContext):\n\n rospy.loginfo(\"Stopping Telegram ROS bridge for chat id {}\".format(self._telegram_chat_id))\n update.message.reply_text(\n \"Disconnecting chat_id {}. So long and thanks for all the fish!\"\n \" Type /start to reconnect\".format(self._telegram_chat_id)\n )\n self._telegram_chat_id = None", "def bt_disconnect(self, device_name_to_disconnect):\n is_bluetooth_disconnect = False\n try:\n is_device_connected = self.bt_is_connected_to(\n device_name_to_disconnect)\n if is_device_connected is False:\n return True\n\n logger.debug('Disconnect Bluetooth device')\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 10)\n connected_device_list = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 1)\n\n for index in range(len(connected_device_list)):\n if self.is_same_bluetooth_name(connected_device_list[index],\n device_name_to_disconnect):\n if self.phone_info.phone_type == PhoneType.IOS \\\n or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n # more_info = self.find_elements(\n # self.driver.appium_driver,\n # self.bluetooth_device_setting_button, 0)\n # more_info[index].click()\n connected_device_list[index].click()\n break\n else:\n 
connected_device_list[index].click()\n break\n\n self._disconnect_bluetooth_device_from_ios_device()\n if self.phone_info.phone_type == PhoneType.ANDROID:\n try:\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n 'self.android_locators.BLUETOOTH_DISCONNECT_POP_UP_OK_BUTTON_ByXPATH',\n 10)\n self.find_element(self.driver.appium_driver,\n 'self.android_locators.BLUETOOTH_DISCONNECT_POP_UP_OK_BUTTON_ByXPATH',\n 0).click()\n except:\n logger.debug(\n \"Disconnect popup button is currently not available\")\n\n is_bluetooth_disconnect = True\n\n except Exception as e:\n logger.warning(\"Need to attempt connect before disconnect\")\n logger.warning(repr(e))\n self._go_to_connected_device_screen(no_of_back_click=1)\n return is_bluetooth_disconnect", "def stop_listening(self) -> None:\n assert self.client_recv\n self.client_recv = None", "def _stop_device(self):\r\n with self._driver_lock:\r\n m = self._lib.ps2000aStop(self._handle)\r\n check_result(m)", "def close_UDP_connection(self):\n self.beacon.stop_beaconing()", "def notification_stop(self):\n ret = gattlib_notification_stop(self.connection, self._gattlib_characteristic.uuid)\n handle_return(ret)", "def disable_discovery(self):" ]
[ "0.67808104", "0.6609006", "0.64793354", "0.64446414", "0.63968825", "0.6179201", "0.6133089", "0.6108425", "0.6105471", "0.6102414", "0.6078803", "0.6070525", "0.6045644", "0.60309994", "0.6012417", "0.6008918", "0.59882665", "0.5976908", "0.5957209", "0.59390825", "0.5927639", "0.59192735", "0.58925277", "0.5879748", "0.58661747", "0.58543587", "0.58376217", "0.5831113", "0.58112884", "0.58053863" ]
0.7281296
0
Tests if phone is connected to a specific bluetooth device
def bt_is_connected_to(self, blutooth_connected_device_name): bt_is_connected = False logger.debug("Checking if Phone is connected to {!r}".format( blutooth_connected_device_name)) try: bt_is_connected = self.bt_is_connected() logger.debug("Is the phone connected to something? {!r}".format( bt_is_connected)) if bt_is_connected is False: return False self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_connected_device_list, 10) connected_device_list = self.find_elements( self.driver.appium_driver, self.bluetooth_connected_device_list, 1) if len(connected_device_list) > 0: for index in range(len(connected_device_list)): logger.debug( "Checking if {!r} is the device in question" " (i.e. {!r})".format(connected_device_list[index].text, blutooth_connected_device_name)) if self.is_same_bluetooth_name(connected_device_list[index], blutooth_connected_device_name): logger.info("Phone is connected to the target device --- {}".format( blutooth_connected_device_name)) return True except Exception as e: logger.warning( "Need to attempt connect before bt_is_connected_to.") logger.warning(repr(e)) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bt_is_connected(self):\n try:\n is_bluetooth_on = self.bt_enabled()\n\n # if bluetooth is OFF then throw Exception\n if not is_bluetooth_on:\n logger.error(\"The bluetooth is disabled on {}\".format(self.phone_info.bluetooth_name))\n\n self.bt_radio(\"on\")\n # return False\n # sys.exit(0)\n\n # displays all paired devices\n self.show_more_for_paired_devices()\n\n connected_devices = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_indicator, 0)\n\n time.sleep(1)\n if len(connected_devices) > 0:\n logger.debug(\n \"phone {} is connected with some bluetooth device\".format(\n self.phone_info.bluetooth_name))\n return True\n except Exception as e:\n logger.warning(\n \"Need to attempt connect before checking connection status.\")\n\n logger.warning(repr(e))\n # raise\n return False", "def is_paired(self, phone):\n bt_util = BTUtils()\n target_addr = self.dut.bluetooth_address\n return bt_util.android_device_in_paired_state(phone, target_addr)", "def bt_connect_and_check(self, bluetooth_device_name_to_connect):\n is_bt_connect_and_check = False\n try:\n logger.info(\"Check if bluetooth has already connected to DUT devices\")\n is_bt_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n\n if is_bt_already_connected:\n is_bt_connect_and_check = True\n else:\n logger.info(\"Start trying to connect to DUT devices\")\n self.bt_connect(bluetooth_device_name_to_connect)\n is_bt_connect_and_check = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n except Exception as e:\n logger.error(\"Need to turn on bluetooth and DUT devices\")\n logger.error(repr(e))\n return is_bt_connect_and_check", "def is_connected(cls, device_config):\n if \"console_port_name\" in device_config[\"persistent\"]:\n address = device_config[\"persistent\"][\"console_port_name\"]\n else:\n address = device_config[\"persistent\"][\"hub_port_name\"]\n return os.path.exists(address)", "def bt_start_discovery(self):\n 
is_start_discovery = False\n try:\n is_bluetooth_on = self.bt_radio('on')\n if '8.1' in self.phone_info.os_version:\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n is_bluetooth_on = True\n if is_bluetooth_on:\n logger.debug(\"Bluetooth discovery Stared on {}\".format(\n self.phone_info.bluetooth_name))\n is_start_discovery = True\n else:\n logger.debug(\"Bluetooth discovery not Stared on {}\".format(\n self.phone_info.bluetooth_name))\n except Exception as e:\n logger.error(\"Trun on Bluetooth Button is not Visible\")\n logger.error(repr(e))\n return is_start_discovery", "def bt_is_paired(self):\n is_paired = False\n try:\n self.show_more_for_paired_devices()\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.paired_device_list, 5)\n pair_device_list = self.find_elements(self.driver.appium_driver,\n self.paired_device_list, 0)\n\n logger.debug('Checks if the phone is paired with the any devices')\n if len(pair_device_list) > 0:\n if pair_device_list[0].text.upper() == \"PAIR NEW DEVICE\":\n return False\n\n logger.debug(\n \"phone {} paired with some bluetooth device\".format(\n self.phone_info.bluetooth_name))\n is_paired = True\n\n except Exception as e:\n logger.warning(\"Need to attempt pair before is_paired\")\n return is_paired", "def connect_magic():\n nearby_devices = bluetooth.discover_devices(lookup_names = True, duration=5)\n\n for addr, name in nearby_devices:\n print(name)\n if name == \"MindWave Mobile\":\n print \"found\"\n return (connect_bluetooth_addr(addr), addr)\n return (None, \"\")", "def is_device_connected(device_id):\n try:\n device_name = subprocess.check_output([ADB_EXECUTOR, '-s', device_id, 'shell', 'getprop', 'ro.product.model'])\n device_name = device_name.decode(DEFAULT_CHARSET).replace('\\n', '').replace('\\r', '')\n 
logger.info('device {} online'.format(device_name))\n except subprocess.CalledProcessError:\n return False\n return True", "def is_connected(self) -> bool:\n return self.arduino is not None", "def ble_device_matches(\n matcher: BluetoothCallbackMatcher | BluetoothMatcher,\n service_info: BluetoothServiceInfoBleak,\n) -> bool:\n device = service_info.device\n if (address := matcher.get(ADDRESS)) is not None and device.address != address:\n return False\n\n if matcher.get(CONNECTABLE, True) and not service_info.connectable:\n return False\n\n advertisement_data = service_info.advertisement\n if (\n service_uuid := matcher.get(SERVICE_UUID)\n ) is not None and service_uuid not in advertisement_data.service_uuids:\n return False\n\n if (\n service_data_uuid := matcher.get(SERVICE_DATA_UUID)\n ) is not None and service_data_uuid not in advertisement_data.service_data:\n return False\n\n if (\n manfacturer_id := matcher.get(MANUFACTURER_ID)\n ) is not None and manfacturer_id not in advertisement_data.manufacturer_data:\n return False\n\n if (manufacturer_data_start := matcher.get(MANUFACTURER_DATA_START)) is not None:\n manufacturer_data_start_bytes = bytearray(manufacturer_data_start)\n if not any(\n manufacturer_data.startswith(manufacturer_data_start_bytes)\n for manufacturer_data in advertisement_data.manufacturer_data.values()\n ):\n return False\n\n if (local_name := matcher.get(LOCAL_NAME)) is not None and (\n (device_name := advertisement_data.local_name or device.name) is None\n or not _memorized_fnmatch(\n device_name,\n local_name,\n )\n ):\n return False\n\n return True", "def is_connected():\n sta_if = network.WLAN(network.STA_IF)\n return sta_if.isconnected()", "def is_connected(self):\n try:\n if PY3:\n self.__gen_serial_id()\n cmnd = \"#{} {}\".format(self.serial_id, protocol.GET_FIRMWARE_VERSION)\n cmndString = bytes(cmnd + \"\\n\", encoding='ascii')\n self.__serial.write(cmndString)\n response = str(self.__serial.readline(),encoding='ascii')\n else:\n 
self.__gen_serial_id()\n cmnd = \"#{} {}\".format(self.serial_id, protocol.GET_FIRMWARE_VERSION)\n cmndString = bytes(cmnd + \"\\n\")\n self.__serial.write(cmndString)\n response = self.__serial.readline()\n except serial.serialutil.SerialException:\n self.__isConnected = False\n if self.__serial.isOpen() and self.__isConnected:\n return True\n else:\n return False", "def test_gatt_connect_get_connected_devices(self):\n gatt_server_cb = self.per_ad.droid.gattServerCreateGattServerCallback()\n gatt_server = self.per_ad.droid.gattServerOpenGattServer(\n gatt_server_cb)\n self.gatt_server_list.append(gatt_server)\n try:\n bluetooth_gatt, gatt_callback, adv_callback = (\n orchestrate_gatt_connection(self.cen_ad, self.per_ad))\n self.bluetooth_gatt_list.append(bluetooth_gatt)\n except GattTestUtilsError as err:\n self.log.error(err)\n return False\n conn_cen_devices = self.cen_ad.droid.bluetoothGetConnectedLeDevices(\n bt_profile_constants['gatt'])\n conn_per_devices = self.per_ad.droid.bluetoothGetConnectedLeDevices(\n bt_profile_constants['gatt_server'])\n target_name = self.per_ad.droid.bluetoothGetLocalName()\n error_message = (\"Connected device {} not found in list of connected \"\n \"devices {}\")\n if not any(d['name'] == target_name for d in conn_cen_devices):\n self.log.error(error_message.format(target_name, conn_cen_devices))\n return False\n # For the GATT server only check the size of the list since\n # it may or may not include the device name.\n target_name = self.cen_ad.droid.bluetoothGetLocalName()\n if not conn_per_devices:\n self.log.error(error_message.format(target_name, conn_per_devices))\n return False\n self.adv_instances.append(adv_callback)\n return self._orchestrate_gatt_disconnection(bluetooth_gatt,\n gatt_callback)", "def is_connected_drm():\n drm_status = xbee.atcmd(AT_CMD_DI)\n if drm_status is None or drm_status not in drm_status_connected:\n return False\n return True", "def bt_try_connect(self, bluetooth_device_name_to_connect,\n 
contact_sharing=False): # TODO: Need to update to\n # use the new/refactored bt_connect() design from above.\n is_bluetooth_connect = False\n try:\n is_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n if is_already_connected is True:\n is_bluetooth_connect = True\n else:\n is_bt_paired = self.bt_is_paired_to(\n bluetooth_device_name_to_connect)\n if contact_sharing:\n if is_bt_paired:\n if self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.previously_paired_device_button, 5)\n self.find_element(self.driver.appium_driver,\n self.previously_paired_device_button,\n 0).click()\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver, self.paired_device_list,\n 10)\n pair_element = self.find_elements(\n self.driver.appium_driver, self.paired_device_list,\n 1)\n for index in range(len(pair_element)):\n if self.is_same_bluetooth_name(pair_element[index],\n bluetooth_device_name_to_connect):\n pair_element[index].click()\n # self._bt_swipe_and_connect(pair_element,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... 
(So far)\n is_bluetooth_connect = True\n self._go_to_connected_device_screen(\n no_of_back_click=1)\n return is_bluetooth_connect\n else:\n if is_bt_paired:\n self.bt_unpair(bluetooth_device_name_to_connect)\n self.bt_radio('off')\n self.bt_radio('on')\n\n try:\n if '8.1' in self.phone_info.os_version or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n sleep(10)\n except:\n logger.debug(\"Pair new device option is not available\")\n is_device_found = False\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 5)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 1)\n # Wait till bluetooth device found in list and click when it\n # is visible in list\n for retry in range(1):\n if retry == 0:\n for index in range(len(element_list)):\n element_text = element_list[index].text\n # For some reason my iPhone 6 (iOS 11.1.1) is\n # getting stuck here because one of the\n # element's text is None.\n # So adding bit to ignore that.\n if type(element_text) is not str:\n logger.warn(\n \"Found pairing list element's text was \"\n \"None! Ignoring for now.\")\n continue\n if self.is_same_bluetooth_name(element_list[index],\n bluetooth_device_name_to_connect):\n element_list[index].click()\n # self._bt_swipe_and_connect(element_list,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... 
(So far)\n logger.debug(\"Connecting to \" +\n bluetooth_device_name_to_connect)\n is_device_found = True\n # NOTE: Removed a bunch of stuff after this...\n break\n else:\n is_device_found = self._bt_retry_to_connect(\n bluetooth_device_name_to_connect)\n if is_device_found == False:\n if '8.1' in self.phone_info.os_version \\\n or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.bt_radio('off')\n self.bt_radio('on')\n self.perform_bottom_to_up_swipe(\n self.driver.appium_driver)\n logger.debug(\"Retries count : \" + str(retry))\n sleep(1)\n else:\n # The below can become strangely slow (take ~12\n # seconds) randomly, so skipping it...\n # is_bt_button_visible = self.__verify_current_screen()\n # logger.debug(\"The BT button is visible? {\n # }\".format(is_bt_button_visible))\n # if not is_bt_button_visible:\n # self.__retry_to_bt_connect(\n # bluetooth_device_name_to_connect)\n break\n if is_device_found:\n is_bluetooth_connect = True\n else:\n self.take_screenshot(self.driver.appium_driver,\n 'bt_connect')\n logger.error(\"Not connecting to given mobile Device\")\n except Exception as e:\n if '8.1' in self.phone_info.os_version or \\\n self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.take_screenshot(self.driver.appium_driver, 'bt_connect')\n logger.error(\n \"Connection is not successfully with bluetooth device\")\n logger.error(repr(e))\n return is_bluetooth_connect", "def is_connected_to_device(self):\n is_connected_to_device = ctypes.c_bool()\n\n result = self._lib.NRFJPROG_is_connected_to_device(ctypes.byref(is_connected_to_device))\n if result != NrfjprogdllErr.SUCCESS:\n raise APIError(result)\n\n return is_connected_to_device.value", "async def connected(self) -> bool:\n args = ['-t', f\"DEVICE INFO,{self.conf['device_address']}\"]\n output = await self.run_vh(args)\n return \"IN USE BY: NO 
ONE\" not in output", "def check_connectivity(self):\n r = self.run_cmd(\"get-state\")\n return r.startswith(\"device\")", "def is_connected_drm():\n return get_runt(\"drm.connected\") == \"true\"", "def bt_is_not_connected_to(self, bluetooth_not_connected_device_name):\n try:\n is_bt_connected_to_device = self.bt_is_connected_to(\n bluetooth_not_connected_device_name)\n if not is_bt_connected_to_device:\n logger.debug(\n 'Bluetooth is not connected with given device {}'.format(\n bluetooth_not_connected_device_name))\n return True\n\n logger.debug('Bluetooth is connected with given device {}'.format(\n bluetooth_not_connected_device_name))\n except Exception as e:\n logger.warning(\n \"Perform unpair/disconnect before performing \"\n \"bt_is_not_connected_to \")\n logger.warning(repr(e))\n return False", "def discover(bt_addr):\n print \"performing inquiry...\"\n nearby_devices = bluetooth.discover_devices(lookup_names = True)\n print \"Found %d devices\" % len(nearby_devices)\n \n for addr, name in neaby_devices:\n print \" %s - %s\" % (addr, name)", "def connectPhone(self):\n value = os.popen(self.checkPhone)\n\n for data in value.readline():\n sDate = str(data)\n if sDate.find(\"device\"):\n return True\n return False", "def bt_is_paired_to(self, paired_bluetooth_device):\n is_paired_with_device = False\n try:\n bt_is_paired = self.bt_is_paired()\n if not bt_is_paired:\n return is_paired_with_device\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.paired_device_list, 10)\n pair_element = self.find_elements(self.driver.appium_driver,\n self.paired_device_list, 0)\n\n\n for index in range(len(pair_element)):\n if self.is_same_bluetooth_name(pair_element[index],\n paired_bluetooth_device):\n is_paired_with_device = True\n break\n except Exception as e:\n logger.warning(\"Need to attempt pair or unpair before is_paired.\")\n logger.warning(repr(e))\n # raise\n return is_paired_with_device", "def is_dialing(self) -> bool:", "def 
isConnected():", "def pair(self, phone, companion_app=True):\n bt_util = BTUtils()\n target_addr = self.dut.bluetooth_address\n if bt_util.android_device_in_connected_state(phone, target_addr):\n self.logger.info('Already paired and connected, skipping pairing.')\n else:\n if bt_util.android_device_in_paired_state(phone, target_addr):\n self.logger.info(\n 'Device is paired but not connected, unpair first.')\n if not bt_util.bt_unpair(phone, self.dut):\n raise TestActsError('Unable to unpair the device')\n bt_util.bt_pair_and_connect(phone, self.dut)\n self.logger.info('DEVICE PAIRED')\n if companion_app:\n profiles = PROFILES_CONNECTED.copy()\n profiles.update(COMP_PROFILE_CONNECTED)\n else:\n profiles = PROFILES_CONNECTED\n self.logger.info(profiles)\n if not bt_util.check_device_bt(device=self.dut, profiles=profiles):\n raise TestActsError('Dut BT status check failed.')\n else:\n return True", "def bt_connect(self, bluetooth_device_name_to_connect, perform_unpair=True,\n no_of_attempt=1, enable_ga=False):\n try:\n is_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n if is_already_connected is True:\n return True\n\n if not perform_unpair:\n return self.connect_paired_device(\n bluetooth_device_name_to_connect)\n\n return self._connect_bluetooth_device(\n bluetooth_device_name_to_connect, no_of_attempt, enable_ga)\n\n except Exception as e:\n self.take_screenshot(self.driver.appium_driver, 'bt_connect')\n if '8.1' in self.phone_info.os_version or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n logger.error(\"Connection failed {} with bluetooth device\".format(\n bluetooth_device_name_to_connect))\n logger.error(repr(e))\n return False", "def check_chip_ble_devices_advertising(devCtrl, name, deviceDetails=None):\n ble_chip_device = scan_chip_ble_devices(devCtrl)\n if ble_chip_device is None or len(ble_chip_device) == 0:\n log.info(\"No BLE CHIP device 
found\")\n return False\n\n chip_device_found = False\n\n for ble_device in ble_chip_device:\n if deviceDetails is not None:\n if (ble_device[\"name\"] == name and\n int(ble_device[\"discriminator\"]) == int(deviceDetails[\"Discriminator\"]) and\n int(ble_device[\"vendorId\"]) == int(deviceDetails[\"VendorID\"]) and\n int(ble_device[\"productId\"]) == int(deviceDetails[\"ProductID\"])):\n chip_device_found = True\n break\n else:\n if (ble_device[\"name\"] == name):\n chip_device_found = True\n break\n\n return chip_device_found", "def detect():\n try:\n s = serial.Serial(port = 0, baudrate = 19200, parity = 'O', timeout=1)\n except Exception, e:\n log = logging.getLogger('root')\n log.exception(e)\n return False\n else:\n return True\n finally:\n s.close()", "def bt_get_discovered_devices(self):\n discovered_bluetooth_device = []\n try:\n self.bt_radio('on')\n if '8.1' in self.phone_info.os_version:\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n time.sleep(10)\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_discovered_device_list,\n 10)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_discovered_device_list,\n 1)\n\n # To add connected bluetooth device name in list\n for index in range(len(element_list)):\n discovered_bluetooth_device.append(\n str(element_list[index].text.replace('\\u200e', '')))\n logger.debug(\"List of Discovered Devices:\" + str(\n discovered_bluetooth_device))\n except Exception as e:\n self.take_screenshot(self.driver.appium_driver,\n '__retry_to_bt_connect')\n logger.error(\"No device are discoverable .\")\n logger.error(repr(e))\n return discovered_bluetooth_device" ]
[ "0.76891696", "0.7073294", "0.6921121", "0.6748067", "0.66977954", "0.6673443", "0.66152287", "0.6541235", "0.6521378", "0.6482565", "0.6402953", "0.64011633", "0.6394357", "0.6369237", "0.6361586", "0.6347135", "0.63391274", "0.63338757", "0.629647", "0.6224777", "0.6150923", "0.6148049", "0.6143567", "0.6128847", "0.61284167", "0.6125405", "0.609876", "0.60787326", "0.6033775", "0.6002213" ]
0.75018513
1
Tests if phone is not connected to a specific bluetooth device
def bt_is_not_connected_to(self, bluetooth_not_connected_device_name): try: is_bt_connected_to_device = self.bt_is_connected_to( bluetooth_not_connected_device_name) if not is_bt_connected_to_device: logger.debug( 'Bluetooth is not connected with given device {}'.format( bluetooth_not_connected_device_name)) return True logger.debug('Bluetooth is connected with given device {}'.format( bluetooth_not_connected_device_name)) except Exception as e: logger.warning( "Perform unpair/disconnect before performing " "bt_is_not_connected_to ") logger.warning(repr(e)) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def bt_is_connected(self):\n try:\n is_bluetooth_on = self.bt_enabled()\n\n # if bluetooth is OFF then throw Exception\n if not is_bluetooth_on:\n logger.error(\"The bluetooth is disabled on {}\".format(self.phone_info.bluetooth_name))\n\n self.bt_radio(\"on\")\n # return False\n # sys.exit(0)\n\n # displays all paired devices\n self.show_more_for_paired_devices()\n\n connected_devices = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_indicator, 0)\n\n time.sleep(1)\n if len(connected_devices) > 0:\n logger.debug(\n \"phone {} is connected with some bluetooth device\".format(\n self.phone_info.bluetooth_name))\n return True\n except Exception as e:\n logger.warning(\n \"Need to attempt connect before checking connection status.\")\n\n logger.warning(repr(e))\n # raise\n return False", "def is_connected(self) -> bool:\n return self.arduino is not None", "def usb_connectivity_disabled(self):\n return self._usb_connectivity_disabled", "def test_verify_state_of_a_device_when_disconnected_from_the_device():", "def bt_stop_discovery(self):\n is_stop_discovery = False\n try:\n is_bluetooth_off = self.bt_radio('off')\n if is_bluetooth_off:\n logger.debug(\"Bluetooth discovery Stoped {}\".format(\n self.phone_info.bluetooth_name))\n is_stop_discovery = True\n else:\n logger.debug(\"Bluetooth discovery completed {}\".format(\n self.phone_info.bluetooth_name))\n is_stop_discovery = False\n except Exception as e:\n logger.error(\"Turn OFF Bluetooth Button is not Visible\")\n logger.error(repr(e))\n return is_stop_discovery", "def is_connected_drm():\n drm_status = xbee.atcmd(AT_CMD_DI)\n if drm_status is None or drm_status not in drm_status_connected:\n return False\n return True", "def the_user_should_not_be_able_to_get_the_state_of_the_device():\n print(\"Trying to get status with device disconnected\")\n bln_result1 = web_app.get_state()\n assert(bln_result1, False)", "def is_paired(self, phone):\n bt_util = BTUtils()\n target_addr = 
self.dut.bluetooth_address\n return bt_util.android_device_in_paired_state(phone, target_addr)", "def the_current_device_is_disconnected():\n assert web_app.disconnect_from_device()", "def wait_for_bluetooth_disconnection(self, timeout=60):\n result = True\n apollo_status = self.dut.get_bt_status()\n self.logger.info('Waiting for the disconnection.')\n time.sleep(1)\n ini_time = time.time()\n while len(apollo_status) != len(\n [s for s in apollo_status.values() if s == 'FALSE']):\n apollo_status = self.dut.get_bt_status()\n if (time.time() - ini_time) > timeout:\n self.logger.warning('Timeout waiting for the disconnection.')\n return False\n time.sleep(1)\n return result", "def bt_is_connected_to(self, blutooth_connected_device_name):\n bt_is_connected = False\n logger.debug(\"Checking if Phone is connected to {!r}\".format(\n blutooth_connected_device_name))\n try:\n\n bt_is_connected = self.bt_is_connected()\n logger.debug(\"Is the phone connected to something? {!r}\".format(\n bt_is_connected))\n\n if bt_is_connected is False:\n return False\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 10)\n connected_device_list = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_device_list, 1)\n\n if len(connected_device_list) > 0:\n for index in range(len(connected_device_list)):\n logger.debug(\n \"Checking if {!r} is the device in question\"\n \" (i.e. 
{!r})\".format(connected_device_list[index].text,\n blutooth_connected_device_name))\n if self.is_same_bluetooth_name(connected_device_list[index],\n blutooth_connected_device_name):\n logger.info(\"Phone is connected to the target device --- {}\".format(\n blutooth_connected_device_name))\n return True\n except Exception as e:\n logger.warning(\n \"Need to attempt connect before bt_is_connected_to.\")\n logger.warning(repr(e))\n return False", "def bt_is_paired(self):\n is_paired = False\n try:\n self.show_more_for_paired_devices()\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.paired_device_list, 5)\n pair_device_list = self.find_elements(self.driver.appium_driver,\n self.paired_device_list, 0)\n\n logger.debug('Checks if the phone is paired with the any devices')\n if len(pair_device_list) > 0:\n if pair_device_list[0].text.upper() == \"PAIR NEW DEVICE\":\n return False\n\n logger.debug(\n \"phone {} paired with some bluetooth device\".format(\n self.phone_info.bluetooth_name))\n is_paired = True\n\n except Exception as e:\n logger.warning(\"Need to attempt pair before is_paired\")\n return is_paired", "def random_bluetooth(self, prob=0.5):\n update_execution_log([\"Smartphone Bluetooth Off\", self._id_phone])\n self._bluetooth = False if random.rand() < prob else True", "def bt_connect_and_check(self, bluetooth_device_name_to_connect):\n is_bt_connect_and_check = False\n try:\n logger.info(\"Check if bluetooth has already connected to DUT devices\")\n is_bt_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n\n if is_bt_already_connected:\n is_bt_connect_and_check = True\n else:\n logger.info(\"Start trying to connect to DUT devices\")\n self.bt_connect(bluetooth_device_name_to_connect)\n is_bt_connect_and_check = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n except Exception as e:\n logger.error(\"Need to turn on bluetooth and DUT devices\")\n logger.error(repr(e))\n return 
is_bt_connect_and_check", "def bt_disconnect(self, device_name_to_disconnect):\n is_bluetooth_disconnect = False\n try:\n is_device_connected = self.bt_is_connected_to(\n device_name_to_disconnect)\n if is_device_connected is False:\n return True\n\n logger.debug('Disconnect Bluetooth device')\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 10)\n connected_device_list = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 1)\n\n for index in range(len(connected_device_list)):\n if self.is_same_bluetooth_name(connected_device_list[index],\n device_name_to_disconnect):\n if self.phone_info.phone_type == PhoneType.IOS \\\n or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n # more_info = self.find_elements(\n # self.driver.appium_driver,\n # self.bluetooth_device_setting_button, 0)\n # more_info[index].click()\n connected_device_list[index].click()\n break\n else:\n connected_device_list[index].click()\n break\n\n self._disconnect_bluetooth_device_from_ios_device()\n if self.phone_info.phone_type == PhoneType.ANDROID:\n try:\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n 'self.android_locators.BLUETOOTH_DISCONNECT_POP_UP_OK_BUTTON_ByXPATH',\n 10)\n self.find_element(self.driver.appium_driver,\n 'self.android_locators.BLUETOOTH_DISCONNECT_POP_UP_OK_BUTTON_ByXPATH',\n 0).click()\n except:\n logger.debug(\n \"Disconnect popup button is currently not available\")\n\n is_bluetooth_disconnect = True\n\n except Exception as e:\n logger.warning(\"Need to attempt connect before disconnect\")\n logger.warning(repr(e))\n self._go_to_connected_device_screen(no_of_back_click=1)\n return is_bluetooth_disconnect", "def is_connected(cls, device_config):\n if \"console_port_name\" in device_config[\"persistent\"]:\n address = device_config[\"persistent\"][\"console_port_name\"]\n else:\n address = 
device_config[\"persistent\"][\"hub_port_name\"]\n return os.path.exists(address)", "def is_dialing(self) -> bool:", "def is_connected_drm():\n return get_runt(\"drm.connected\") == \"true\"", "def isBusy(self):\n state = caget(self.pvBase + \":CHAN1:DeviceStatus_RBV\")\n return state != \"2\"", "def check_connectivity(self):\n r = self.run_cmd(\"get-state\")\n return r.startswith(\"device\")", "def check_device_state(self):", "def detect():\n try:\n s = serial.Serial(port = 0, baudrate = 19200, parity = 'O', timeout=1)\n except Exception, e:\n log = logging.getLogger('root')\n log.exception(e)\n return False\n else:\n return True\n finally:\n s.close()", "def bt_unpair(self, target):\n is_target_unpaired = False\n try:\n # if phone is not paired with any device, return True\n any_paired_device = self.find_element(self.driver.appium_driver, self.paired_device_list, 0)\n wait(2)\n if any_paired_device is None:\n logger.info(\"There is no paired device.\")\n return True\n\n logger.debug('Unpair {} Bluetooth device'.format(\n target))\n\n # enter paired bluetooth device page\n target_detail_tab_xpath = '//android.widget.TextView[@text=\"{}\"]' \\\n '/../../descendant-or-self::android.widget.LinearLayout/android.widget.LinearLayout/' \\\n 'android.widget.ImageView[@resource-id=\"com.coloros.wirelesssettings:id/deviceDetails\"]'\\\n .format(target)\n try:\n self.driver.appium_driver.find_element_by_xpath(target_detail_tab_xpath).click()\n except:\n logger.error(\"Cannot find the target detail tab.\")\n return False\n else:\n logger.error(\"Found target detail tab and cliked it.\")\n\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n\n fail_cnt = 0\n while bt_unpair_button is None and fail_cnt < 5:\n wait(2)\n fail_cnt += 1\n bt_unpair_button = self.find_element(self.driver.appium_driver, self.bt_unpair_button, 0)\n\n if fail_cnt == 5:\n logger.error(\"Cannot find bt_unpair_button.\")\n return False\n else:\n logger.info('Found 
unpair button and clicking it.')\n bt_unpair_button.click()\n is_target_unpaired = True\n\n except Exception as e:\n self.take_screenshot(self.driver.appium_driver, 'bt_unpair')\n logger.warning(\"Need to attempt pair before unpair\")\n logger.warning(repr(e))\n return is_target_unpaired", "def test_is_connected__not_connection(self):\n self.switch.connection = None\n self.switch.is_active = MagicMock()\n self.switch.is_active.return_value = True\n\n self.assertFalse(self.switch.is_connected())", "def is_on(self):\n return (\n self._device.batterylevel != SHCBatteryDevice.BatteryLevelService.State.OK\n )", "def the_user_should_not_be_able_to_connect_to_another_device():\n print(\"Trying to connect 2 devices at once\")\n bln_result = web_app.connect_to_device2()\n assert(bln_result, False)", "def _async_device_unavailable(\n _service_info: bluetooth.BluetoothServiceInfoBleak,\n ) -> None:\n push_lock.reset_advertisement_state()", "def bt_start_discovery(self):\n is_start_discovery = False\n try:\n is_bluetooth_on = self.bt_radio('on')\n if '8.1' in self.phone_info.os_version:\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n is_bluetooth_on = True\n if is_bluetooth_on:\n logger.debug(\"Bluetooth discovery Stared on {}\".format(\n self.phone_info.bluetooth_name))\n is_start_discovery = True\n else:\n logger.debug(\"Bluetooth discovery not Stared on {}\".format(\n self.phone_info.bluetooth_name))\n except Exception as e:\n logger.error(\"Trun on Bluetooth Button is not Visible\")\n logger.error(repr(e))\n return is_start_discovery", "def is_incall_dialing(self) -> bool:", "def test_gatt_connect_get_connected_devices(self):\n gatt_server_cb = self.per_ad.droid.gattServerCreateGattServerCallback()\n gatt_server = self.per_ad.droid.gattServerOpenGattServer(\n gatt_server_cb)\n 
self.gatt_server_list.append(gatt_server)\n try:\n bluetooth_gatt, gatt_callback, adv_callback = (\n orchestrate_gatt_connection(self.cen_ad, self.per_ad))\n self.bluetooth_gatt_list.append(bluetooth_gatt)\n except GattTestUtilsError as err:\n self.log.error(err)\n return False\n conn_cen_devices = self.cen_ad.droid.bluetoothGetConnectedLeDevices(\n bt_profile_constants['gatt'])\n conn_per_devices = self.per_ad.droid.bluetoothGetConnectedLeDevices(\n bt_profile_constants['gatt_server'])\n target_name = self.per_ad.droid.bluetoothGetLocalName()\n error_message = (\"Connected device {} not found in list of connected \"\n \"devices {}\")\n if not any(d['name'] == target_name for d in conn_cen_devices):\n self.log.error(error_message.format(target_name, conn_cen_devices))\n return False\n # For the GATT server only check the size of the list since\n # it may or may not include the device name.\n target_name = self.cen_ad.droid.bluetoothGetLocalName()\n if not conn_per_devices:\n self.log.error(error_message.format(target_name, conn_per_devices))\n return False\n self.adv_instances.append(adv_callback)\n return self._orchestrate_gatt_disconnection(bluetooth_gatt,\n gatt_callback)" ]
[ "0.6554116", "0.6459646", "0.6418169", "0.6350835", "0.632681", "0.62795407", "0.62670743", "0.62643623", "0.6235742", "0.6226124", "0.61696416", "0.6103744", "0.6090443", "0.6086667", "0.60765886", "0.6050864", "0.604585", "0.6026215", "0.6014424", "0.60113806", "0.6004195", "0.5990942", "0.59697956", "0.5935999", "0.5933865", "0.5922042", "0.5849751", "0.5834355", "0.5825383", "0.5824008" ]
0.72711813
0
Set Buletooth Name of Mobile Device.
def set_name(self, set_device_name):
    """Set the Bluetooth name of the mobile device.

    On iOS: Settings -> General -> About -> Name; on Android: the rename
    dialog reached from the Bluetooth screen's "more options" menu.

    :param set_device_name: new device name to apply.
    :return: True if the name was set successfully, False otherwise.
    """
    is_device_name_set = False
    if self.phone_info.phone_type == PhoneType.IOS:
        is_general_visible = False
        try:
            try:
                # Probe whether the General button is already on screen.
                self.find_element(
                    self.driver.appium_driver,
                    'self.ios_locators.GENERAL_NAVIGATION_BUTTON_ByXPATH',
                    1).is_displayed()
                is_general_visible = True
            except Exception:
                # Fix: narrowed from a bare 'except:' so that
                # KeyboardInterrupt/SystemExit are not swallowed.
                logger.debug("General Button is currently not visible ")
            if not is_general_visible:
                # Relaunch Settings to land back on its main screen.
                self.driver.appium_driver.close_app()
                self.driver.appium_driver.launch_app()
            logger.error('Navigate to general and about in settings')
            self.find_element(self.driver.appium_driver,
                              self.general_button_settings).click()
            self.wait_till_element_to_be_visible(self.driver.appium_driver,
                                                 self.status_button, 10)
            self.find_element(self.driver.appium_driver,
                              self.status_button).click()
            self.find_element(self.driver.appium_driver,
                              self.device_name).click()
            text_field = self.find_element(self.driver.appium_driver,
                                           self.device_name_text_box).clear()
            self.driver.appium_driver.set_value(text_field, set_device_name)
            self.find_element(self.driver.appium_driver,
                              self.set_name_button).click()
            is_device_name_set = True
            # Fix: lazy %s formatting — the original passed the name as a
            # stray positional argument that logging could not render.
            logger.debug('Set New Name of Mobile Phone - %s', set_device_name)
        except Exception as e:
            logger.warning("Bluetooth Device name is not Set")
            logger.debug(repr(e))
    elif self.phone_info.phone_type == PhoneType.ANDROID:
        name_text_box = False
        is_bluetooth_button__visible = self.__verify_current_screen()
        try:
            try:
                # Probe whether the rename dialog is already open.
                self.find_element(self.driver.appium_driver,
                                  self.device_name_text_box, 1).is_displayed()
                name_text_box = True
            except Exception:
                logger.debug("Device name text box is not visible")
            if name_text_box is True:
                # Dialog already open: just type the new name and confirm.
                device_name = self.find_element(
                    self.driver.appium_driver,
                    self.device_name_text_box).clear()
                self.driver.appium_driver.set_value(device_name,
                                                    set_device_name)
                self.find_element(self.driver.appium_driver,
                                  self.set_name_button).click()
                is_device_name_set = True
                logger.debug(
                    ':Set New Name of Mobile Phone - ' + set_device_name)
            elif name_text_box is False:
                if not is_bluetooth_button__visible:
                    self.testcase_action = 'STEP -: Go to Bluetooth ' \
                                           'option from settings - '
                    self._go_to_bluetooth_button()
                bluetooth_element = self.find_element(
                    self.driver.appium_driver,
                    self.bluetooth_button_on_off_button, 1)
                # Turn Bluetooth on first if it is off.
                # NOTE(review): '.text is False' can never be true for a
                # string; kept from the original for parity — confirm intent.
                if bluetooth_element.text is False or \
                        bluetooth_element.text == 'OFF':
                    bluetooth_element.click()
                    logger.debug(
                        "Bluetooth is turned on in device with name "
                        + self.phone_info.bluetooth_name)
                else:
                    logger.debug(
                        "Bluetooth is already on "
                        + self.phone_info.bluetooth_name)
                # Open the rename dialog via the "more options" menu.
                self.find_element(self.driver.appium_driver,
                                  self.bluetooth_more_options_button).click()
                self.wait_till_element_to_be_visible(
                    self.driver.appium_driver, self.device_name, 10)
                self.find_element(self.driver.appium_driver,
                                  self.device_name).click()
                self.wait_till_element_to_be_visible(
                    self.driver.appium_driver, self.device_name_text_box, 10)
                device_name = self.find_element(
                    self.driver.appium_driver,
                    self.device_name_text_box).clear()
                self.driver.appium_driver.set_value(device_name,
                                                    set_device_name)
                self.find_element(self.driver.appium_driver,
                                  self.set_name_button).click()
                is_device_name_set = True
                logger.debug(
                    ':Set New Name of Mobile Phone - ' + set_device_name)
        except Exception as e:
            logger.warning("Bluetooth Device name is not Set")
            logger.debug(repr(e))
    return is_device_name_set
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def the_user_changes_the_name_of_the_device(name):\n web_app.change_property_softassert(\"name\",name)", "def set_husb_name(self, h, line_number=0):\n self.husb_name = h\n self._husb_name = line_number", "def set_service_name(name):\n emit(UPDATE_SERVICE_SIGNAL, BREADCRUMB_SENDER, name=name)", "def setName(self, name):\n self.name = str(name)", "def setName(self,value):\n assert value == None or type(value) == str, repr(value)+' is not a valid name'\n self._name = value", "def set_name(self, newname=\"\"):\n self.name = newname", "def player_b_name(self, player_b_name):\n\n self._player_b_name = player_b_name", "def set_name(self, application_name):\r\n self._name = application_name", "def setName(self, name):\n self.name = name", "def setName(self, name):\n self.name = name", "def setName(self, name):\n self.name = name", "def setName(self, name):\n self.name = name", "def setName(self, *args):\n return _libsbml.Port_setName(self, *args)", "def device_display_name(self, device_display_name):\n\n self._device_display_name = device_display_name", "async def set_device_name(self,\n device_name,\n wait_for_response=True,\n reset_inactivity_timeout=True,\n response_timeout_in_seconds=None):\n command = _create_set_device_name_command(device_name=device_name,\n sequence_number=self._get_and_increment_command_sequence_number(),\n wait_for_response=wait_for_response,\n reset_inactivity_timeout=reset_inactivity_timeout)\n\n await self._send_command(command,\n response_timeout_in_seconds)", "def setName(self, name):\n\t\tself.label.setText(name)", "def setName(self, name: str, /) -> Any:\n ...", "def setName(self, *args):\n return _libsbml.FluxBound_setName(self, *args)", "def setName(self, name): \n\n self._name = name", "def setName(self, name):\n self._name = name", "def setName(self, name):\n self._name = name", "def setName(self, name):\n # type: (str)->None\n self._validator.validate_one('name', VALID_OPTS['name'], name)\n self._ifAttributes['name'] = str(name)", 
"def name(self, value: str) -> None:\n self._name = value", "def name(self, name: str):\n self.inst['targetname'] = name", "def set_name(self, name: str) -> None:\n lib.wlr_seat_set_name(self._ptr, name.encode())", "def set_bandname(self,value):\n if value is not None:\n if type(value) != str and type(value) != np.string_:\n raise TypeError(\"The bandname must be a string\", type(value))\n \n self._properties[\"bandname\"] = value", "def set_vendor_name(self, vendor_name_prefix):\n global vendor_name\n self.vendor_unique_id = self.random_string_generator(6)\n vendor_name = vendor_name_prefix + self.vendor_unique_id\n self.set_value_into_input_field(self.vendor_name_textbox_locator, vendor_name)\n return vendor_name", "def set_name(self, name):\n\t\tself.name_ = name", "def new_name(self,new_name):\n self.name = new_name", "def set_name(self, name):\n self.name = name" ]
[ "0.6283922", "0.6186607", "0.6144288", "0.6112249", "0.59848183", "0.5911058", "0.58778536", "0.5868408", "0.5848298", "0.5848298", "0.5848298", "0.5848298", "0.5808407", "0.5796167", "0.5793217", "0.5782618", "0.57755697", "0.57703006", "0.5767888", "0.5767599", "0.5767599", "0.5743565", "0.57151186", "0.57034296", "0.56987405", "0.5637542", "0.5631673", "0.5619657", "0.56140745", "0.55870783" ]
0.69250643
0
Get bluetooth MAC address of Mobile Device.
def get_mac(self): bluetooth_mac_address = '' is_general_visible = False if self.phone_info.phone_type == PhoneType.IOS: try: try: # verify that General Button is visible self.find_element(self.driver.appium_driver, 'self.ios_locators.GENERAL_NAVIGATION_BUTTON_ByXPATH', 0).is_displayed() is_general_visible = True except: logger.debug("General Button is currently not visible ") if is_general_visible: pass else: self.find_element(self.driver.appium_driver, self.general_button_settings).click() self.wait_till_element_to_be_visible( self.driver.appium_driver, self.status_button, 10) self.find_element(self.driver.appium_driver, self.status_button, 2).click() bluetooth_mac_address = self.find_element( self.driver.appium_driver, self.bluetooth_mac_addess, 2).text logger.debug("Bluetooth Mac Address:" + bluetooth_mac_address) except Exception as e: logger.error("Bluetooth Mac Address is not Visible") logger.debug(repr(e)) elif self.phone_info.phone_type == PhoneType.ANDROID: try: self.find_element(self.driver.appium_driver, self.general_button_settings).click() self.wait_till_element_to_be_visible(self.driver.appium_driver, self.status_button, 10) self.find_element(self.driver.appium_driver, self.status_button).click() self.testcase_action = 'STEP -: Get Bluetooth Mac Address' bluetooth_mac_address = self.find_element( self.driver.appium_driver, self.bluetooth_mac_addess, 2).text logger.debug("Bluetooth Mac Address:" + bluetooth_mac_address) except Exception as e: logger.warning("Mac address is not available") logger.debug(repr(e)) return bluetooth_mac_address
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getMac(self):\n # Import netifaces here to prevent error importing this module in setup.py\n import netifaces\n interfaces = ['eth0', 'wlan0']\n try:\n interfaces.append(netifaces.gateways()['default'][netifaces.AF_INET][1])\n except:\n pass\n for interface in interfaces:\n try:\n return netifaces.ifaddresses(interface)[netifaces.AF_LINK][0]['addr']\n except ValueError:\n pass\n except:\n exception('Error getting MAC address')\n return None", "def _get_mac_address():\n if not sys.platform.startswith('linux'):\n raise RuntimeError(\n 'Cannot get the MAC address on non-Linux platforms'\n )\n ifname = get_default_iface_name_linux()\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n info = fcntl.ioctl(s.fileno(), 0x8927,\n struct.pack('256s', bytes(ifname, 'utf-8')[:15]))\n return ''.join('%02x' % b for b in info[18:24])", "def get_mac_address():\n eth0_interface = 'eth0'\n addresses = netifaces.ifaddresses(eth0_interface)[netifaces.AF_LINK][0]\n mac_address = addresses['addr']\n return mac_address", "def get_mac():\n\n interface = [x for x in netifaces.interfaces() if 'wlan' in x or 'wlp' in x][0]\n return netifaces.ifaddresses(interface)[netifaces.AF_LINK][0]['addr']", "def get_mac_address(self):\n\t\treturn call_sdk_function('PrlVmDevNet_GetMacAddress', self.handle)", "def get_mac_address(self):\n\t\treturn call_sdk_function('PrlSrvCfgNet_GetMacAddress', self.handle)", "def mac_address(self) -> str:\n return self._device.mac", "def mac(self):\n return self.device.settings[\"device\"][\"mac\"]", "def mac(self):\n return self.device.settings[\"device\"][\"mac\"]", "def __get_mac_address(self):\n str_hex_mac = uuid.UUID(int=uuid.getnode()).hex[-12:]\n return str_hex_mac", "def mac(self):\n mac = hexlify(WLAN().config('mac'), ':').decode()\n return mac.upper() # MAC-address in upper case", "def get_mac_address(self, result, host):\n if \"mac\" in result['scan'][host][\"addresses\"]:\n return result['scan'][host][\"addresses\"][\"mac\"]\n else:\n return 
\"\"", "def get_mac(self) -> str:\n self.sendline(\"iw {} info\".format(self.iface_dut))\n # We are looking for MAC definition of STA\n # wdev 0x1\n # addr 96:4e:c9:cc:7a:2c\n # type managed\n self.expect(\"addr (?P<mac>..:..:..:..:..:..)\\r\\n\\t(type|ssid)\")\n return self.match.group('mac')", "def mac_address(self):\n if self._mac_address is None:\n expression = expressions.WPA_MAC\n name = expressions.MAC_ADDRESS_NAME\n command = self.status_command\n self._mac_address = self._match(expression,\n name,\n command)\n return self._mac_address", "def get_mac_address(hostname):\n\n url = \"https://{}/redfish/v1/Managers/1/EthernetInterfaces/1/\".format(hostname)\n eth_dict = requests.get(url, auth=(USER,PASS),verify=False).json()\n mac_address = eth_dict['MACAddress']\n\n LOGGER.info(\"IPMI BMC %s reports MAC address as %s\", hostnameh, mac_address)\n\n return mac_address", "def _get_mac(self):\n return self.__mac", "def mac_address(self):\n return self._mac_address", "def get_mac_address(self):\n self.__not_implemented()", "def get_mac(self) -> str:\n hex_mac = hexlify(self.message)[160:172].decode().upper()\n return (\n hex_mac[0:2]\n + \":\"\n + hex_mac[2:4]\n + \":\"\n + hex_mac[4:6]\n + \":\"\n + hex_mac[6:8]\n + \":\"\n + hex_mac[8:10]\n + \":\"\n + hex_mac[10:12]\n )", "def get_my_mac():\n\n mac_set = get_my_mac_set(iface_filter=get_default_route()[1])\n return mac_set.pop()", "def get_adapter_mac(cls, client_object):\n parsed_data = DefaultCRUDImpl.get_adapter_info(\n client_object)\n for record in parsed_data['table']:\n if record['dev'] == client_object.name:\n return record['mac']\n pylogger.warning('Did not find a MAC address for adapter %r on %r' %\n (client_object.name, client_object.ip))", "def get_mac(self, node_id):\n nc = '%02x' % self.node_class\n nr_iface = '%02x' % self.nr_host_interface\n node_id = '%08x' % node_id\n\n return '%s:%s:%s:%s:%s:%s' % (nc, nr_iface, node_id[0:2], node_id[2:4], node_id[4:6], node_id[6:8])", "def 
bmc_mac_address(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"bmc_mac_address\")", "def mac(self) -> str:\n return self.camera_info[\"wifi_mac\"]", "def get_mac_address(selected_network_device, base_api_url, user, passwd): \n url = '{}/{}'.format(base_api_url, selected_network_device)\n device_mac_address = ''\n try:\n response = requests.get(url, verify=False, auth=(user, passwd),\n timeout=5)\n except requests.exceptions.ConnectionTimeout:\n logging.error('failed to establish connection to get mac address')\n\n try:\n network_device_info = response.json()\n except ValueError:\n logging.error('check URL, iDRAC user and password may be invalid')\n logging.info('{}'.format(url))\n\n try:\n device_mac_address = network_device_info[u'MACAddress']\n except KeyError:\n logging.error('No MAC Address found for network devices')\n logging.info('{}'.format(selected_network_device))\n\n return device_mac_address", "def get_mac_address(ifname):\n try:\n return open('/sys/class/net/' + ifname + '/address') \\\n .readline().strip()\n except:\n SysTools.logger.error(\"Failed to get mac-address of %s\", ifname)\n return \"00:00:00:00:00:00\"", "def get_device_info(self, mac_address):\n try:\n out = self.get_output(\"info \" + mac_address)\n except BluetoothctlError, e:\n print(e)\n return None\n else:\n return out", "def get_network_device_mac(devices, user, passwd, base_api_url):\n network_device_mac_address = ''\n\n if devices:\n selected_network_device = generate_network_devices_menu(devices, purpose='DHCP')\n network_device_mac_address = get_mac_address(selected_network_device, base_api_url, user, passwd)\n\n if network_device_mac_address:\n logging.info('device {} mac address is {}'.format(selected_network_device, network_device_mac_address))\n \n return network_device_mac_address", "def getMacAddress(self, wlanInterface):\n self.wlanInterface = wlanInterface\n self.storeMacAddress=[]\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n info = 
fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', '%s'[:15]) % str(self.wlanInterface))\n self.storeMacAddress.append(''.join(['%02x:' % ord(char) for char in info[18:24]])[:-1])\n return self.storeMacAddress", "def MacAddress(self):\n if self.force_auto_sync:\n self.get('MacAddress')\n return self._MacAddress" ]
[ "0.7543795", "0.7485377", "0.7469252", "0.74010116", "0.73277986", "0.7310747", "0.7210467", "0.71960765", "0.71960765", "0.71486706", "0.70828235", "0.70595086", "0.70355076", "0.69185084", "0.6892908", "0.6890958", "0.68855166", "0.68241364", "0.68089026", "0.6738221", "0.67272294", "0.6724906", "0.67223734", "0.66987807", "0.668289", "0.66435504", "0.6634279", "0.6610982", "0.6555823", "0.6537371" ]
0.7547702
0
Airplane Mode in mobile device
def set_airplane_mode(self, action):
    """Turn Airplane Mode ON or OFF on the mobile device.

    :param action: 'ON' or 'OFF' (case-insensitive); any other value is
        rejected with a debug log and no action is performed.
    :return: True if the requested state was reached (or was already
        active), False otherwise.
    """
    is_action_performed = False
    is_airplane_mode_on_off_visible = False
    # Locator names are resolved by find_element(); Android defaults,
    # overridden below for iOS. (The unused NO_SIM_CARD locators from the
    # original were dropped.)
    settings_more_button = \
        'self.android_locators.SETTINGS_MORE_BUTTON_ByXPATH'
    airplane_mode_on_off_toggle = \
        'self.android_locators.AIRPLANE_MODE_ON_OFF_ByID'
    if self.phone_info.phone_type == PhoneType.IOS:
        airplane_mode_on_off_toggle = \
            'self.ios_locators.AIRPLANE_MODE_ON_OFF_ByXPATH'
    try:
        try:
            # Probe whether the Airplane Mode toggle is already on screen.
            is_airplane_mode_on_off_visible = self.find_element(
                self.driver.appium_driver,
                airplane_mode_on_off_toggle, 0).is_displayed()
        except Exception:
            # Fix: narrowed from a bare 'except:' so that
            # KeyboardInterrupt/SystemExit are not swallowed.
            logger.debug(
                "Airplane Mode ON/OFF button is currently not visible")
        if self.phone_info.phone_type == PhoneType.ANDROID:
            if not is_airplane_mode_on_off_visible:
                # Relaunch Settings and open the "More" section where the
                # Airplane Mode toggle lives.
                self.driver.appium_driver.close_app()
                self.driver.appium_driver.launch_app()
                time.sleep(1)
                self.wait_till_element_to_be_visible(
                    self.driver.appium_driver, settings_more_button, 5)
                self.find_element(self.driver.appium_driver,
                                  settings_more_button, 1).click()
                logger.debug(
                    "Click on more button to make Airplane Mode visible")
            airplane_mode_toggle_status = self.find_element(
                self.driver.appium_driver, airplane_mode_on_off_toggle).text
            if airplane_mode_toggle_status.upper() == action.upper():
                # Already in the requested state; nothing to click.
                is_action_performed = True
                logger.debug(
                    "Airplane Mode button set as {}".format(action))
            else:
                self.find_element(self.driver.appium_driver,
                                  airplane_mode_on_off_toggle, 0).click()
                is_action_performed = True
                logger.debug(
                    "Airplane Mode button set as {}".format(action))
            self.driver.appium_driver.back()
        elif self.phone_info.phone_type == PhoneType.IOS:
            if not is_airplane_mode_on_off_visible:
                self.driver.appium_driver.close_app()
                self.driver.appium_driver.launch_app()
                time.sleep(1)
            # presumably the iOS switch reports '1' (on) / '0' (off);
            # NOTE(review): the '== False' / '== True' arms below can never
            # match a string status; kept from the original for parity.
            airplane_mode_toggle_status = self.find_element(
                self.driver.appium_driver,
                airplane_mode_on_off_toggle, 0).text
            if action.upper() == "ON":
                if (airplane_mode_toggle_status == False) or \
                        (airplane_mode_toggle_status == '1'):
                    is_action_performed = True
                    logger.debug(
                        "Airplane Mode button set as {}".format(action))
                    # Dismiss the "Bluetooth will be turned off" dialog
                    # if it appears.
                    try:
                        self.wait_till_element_to_be_visible(
                            self.driver.appium_driver,
                            'self.ios_locators.OK_BUTTON_AFTER_BLUETOOTH_OFF_ByID',
                            3)
                        self.find_element(
                            self.driver.appium_driver,
                            'self.ios_locators.OK_BUTTON_AFTER_BLUETOOTH_OFF_ByID',
                            0).click()
                    except Exception:
                        pass
                else:
                    self.find_element(self.driver.appium_driver,
                                      airplane_mode_on_off_toggle, 0).click()
                    try:
                        self.wait_till_element_to_be_visible(
                            self.driver.appium_driver,
                            'self.ios_locators.OK_BUTTON_AFTER_BLUETOOTH_OFF_ByID',
                            3)
                        self.find_element(
                            self.driver.appium_driver,
                            'self.ios_locators.OK_BUTTON_AFTER_BLUETOOTH_OFF_ByID',
                            0).click()
                    except Exception:
                        pass
                    is_action_performed = True
                    logger.debug(
                        "Airplane Mode button set as {}".format(action))
            elif action.upper() == "OFF":
                if (airplane_mode_toggle_status == True) or \
                        (airplane_mode_toggle_status == '0'):
                    is_action_performed = True
                    logger.debug(
                        "Airplane Mode button set as {}".format(action))
                else:
                    self.find_element(self.driver.appium_driver,
                                      airplane_mode_on_off_toggle, 0).click()
                    time.sleep(1)
                    is_action_performed = True
                    logger.debug(
                        "Airplane Mode button set as {}".format(action))
            else:
                logger.debug(
                    "Only ON/OFF operation is possible with Airplane "
                    "Mode. {} option is not permitted".format(action))
    except Exception as e:
        logger.error(
            "Exception occured while performing Airplane mode {} ".format(
                action))
        logger.error(repr(e))
    return is_action_performed
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _arm_and_offboard(self):\n arming_client = rospy.ServiceProxy('/mavros/cmd/arming', CommandBool)\n set_mode_client = rospy.ServiceProxy('/mavros/set_mode', SetMode)\n\n offb_set_mode = SetMode()\n offb_set_mode.custom_mode = \"OFFBOARD\"\n\n arm_cmd = CommandBool()\n arm_cmd.value = True\n\n wait_time = rospy.Duration(1)\n\n last_request = rospy.Time.now()\n while not rospy.is_shutdown() and (self.mavros_state.mode != \"OFFBOARD\" or not self.mavros_state.armed):\n if self.mavros_state.mode != \"OFFBOARD\" and (rospy.Time.now() - last_request > wait_time):\n res = set_mode_client(0, offb_set_mode.custom_mode)\n if res and res.mode_sent:\n rospy.loginfo(self.name + \": Offboard enabled\")\n last_request = rospy.Time.now()\n else:\n if not self.mavros_state.armed and (rospy.Time.now() - last_request > wait_time):\n res = arming_client(True)\n if res and res.success:\n rospy.loginfo(self.name + \": Vehicle armed\")\n last_request = rospy.Time.now()\n return", "def arm_and_takeoff(aTargetAltitude):\r\n\r\n print(\"Basic pre-arm checks\")\r\n # Don't try to arm until autopilot is ready\r\n while not vehicle.is_armable:\r\n print(\" Waiting for vehicle to initialise...\")\r\n time.sleep(1)\r\n\r\n print(\"Arming Throttle\")\r\n # Copter should arm in GUIDED mode\r\n\t# XXX : what the heck is this?\r\n #vehicle.mode = VehicleMode(\"GUIDED\")\r\n vehicle.armed = True\r\n\r\n # Confirm vehicle armed before attempting to take off\r\n while not vehicle.armed:\r\n print(\" Waiting for arming...\")\r\n time.sleep(1)", "def is_on(self):\n camera = self.coordinator.data[self._camera_id]\n if self._switch_type == \"record_motion\":\n enabled = True if camera[\"recording_mode\"] == TYPE_RECORD_MOTION else False\n elif self._switch_type == \"record_always\":\n enabled = True if camera[\"recording_mode\"] == TYPE_RECORD_ALLWAYS else False\n else:\n enabled = True if camera[\"ir_mode\"] == self._ir_on_cmd else False\n return enabled", "def toggle_airplanes(self):\n if 
self.locations_map.show_airplanes:\n self.locations_map.show_airplanes = False\n else:\n if self.locations_map.zoom > 5:\n self.locations_map.show_airplanes = True\n self.locations_map.start_getting_locations_in_fov()\n else:\n self.btn_toggle_airplanes.state = 'normal'\n show_message_popup(\"Zoom level must be greater than 5.\")", "def hvac_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"hvac_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n ac_mode = self.ac.status.ac_mode\n value = self.HVAC_MODE_MAPPING[ac_mode]\n _LOGGER.debug(f\"hvac_mode: returning {value} (derived from {ac_mode})\")\n return value\n else:\n _LOGGER.debug(f\"hvac_mode: returning HVAC_MODE_OFF - device is off\")\n return HVAC_MODE_OFF", "def fan_mode(self):\n if self.ac.status is None:\n _LOGGER.debug(f\"fan_mode: status is None, returning None\")\n return None\n if self.ac.status.is_on:\n fan_speed = self.ac.status.fan_speed\n value = self.FAN_MODE_MAPPING[fan_speed]\n _LOGGER.debug(f\"fan_mode: returning {value} (derived from {fan_speed})\")\n return value\n else:\n _LOGGER.debug(f\"fan_mode: returning FAN_OFF - device is off\")\n return FAN_OFF", "def is_on(self):\n if self._switch_type == \"record_motion\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_MOTION\n elif self._switch_type == \"record_always\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_ALWAYS\n elif self._switch_type == \"record_smart\":\n return self._camera_data[\"recording_mode\"] == TYPE_RECORD_SMARTDETECT\n elif self._switch_type == \"ir_mode\":\n return self._camera_data[\"ir_mode\"] == self._ir_on_cmd\n elif self._switch_type == \"hdr_mode\":\n return self._camera_data[\"hdr_mode\"] is True\n elif self._switch_type == \"high_fps\":\n return self._camera_data[\"video_mode\"] == TYPE_HIGH_FPS_ON\n else:\n return self._camera_data[\"status_light\"] == \"True\"", "def getPaAdaptiveMode(self, channel, unitCode=0):\n resp = self.XAPCommand('PAA', 
channel, unitCode=unitCode)\n return bool(int(resp))", "def operation(self) -> str:\n return self.vera_device.get_hvac_mode()", "def DualMode(self) -> bool:", "def verify_ap_connection_mode(self, ap_mac, discovery_method = \"\"):\n ap_obj = self.mac_to_ap[ap_mac]\n\n logging.info(\"Get apmgrinfo of the AP %s\" % ap_mac)\n start_time = time.time()\n while True:\n apmgrinfo = ap_obj.get_ap_mgr_info()\n apmgrinfo = self._map_apmgrinfo_keys(apmgrinfo)\n\n if apmgrinfo and apmgrinfo[\"State\"] == \"RUN\":\n break\n\n time.sleep(2)\n if time.time() - start_time > 120:\n raise Exception(\"AP '%s' was not in RUN state\" % ap_mac)\n\n logging.debug(\"Obtained info: %s\" % apmgrinfo)\n\n logging.info(\"Get detailed information in ZD's webui about the AP %s\" % ap_mac)\n ap_info = APS.get_ap_detail_info_by_mac_addr(self.zd, ap_mac)\n logging.debug(\"Obtained infor: %s\" % ap_info)\n\n # Verify if the connection mode shown on AP's CLI and ZD are the same and correct\n conn_mode_in_zd = ap_info['tunnel_mode'].lower()\n # Use only first two characters (L2 or L3)\n conn_mode_in_zd = conn_mode_in_zd[:2]\n\n conn_mode_in_ap = apmgrinfo['Tunnel/Sec Mode'].split(\"/\")[0].strip().lower()\n conn_mode_in_ap = conn_mode_in_ap[:2]\n\n if conn_mode_in_ap != conn_mode_in_zd:\n msg = \"The connection mode shown on AP's CLI was '%s'\" % conn_mode_in_ap\n msg += \", which was different from the mode shown on ZD's webui '%s'\" % \\\n conn_mode_in_zd\n return msg\n\n discovery_reason = {\"fixed-pri\": \"Preferred Primary\",\n \"fixed-sec\": \"Preferred Secondary\",\n \"dns\": \"DNS\", \"dhcp\": \"DHCP\",\n \"record\": \"Last ZoneDir Joined\", \"l2\": \"L2 Discovery\"}\n\n if discovery_method in [\"fixed-pri\", \"fixed-sec\", \"dns\", \"dhcp\"]:\n if conn_mode_in_ap != \"l3\":\n msg = (\"The connection mode was %s instead of L3 [AP %s] [dmode %s]\" %\n (conn_mode_in_ap.upper(), ap_mac, discovery_method))\n return msg\n\n if apmgrinfo['Discover Director By'] != 
discovery_reason[discovery_method]:\n msg = \"The discover method showed on AP's CLI was '%s'\" % \\\n apmgrinfo['Discover Director By']\n msg += \", it should have been '%s'\" % discovery_reason[discovery_method]\n return msg\n\n elif discovery_method == \"record\":\n if apmgrinfo['Discover Director By'] != discovery_reason[discovery_method]:\n msg = \"The discover method showed on AP's CLI was '%s'\" % \\\n apmgrinfo['Discover Director By']\n msg += \", it should have been '%s'\" % discovery_reason[discovery_method]\n return msg\n\n else:\n if self.mac_to_vlan[ap_mac] == self.zd_vlan:\n if conn_mode_in_ap != \"l2\":\n msg = (\"The connection mode was %s instead of L2 [AP %s] [dmode %s]\" %\n (conn_mode_in_ap.upper(), ap_mac, discovery_method))\n return msg\n\n if apmgrinfo['Discover Director By'] not in discovery_reason[\"l2\"]:\n msg = \"The discover method showed on AP's CLI was '%s'\" % \\\n apmgrinfo['Discover Director By']\n msg += \", it should have been '%s'\" % discovery_reason[\"l2\"]\n return msg\n\n else:\n if conn_mode_in_ap != \"l3\":\n msg = (\"The connection mode was %s instead of L3 [AP %s] [dmode %s]\" %\n (conn_mode_in_ap.upper(), ap_mac, discovery_method))\n return msg\n\n return \"\"", "def _select_arm(self):\n pass", "def create_ap(self):\n self.ap.active(True)\n self.ap.config(essid=Connection.AP_SSID)", "def mode_remote(self):\n self.send(\"!MR\")\n # time.sleep(2.0)\n # No feedback, so query to verify set\n got = self.get_mode()\n assert got == \"R\", got", "def is_armed_custom_bypass(self):\n return self == ArmingState.ARMED_CUSTOM_BYPASS", "def setMode(self, request, context):\n \n self.vehicle.mode = VehicleMode(str(request.mode))\n self.vehicle.wait_ready('mode')\n \n return droneconnect_pb2.Null()", "def set_monitor_mode(controller_name):\n subprocess.run([\"ip\", \"link\", \"set\", wifi_name, \"down\"])\n subprocess.run([\"airmon-ng\", \"check\", \"kill\"])\n subprocess.run([\"iw\", wifi_name, \"set\", \"monitor\", \"none\"])\n 
subprocess.run([\"ip\", \"link\", \"set\", wifi_name, \"up\"])", "def hvac_mode(self):\n dps_mode = self._device.get_property(PROPERTY_TO_DPS_ID[ATTR_HVAC_MODE])\n\n if dps_mode is not None:\n return GoldairTuyaDevice.get_key_for_value(HVAC_MODE_TO_DPS_MODE, dps_mode)\n else:\n return STATE_UNAVAILABLE", "def __init__(self, device_mode, loop):\n self.loop = loop\n self.device_mode = device_mode\n if self.device_mode == \"stationary\":\n self.openface = OpenFaceInstance()\n self.openface.startProcess()\n self.stationary_eye_tracker = StationaryEyeTracker()\n elif self.device_mode == \"mobile\":\n self.openpose = OpenPoseInstance()\n self.openpose.startProcess()\n self.mobile_eye_tracker = MobileEyeTracker()\n self.mobile_eye_tracker.calibrate()\n\n self.wristband = Wristband(self.loop)", "def setPaAdaptiveMode(self, channel, isEnabled, unitCode=0):\n resp = self.XAPCommand('PAA', channel, (1 if isEnabled else 0), unitCode=unitCode)\n return bool(int(resp))", "def usb_mode() -> str:", "def fan_mode(self) -> str | None:\n if self.vera_device.get_fan_mode() == \"ContinuousOn\":\n return FAN_ON\n return FAN_AUTO", "def palm_land(self):\n self.palm_landing = True\n self.drone.palm_land()", "def land_on_asv(self, duration=rospy.Duration(600, 0)):\n start = rospy.Time.now()\n # First step approaching the launchpad (side or behind)\n offset = [\n 0.0,\n rospy.get_param(\"~dist_initial_landing\", -5.0),\n rospy.get_param(\"~altitude_initial_landing\", 10.)\n ]\n if \"behind\" in rospy.get_param(\"~landing_approach\", \"side\"):\n rospy.loginfo(\"Approaching ASV from behind ...\")\n else:\n offset = [offset[1], offset[0], offset[2]]\n rospy.loginfo(\"Approaching ASV from side ...\")\n self.follow_target('%s_launchpad' % self.namespace, True, offset, 0.,\n None, rospy.Duration(3, 0), duration)\n # Second step approaching the launchpad (side or behind)\n landed = self.ACTION_FAIL\n offset = [0.0, 0.0, rospy.get_param(\"~takeoff_altitude\", 10.)]\n rospy.loginfo(\"Trying to land 
on ASV ...\")\n while (rospy.Time.now() - start <\n duration) and not (rospy.is_shutdown()) and (\n not self.external_intervened) and (not self.landed):\n if landed == self.ACTION_SUCCESS:\n self._rate.sleep()\n continue\n duration = duration - (rospy.Time.now() - start)\n start = rospy.Time.now()\n self.follow_target('%s_launchpad' % self.namespace, True, offset,\n 0., None, rospy.Duration(3, 0), duration)\n rangefinder_ok = (self._min_range > -1)\n rangefinder_ok = rangefinder_ok and (\n (self.rangefinder - self._min_range) <\n (self.MINIMUM_ALTITUDE + 0.1))\n rel_alt_ok = (self._rel_alt[-1] <\n self.MINIMUM_ALTITUDE) and (self._min_range <= -1)\n if rel_alt_ok or rangefinder_ok:\n rospy.loginfo(\"UAV is landing ...\")\n duration = duration - (rospy.Time.now() - start)\n start = rospy.Time.now()\n landed = self.guided_mode(duration, mode='land')\n self._rate.sleep()\n elif offset[-1] <= self.MINIMUM_ALTITUDE:\n offset[-1] -= 0.1\n else:\n offset[-1] = self.MINIMUM_ALTITUDE\n # Status check to report\n landed = int(self.landed)\n if (rospy.Time.now() - start) > duration:\n landed = self.OUT_OF_DURATION\n if self.external_intervened:\n landed = self.EXTERNAL_INTERVENTION\n return landed", "def _isstandby(self):\n return self.dp.state()==PyTango.DevState.STANDBY", "def toggle_airports(self):\n if self.locations_map.show_airports:\n self.locations_map.show_airports = False\n else:\n if self.locations_map.zoom > 5:\n self.locations_map.show_airports = True\n self.locations_map.start_getting_locations_in_fov()\n else:\n self.btn_toggle_airports.state = 'normal'\n show_message_popup(\"Zoom level must be greater than 5.\")", "def force_switch_on(self):\n self.turn_on_modem()", "def enable():\n if not _status_apf():\n return __apf_cmd(\"-s\")", "def check_enable_mode(self, *args, **kwargs):\n pass", "def indoor_air_quality(self):\n # name, command, signals, delay\n return self._i2c_read_words_from_cmd(command=[0x20, 0x08], reply_size=2, delay=0.05)" ]
[ "0.5949399", "0.5559423", "0.5521669", "0.5517303", "0.5516471", "0.5408404", "0.5400011", "0.53811467", "0.5377833", "0.53648144", "0.53482664", "0.53304946", "0.52924544", "0.5291542", "0.52402145", "0.52201784", "0.52087885", "0.52014166", "0.51950693", "0.5189626", "0.517895", "0.5175445", "0.51691926", "0.5162631", "0.5155256", "0.5127347", "0.51145077", "0.5107832", "0.51052725", "0.50972456" ]
0.71507704
0
Enable/Disable Contact sharing on Mobile device ex. phone_obj.bluetooth.enable_contact_sharing('KLEOS',True)
def enable_contact_sharing(self, dut_name, enable=False): try: is_device_connected = self.bt_is_connected_to(dut_name) if is_device_connected is False: logger.warning( "Currently no bluetooth device is connected with" " {}".format(self.phone_info.bluetooth_name)) return False # self.bt_connect(dut_name) self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_connected_device_list, 10) connected_device_list = self.find_elements( self.driver.appium_driver, self.bluetooth_connected_device_list, 1) for index in range(len(connected_device_list)): if self.is_same_bluetooth_name(connected_device_list[index], dut_name): if (self.phone_info.os_version.startswith('11')) or (self.phone_info.os_version.startswith('8')) \ or (self.phone_info.os_version.startswith('9')) or self.phone_info.os_version.startswith('10'): more_info = self.find_elements( self.driver.appium_driver, self.bluetooth_device_setting_button, 0) more_info[index].click() else: self.find_element(self.driver.appium_driver, self.bluetooth_settings_button, 1).click() break if self.phone_info.phone_type == PhoneType.ANDROID: if '8.1' in self.phone_info.os_version or 'S8' in self.phone_info.bluetooth_name or \ (self.phone_info.os_version.startswith('9')) or (self.phone_info.os_version.startswith('10')): self.wait_till_element_to_be_visible( self.driver.appium_driver, self.contact_sharing_button_in_android_8_1_switch, 20) contact_sharing_status = self.find_element( self.driver.appium_driver, self.contact_sharing_button_in_android_8_1_switch, 0) else: self.wait_till_element_to_be_visible( self.driver.appium_driver, self.contact_sharing_checkbox, 20) contact_sharing_status = self.find_element( self.driver.appium_driver, self.contact_sharing_checkbox, 0).\ get_attribute("checked") # Now click that button if we're in the wrong state. switch_is_on = self.is_switch_on(contact_sharing_status) if enable != switch_is_on: logger.debug( "For phone {!r}, found that Switch is currently {}." 
"Toggling so will be {}.".format( self.phone_info.bluetooth_name, "Enable" if switch_is_on else "Disable", "Enable" if enable else "Disable", )) self.find_element(self.driver.appium_driver, self.contact_sharing_button, 0).click() self.driver.appium_driver.back() self.bt_disconnect(dut_name) self._go_to_connected_device_screen(1) self.bt_connect(dut_name, perform_unpair=False) else: logger.debug( "For phone {!r}, found that Switch is already {}." "Leaving it that way.".format( self.phone_info.bluetooth_name, "ON" if switch_is_on else "OFF", )) return True except Exception as e: logger.warning( "Could not enable/disable contact sharing on connected " "mobile devices {}" .format(self.phone_info.bluetooth_name)) logger.warning(repr(e)) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enable_disable_media_sharing(self, dut_name, enable=True):\n try:\n is_bluetooth_button__visible = self.__verify_current_screen()\n if not is_bluetooth_button__visible:\n self._go_to_bluetooth_button()\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 20)\n device_name = self.find_elements(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 0)\n\n for index in range(len(device_name)):\n if self.is_same_bluetooth_name(device_name[index], dut_name):\n if (self.phone_info.os_version.startswith('11')) or (self.phone_info.os_version.startswith('8')):\n more_info = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_device_setting_button, 0)\n more_info[index].click()\n else:\n self.find_element(self.driver.appium_driver,\n self.bluetooth_settings_button,\n 1).click()\n break\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.media_sharing_switch, 10)\n media_sharing_element = self.find_element(\n self.driver.appium_driver, self.media_sharing_switch, 0)\n\n # Now click that button if we're in the wrong state.\n is_media_sharing_action_performed = self._toggle_switch(\n media_sharing_element, enable)\n logger.debug(\n \"Media Sharing option is set to {} on connected bluetooth devices {}\".format(\n enable, dut_name))\n return is_media_sharing_action_performed\n except Exception as e:\n logger.warning(\n \"Could not enable/disable media sharing on connected mobile devices\"\n .format(self.phone_info.bluetooth_name))\n logger.warning(repr(e))\n return False", "def set_comms_mode(self):", "def EnableBroadcast(self) -> bool:", "def EnableBroadcast(self) -> bool:", "def bt_try_connect(self, bluetooth_device_name_to_connect,\n contact_sharing=False): # TODO: Need to update to\n # use the new/refactored bt_connect() design from above.\n is_bluetooth_connect = False\n try:\n is_already_connected = self.bt_is_connected_to(\n bluetooth_device_name_to_connect)\n 
if is_already_connected is True:\n is_bluetooth_connect = True\n else:\n is_bt_paired = self.bt_is_paired_to(\n bluetooth_device_name_to_connect)\n if contact_sharing:\n if is_bt_paired:\n if self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.previously_paired_device_button, 5)\n self.find_element(self.driver.appium_driver,\n self.previously_paired_device_button,\n 0).click()\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver, self.paired_device_list,\n 10)\n pair_element = self.find_elements(\n self.driver.appium_driver, self.paired_device_list,\n 1)\n for index in range(len(pair_element)):\n if self.is_same_bluetooth_name(pair_element[index],\n bluetooth_device_name_to_connect):\n pair_element[index].click()\n # self._bt_swipe_and_connect(pair_element,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... 
(So far)\n is_bluetooth_connect = True\n self._go_to_connected_device_screen(\n no_of_back_click=1)\n return is_bluetooth_connect\n else:\n if is_bt_paired:\n self.bt_unpair(bluetooth_device_name_to_connect)\n self.bt_radio('off')\n self.bt_radio('on')\n\n try:\n if '8.1' in self.phone_info.os_version or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 10)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_pair_new_device_in_android_8_1_button,\n 2).click()\n sleep(10)\n except:\n logger.debug(\"Pair new device option is not available\")\n is_device_found = False\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 5)\n element_list = self.find_elements(self.driver.appium_driver,\n self.bluetooth_pair_device,\n 1)\n # Wait till bluetooth device found in list and click when it\n # is visible in list\n for retry in range(1):\n if retry == 0:\n for index in range(len(element_list)):\n element_text = element_list[index].text\n # For some reason my iPhone 6 (iOS 11.1.1) is\n # getting stuck here because one of the\n # element's text is None.\n # So adding bit to ignore that.\n if type(element_text) is not str:\n logger.warn(\n \"Found pairing list element's text was \"\n \"None! Ignoring for now.\")\n continue\n if self.is_same_bluetooth_name(element_list[index],\n bluetooth_device_name_to_connect):\n element_list[index].click()\n # self._bt_swipe_and_connect(element_list,\n # index) # Not sure if this is required for\n # tests to work? I can get my Nexus6P (\n # Android 6.0.1) and iPhone 7 Plus (iOS\n # 10.3.2) to work without it... 
(So far)\n logger.debug(\"Connecting to \" +\n bluetooth_device_name_to_connect)\n is_device_found = True\n # NOTE: Removed a bunch of stuff after this...\n break\n else:\n is_device_found = self._bt_retry_to_connect(\n bluetooth_device_name_to_connect)\n if is_device_found == False:\n if '8.1' in self.phone_info.os_version \\\n or self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.bt_radio('off')\n self.bt_radio('on')\n self.perform_bottom_to_up_swipe(\n self.driver.appium_driver)\n logger.debug(\"Retries count : \" + str(retry))\n sleep(1)\n else:\n # The below can become strangely slow (take ~12\n # seconds) randomly, so skipping it...\n # is_bt_button_visible = self.__verify_current_screen()\n # logger.debug(\"The BT button is visible? {\n # }\".format(is_bt_button_visible))\n # if not is_bt_button_visible:\n # self.__retry_to_bt_connect(\n # bluetooth_device_name_to_connect)\n break\n if is_device_found:\n is_bluetooth_connect = True\n else:\n self.take_screenshot(self.driver.appium_driver,\n 'bt_connect')\n logger.error(\"Not connecting to given mobile Device\")\n except Exception as e:\n if '8.1' in self.phone_info.os_version or \\\n self.phone_info.os_version.startswith('9') or self.phone_info.os_version.startswith('10'):\n self.driver.appium_driver.back()\n self.take_screenshot(self.driver.appium_driver, 'bt_connect')\n logger.error(\n \"Connection is not successfully with bluetooth device\")\n logger.error(repr(e))\n return is_bluetooth_connect", "def canShare(self):\n return False", "def toggled_comunication(self):\n if self.actionPC_Monitor.isChecked() and self.actionPC_Monitor.isEnabled():\n self.actionPC_Monitor.setEnabled(0)\n self.actionPC_Sensor_Actuador.setChecked(0)\n self.actionPC_Sensor_Actuador.setEnabled(1)\n self.monitor_environment()\n \n elif self.actionPC_Sensor_Actuador.isChecked() and self.actionPC_Sensor_Actuador.isEnabled():\n 
self.actionPC_Sensor_Actuador.setEnabled(0)\n self.actionPC_Monitor.setChecked(0)\n self.actionPC_Monitor.setEnabled(1)\n self.actuator_environment()", "def is_screenshare(self, is_screenshare):\n \n self._is_screenshare = is_screenshare", "def set_toggle_devices_enabled(self, track, xclip, ident, value = None):\n for device in track.devices:\n if(hasattr(device, 'parameters')):\n self._parent._device_actions.set_device_on_off(device, track, xclip, ident);", "def set_enabled(self, bEnabled):\n\t\tcall_sdk_function('PrlShare_SetEnabled', self.handle, bEnabled)", "def enable_screen_sharing(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_screen_sharing\")", "def force_switch_on(self):\n self.turn_on_modem()", "def set_host_sharing_enabled(self, bVmHostSharingEnabled):\n\t\tcall_sdk_function('PrlVmCfg_SetHostSharingEnabled', self.handle, bVmHostSharingEnabled)", "def set_contact_mechanisms(cls, records, name, value=None):\n Party = Pool().get('party.party')\n\n for record in records:\n Party.write([record.party], {'contact_mechanisms': value})", "def test_modify_phonebook(self):\n bt_contacts_utils.generate_contact_list(self.contacts_destination_path,\n PSE_CONTACTS_FILE, 100)\n phone_numbers_added = bt_contacts_utils.import_device_contacts_from_vcf(\n self.pse, self.contacts_destination_path, PSE_CONTACTS_FILE)\n if not self.connect_and_verify(phone_numbers_added):\n return False\n\n bt_contacts_utils.erase_contacts(self.pse)\n bt_contacts_utils.generate_contact_list(self.contacts_destination_path,\n PSE_CONTACTS_FILE, 110, 2)\n phone_numbers_added = bt_contacts_utils.import_device_contacts_from_vcf(\n self.pse, self.contacts_destination_path, PSE_CONTACTS_FILE)\n return self.connect_and_verify(phone_numbers_added)", "def send_sms_via_modem(self, mobile, text=\"\"):\n\n mobile = self.sanitise_phone(mobile)\n\n # Add '+' before country code\n mobile = \"+\" + mobile\n\n try:\n self.modem.send_sms(mobile, text)\n return True\n except:\n return 
False", "def share_contact(self, name, sender_email):\n contact = self.pull_one_contact(name)[0]\n \n from_email = \"[email protected]\"\n from_password = \"INST326Final\" \n the_name = contact[0]\n number = contact[1]\n email = contact[2]\n zipcode = contact[3]\n \n message = f\"\"\"Subject:New shared contact! \\n\n Name: {the_name},\\n \n Number: {number},\\n\n Email: {email},\\n\n Zip Code: {zipcode} \n \"\"\" \n \n context = ssl.create_default_context()\n with smtplib.SMTP_SSL(\"smtp.gmail.com\", 465, context=context) as server:\n server.login(from_email, from_password)\n server.sendmail(from_email, sender_email, message)\n print(f\"\"\"The contact for {name} has been sent to {sender_email}.\\n\n They may have to check their junk folder.\"\"\")", "def enable_mobile(self, android_package: str = None, android_activity: str = None, device_serial: str = None):\n if not android_package:\n raise AttributeError(\"android_package must be passed in\")\n self.mobile_options = {\n \"androidPackage\": android_package\n }\n if android_activity:\n self.mobile_options[\"androidActivity\"] = android_activity\n if device_serial:\n self.mobile_options[\"androidDeviceSerial\"] = device_serial", "def enable_receiver(self):\n self.set_receiver(True)", "def support_contacts(self, support_contacts):\n self._support_contacts = support_contacts", "def common_contact_authorization(dialersetting, str_contact):\n whitelist = dialersetting.whitelist\n blacklist = dialersetting.blacklist\n\n if whitelist == '*':\n whitelist = ''\n if blacklist == '*':\n blacklist = ''\n\n if whitelist and len(whitelist) > 0:\n try:\n result = re.search(whitelist, str_contact)\n if result:\n return True\n except ValueError:\n logger.error('Error to identify the whitelist')\n\n if blacklist and len(blacklist) > 0:\n try:\n result = re.search(blacklist, str_contact)\n if result:\n return False\n except ValueError:\n logger.error('Error to identify the blacklist')\n\n return True", "def 
enable_call_forwarding_busy(self, dtmf_code: str, number: str) -> None:", "def share():\n return True", "def initialize(self):\n self.ros.enable()\n self.phone_link.enable()", "def block_contacts(self, contacts):\n self._post('contact_actions', None, self._build_params(contacts=contacts, action='block'))", "def enable_radio(self):\n self.acquire_response(b'AT*R1')", "def pair(self, phone, companion_app=True):\n bt_util = BTUtils()\n target_addr = self.dut.bluetooth_address\n if bt_util.android_device_in_connected_state(phone, target_addr):\n self.logger.info('Already paired and connected, skipping pairing.')\n else:\n if bt_util.android_device_in_paired_state(phone, target_addr):\n self.logger.info(\n 'Device is paired but not connected, unpair first.')\n if not bt_util.bt_unpair(phone, self.dut):\n raise TestActsError('Unable to unpair the device')\n bt_util.bt_pair_and_connect(phone, self.dut)\n self.logger.info('DEVICE PAIRED')\n if companion_app:\n profiles = PROFILES_CONNECTED.copy()\n profiles.update(COMP_PROFILE_CONNECTED)\n else:\n profiles = PROFILES_CONNECTED\n self.logger.info(profiles)\n if not bt_util.check_device_bt(device=self.dut, profiles=profiles):\n raise TestActsError('Dut BT status check failed.')\n else:\n return True", "def enable_acm_fullaccess(self):\n self._request({\"enable-acm-fullaccess\": True})", "def is_enabled(self):\n\t\treturn bool(call_sdk_function('PrlShare_IsEnabled', self.handle))", "def set_raw_contact(self, value: Atoms):\n self._raw_contact = value" ]
[ "0.622553", "0.5562689", "0.5413457", "0.5413457", "0.53648555", "0.52825093", "0.5253237", "0.5244934", "0.52176595", "0.5212403", "0.5204729", "0.5198043", "0.51253", "0.51239824", "0.5117424", "0.5071274", "0.5070263", "0.50200707", "0.5019455", "0.50189924", "0.50056666", "0.4983985", "0.49617106", "0.49508637", "0.49433395", "0.4930417", "0.48817414", "0.48763537", "0.48680502", "0.4866542" ]
0.7482193
0
Set Call Routing option on iOS Mobile device ex. phone_obj.bluetooth.select_call_audio_routing('automatic')
def select_call_audio_routing(self, target_type): is_action_performed = False try: call_audio_routing_status = self.get_audio_call_routing_status() if target_type.lower() not in call_audio_routing_status.lower(): if not self._is_audio_call_routing_option_availalble(): self._go_to_audio_call_routing_option() self.find_element(self.driver.appium_driver, 'self.ios_locators.CALL_AUDIO_ROUTING_BUTTON_ByXPATH').click() logger.debug("Clicked on Call Audio Routing button") call_routing_options_list = self.find_elements( self.driver.appium_driver, 'self.ios_locators.OPTION_CALL_ROUTING_BUTTON_ByXPATH') for call_routing_option in call_routing_options_list: if target_type.lower() in call_routing_option.text.lower(): call_routing_option.click() logger.debug( "Audio Call Routing Option {} is Selected ".format( target_type)) is_action_performed = True self.find_element(self.driver.appium_driver, 'self.ios_locators.ACCESSIBILITY_BUTTON_ByID', 0).click() break self.wait_till_element_to_be_visible(self.driver.appium_driver, 'self.ios_locators.GENERAL_BUTTON_ByID', 5) self.find_element(self.driver.appium_driver, 'self.ios_locators.GENERAL_BUTTON_ByID').click() self.wait_till_element_to_be_visible(self.driver.appium_driver, 'self.ios_locators.SETTING_BUTTON_ByID', 5) self.find_element(self.driver.appium_driver, 'self.ios_locators.SETTING_BUTTON_ByID', 0).click() except Exception as e: logger.error( "Exception occurred while setting call routing mode {} " "".format( target_type)) logger.error(repr(e)) return is_action_performed
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enable_call_forwarding_busy(self, dtmf_code: str, number: str) -> None:", "def set_routing(self, rinfo):\n\n self.routing = [ self.Routing(*r) for r in rinfo ]", "def phone_config(self, sip_server: str = \"\") -> None:", "def call_home_phone(self, sender, message):\n if self.home_phone:\n phone_call.call(sender, message, self.home_phone)", "def call_cell_phone(self, sender, message):\n if self.cell_phone:\n phone_call.call(sender, message, self.cell_phone)", "def route(self, rule: str, **option: Any) -> None:\n pass", "def route(self):\n self.route_gnd()\n self.route_vdd()\n self.route_access_tx()", "def call(self, callee: \"SIPPhoneTemplate\") -> None:", "def route(self, rule: str, **option: Any) -> None:\n self._application.route(rule, **option)", "def enable_radio(self):\n self.acquire_response(b'AT*R1')", "def _initiate(self, call):\n if not self.gsm_call:\n raise Exception(\"No connectivity\")\n number = str(call.number)\n logger.info(\"initiate call to %s\", number)\n call_id = yield WaitDBus(self.gsm_call.Initiate, number, \"voice\")\n call_id = int(call_id)\n logger.info(\"call id : %d\", call_id)\n self.lines[call_id] = call\n # TODO: mabe not good idea to store this in the call itself,\n # beside, it makes pylint upset.\n call.__id = call_id", "def setRouterOptions(self, options):\n pass", "def disable_call_forwarding_busy(self, dtmf_code: str) -> None:", "def antenna_set(self):", "def changeRingSetting(self):\n #Input code to accommodate function of Ring setting", "def setPhone(self,phone):\r\n self.phone = phone", "def phone_start(self) -> None:", "def SetWiredAutoConnectMethod(self, method):\n config = ConfigParser.ConfigParser()\n config.read(self.app_conf)\n config.set(\"Settings\",\"wired_connect_mode\", int(method))\n config.write(open(self.app_conf, \"w\"))\n self.wired_connect_mode = int(method)", "def set_addressing_mode(mode):\n send_command(0x20)\n send_command(mode)", "def setHostRoute( self, ip, intf ):\n return self.cmd( 'route add 
-host ' + ip + ' dev ' + intf )", "def configure_route(self, *args, **kwargs):\n if \"neighbor\" in kwargs:\n kwargs['handle'] = \"$\" + kwargs.pop(\"neighbor\")\n self.ixia.ixia_emulation_bgp_route_config(*args, **kwargs)\n assert self.ixia.check_return_code() == \"\"\n\n # Get IxNet port name and neighbor id from handler name\n port = tuple([int(x) for x in kwargs['handle'].split(\"_\")[-4:-1]])\n _port = \"_\".join(map(str, port))\n neighbor_id = kwargs['handle'].split(\"_\")[-1]\n\n if \"r_handler\" not in self.bgp_dict:\n self.bgp_dict[port]['r_handler'] = {}\n\n # Create bgp routers handles list\n # return_code example:\n # {bgp_routes {::ixNet::OBJ-/vport:1/protocols/bgp/neighborRange:2/routeRange:3\n # ::ixNet::OBJ-/vport:1/protocols/bgp/neighborRange:2/routeRange:4} }\n # {status 1}\n _rlist = self.ixia.tcl(\"keylget return_code bgp_routes\")\n _rlist = _rlist.split(\" \")\n for item in _rlist:\n _id = item.split(\":\")[-1]\n _index = _rlist.index(item)\n self.bgp_dict[port]['r_handler'][_id] = \"bgp_routes_{0}_n{1}_{2}\".format(_port, neighbor_id, _id)\n self.ixia.set_var(**{self.bgp_dict[port]['r_handler'][_id]: \"[lindex [keylget return_code bgp_routes] {0}]\".format(_index)})\n\n return self.bgp_dict[port]['r_handler'].copy()", "def patch_config_flow_modem():\n return patch(\n \"homeassistant.components.modem_callerid.config_flow.PhoneModem.test\",\n )", "def setup_fan():\n global dev_fan\n dev_fan = iot_fan.Fan(config.option('pin_name', 'Fan'))\n fan_init()", "def phonecall():\n phone_number = choice(phone_numbers)\n r = twiml.Response()\n r.dial(phone_number)\n return str(r)", "def setMatrixRouting(self, inChannel, outChannel, state=1, inGroup=\"I\",\n outGroup=\"O\", unitCode=0):\n res = self.XAPCommand(\"MTRX\",inChannel, inGroup,\n outChannel, outGroup, state, unitCode=unitCode)\n return res", "def initialize(self):\n self.ros.enable()\n self.phone_link.enable()", "def phone(self, phone):\n\n self._phone = phone", "def phone(self, phone):\n\n 
self._phone = phone", "def phone(self, phone):\n\n self._phone = phone", "def phone(self, phone):\n\n self._phone = phone" ]
[ "0.5423326", "0.5412785", "0.53772193", "0.53401875", "0.5183472", "0.5178245", "0.5069476", "0.5050856", "0.5033115", "0.5030598", "0.49928564", "0.4977638", "0.49533805", "0.4948706", "0.4892624", "0.48704916", "0.48552707", "0.4847174", "0.47952464", "0.47616562", "0.4719147", "0.47132188", "0.47062638", "0.47002783", "0.46967846", "0.4696353", "0.46925485", "0.46925485", "0.46925485", "0.46925485" ]
0.6607859
0
Enable/Disable Media sharing on Mobile device ex. phone_obj.bluetooth.enable_disable_media_sharing('KLEOS',True)
def enable_disable_media_sharing(self, dut_name, enable=True): try: is_bluetooth_button__visible = self.__verify_current_screen() if not is_bluetooth_button__visible: self._go_to_bluetooth_button() self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_connected_device_list, 20) device_name = self.find_elements(self.driver.appium_driver, self.bluetooth_connected_device_list, 0) for index in range(len(device_name)): if self.is_same_bluetooth_name(device_name[index], dut_name): if (self.phone_info.os_version.startswith('11')) or (self.phone_info.os_version.startswith('8')): more_info = self.find_elements( self.driver.appium_driver, self.bluetooth_device_setting_button, 0) more_info[index].click() else: self.find_element(self.driver.appium_driver, self.bluetooth_settings_button, 1).click() break self.wait_till_element_to_be_visible(self.driver.appium_driver, self.media_sharing_switch, 10) media_sharing_element = self.find_element( self.driver.appium_driver, self.media_sharing_switch, 0) # Now click that button if we're in the wrong state. is_media_sharing_action_performed = self._toggle_switch( media_sharing_element, enable) logger.debug( "Media Sharing option is set to {} on connected bluetooth devices {}".format( enable, dut_name)) return is_media_sharing_action_performed except Exception as e: logger.warning( "Could not enable/disable media sharing on connected mobile devices" .format(self.phone_info.bluetooth_name)) logger.warning(repr(e)) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enable_contact_sharing(self, dut_name, enable=False):\n try:\n is_device_connected = self.bt_is_connected_to(dut_name)\n if is_device_connected is False:\n logger.warning(\n \"Currently no bluetooth device is connected with\"\n \" {}\".format(self.phone_info.bluetooth_name))\n return False\n\n # self.bt_connect(dut_name)\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 10)\n connected_device_list = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 1)\n\n for index in range(len(connected_device_list)):\n if self.is_same_bluetooth_name(connected_device_list[index],\n dut_name):\n if (self.phone_info.os_version.startswith('11')) or (self.phone_info.os_version.startswith('8')) \\\n or (self.phone_info.os_version.startswith('9')) or self.phone_info.os_version.startswith('10'):\n more_info = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_device_setting_button, 0)\n more_info[index].click()\n else:\n self.find_element(self.driver.appium_driver,\n self.bluetooth_settings_button,\n 1).click()\n break\n if self.phone_info.phone_type == PhoneType.ANDROID:\n if '8.1' in self.phone_info.os_version or 'S8' in self.phone_info.bluetooth_name or \\\n (self.phone_info.os_version.startswith('9')) or (self.phone_info.os_version.startswith('10')):\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.contact_sharing_button_in_android_8_1_switch, 20)\n contact_sharing_status = self.find_element(\n self.driver.appium_driver,\n self.contact_sharing_button_in_android_8_1_switch,\n 0)\n else:\n self.wait_till_element_to_be_visible(\n self.driver.appium_driver,\n self.contact_sharing_checkbox, 20)\n contact_sharing_status = self.find_element(\n self.driver.appium_driver,\n self.contact_sharing_checkbox, 0).\\\n get_attribute(\"checked\")\n\n # Now click that button if we're in the wrong state.\n\n switch_is_on = 
self.is_switch_on(contact_sharing_status)\n if enable != switch_is_on:\n logger.debug(\n \"For phone {!r}, found that Switch is currently {}.\"\n \"Toggling so will be {}.\".format(\n self.phone_info.bluetooth_name,\n \"Enable\" if switch_is_on else \"Disable\",\n \"Enable\" if enable else \"Disable\",\n ))\n self.find_element(self.driver.appium_driver,\n self.contact_sharing_button, 0).click()\n self.driver.appium_driver.back()\n self.bt_disconnect(dut_name)\n self._go_to_connected_device_screen(1)\n self.bt_connect(dut_name, perform_unpair=False)\n\n else:\n logger.debug(\n \"For phone {!r}, found that Switch is already {}.\"\n \"Leaving it that way.\".format(\n self.phone_info.bluetooth_name,\n \"ON\" if switch_is_on else \"OFF\",\n ))\n\n return True\n except Exception as e:\n logger.warning(\n \"Could not enable/disable contact sharing on connected \"\n \"mobile devices {}\"\n .format(self.phone_info.bluetooth_name))\n logger.warning(repr(e))\n return False", "def set_media_volume_sync(self, dut_name, enable=True):\n try:\n if self.phone_info.phone_type == PhoneType.ANDROID and 'SM' in self._get_android_phone_model():\n is_bt_connected_to_device = self.bt_is_connected_to(dut_name)\n if not is_bt_connected_to_device:\n logger.debug(\n 'For phone found that DUT {} is not connected with {} , '\n 'So Media Volume Sync option is not available '.format(\n dut_name,\n self.phone_info.bluetooth_name))\n return False\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_more_options,\n 5)\n self.find_element(self.driver.appium_driver,\n self.bluetooth_more_options, 0).click()\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.media_volume_text,\n 10)\n self.find_element(self.driver.appium_driver,\n self.media_volume_text, 0).click()\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.media_volume_sync_switch,\n 10)\n\n volume_sync_switch = self.find_element(\n self.driver.appium_driver, 
self.media_volume_sync_switch,\n 0)\n\n # Now click that button if we're in the wrong state.\n is_media_volume_sync = self._toggle_switch(volume_sync_switch,\n enable)\n self.driver.appium_driver.back()\n logger.debug(\n \"Media Volume option is set to {} on connected bluetooth devices {}\".format(\n enable, dut_name))\n return is_media_volume_sync\n logger.warning(\n \"Media Volume Sync Option is not available on {} connected bluetooth devices\".format(\n self.phone_info.bluetooth_name))\n except Exception as e:\n logger.warning(\n \"Could not enable/disable Media Volume Sync on connected mobile devices {}\"\n .format(self.phone_info.bluetooth_name))\n logger.warning(repr(e))\n return False", "def enable_screen_sharing(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_screen_sharing\")", "def set_toggle_devices_enabled(self, track, xclip, ident, value = None):\n for device in track.devices:\n if(hasattr(device, 'parameters')):\n self._parent._device_actions.set_device_on_off(device, track, xclip, ident);", "def disable_mute(self):\n self.mute = False", "def is_screenshare(self, is_screenshare):\n \n self._is_screenshare = is_screenshare", "def canShare(self):\n return False", "def enable_media_cards(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_media_cards\")", "def setSafetyMute(self, isEnabled, unitCode=0):\n resp = self.XAPCommand('SFTYMUTE', (1 if isEnabled else 0), unitCode=unitCode)\n return bool(int(resp))", "def _mute_toggle(self, device_path):\n device = self.pulse_bus.get_object(\"org.PulseAudio.Core1.Device\",\n device_path)\n old_mute = bool(device.Get(\"org.PulseAudio.Core1.Device\", \"Mute\"))\n device.Set(\"org.PulseAudio.Core1.Device\", \"Mute\", dbus.Boolean(not old_mute, variant_level=1))\n\n self._mute_update_handler(new_mute=device.Get(\"org.PulseAudio.Core1.Device\", \"Mute\"),\n dev_path=device_path)", "def set_enabled(self, bEnabled):\n\t\tcall_sdk_function('PrlShare_SetEnabled', 
self.handle, bEnabled)", "def disable_radio(self):\n self.acquire_response(b'AT*R0')", "def send_mute(self, dev, action=None):\n if action not in ['true', 'false', None]:\n action = None\n\n with self.smqtt as client:\n client.publish(\n IOTLinkTopics.MUTE.format(\n domain=dev['domain'],\n device_name=dev['name']\n ),\n payload=action\n )", "def EnableBroadcast(self) -> bool:", "def EnableBroadcast(self) -> bool:", "def is_screenshare(self):\n return self._is_screenshare", "async def toggle(self, ctx):\r\n serverid = ctx.message.server.id\r\n if self.adkillr[serverid]['toggle'] is True:\r\n self.adkillr[serverid]['toggle'] = False\r\n e = discord.Embed(description='**AntiAdv is now disabled.**')\r\n await self.bot.say(embed=e)\r\n elif self.adkillr[serverid]['toggle'] is False:\r\n self.adkillr[serverid]['toggle'] = True\r\n e = discord.Embed(description='**AntiAdv is now enabled.**')\r\n await self.bot.say(embed=e)\r\n dataIO.save_json(\"data/adkillr/adkillr.json\", self.adkillr)", "def toggle_audio_feedback(self, enabled):\r\n self.config.audio_feedback = enabled", "def uses_media(self):\n return True", "def mute():\n request_command(tv_command=TVCommand.mute)", "def dnt_share_app():\r\n msg, status = \"\", True\r\n try:\r\n\r\n 'Click on Do not share button'\r\n flag1 = ui_controls.button(get_obj_identifier('a'))\r\n #flag2 = ui_controls.button(get_obj_identifier('share_dontShare_btn'))\r\n \r\n \r\n\r\n status = False if not(flag1) else True\r\n except Exception as excp:\r\n traceback.print_exc()\r\n msg += str(excp)\r\n status = False\r\n return status, msg", "def handle_mic_mute(_):\n loop.mute()", "async def async_turn_on(self, **kwargs: Any) -> None:\n\n self._previous_mic_level = self.device.mic_volume\n self._previous_record_mode = self.device.recording_settings.mode\n await self.device.set_privacy(True, 0, RecordingMode.NEVER)", "def set_host_sharing_enabled(self, bVmHostSharingEnabled):\n\t\tcall_sdk_function('PrlVmCfg_SetHostSharingEnabled', 
self.handle, bVmHostSharingEnabled)", "def getSafetyMute(self, unitCode=0):\n resp = self.XAPCommand('SFTYMUTE', unitCode=unitCode)\n return bool(int(resp))", "def setAllowScreenReaders(self,value):\n self.PDFreactorConfiguration.in1[\"allowScreenReaders\"] = value", "def mute(self) -> None:\n self.system.notify(\"Jarvis::Muted\")\n self.media.mute(True)", "def force_switch_on(self):\n self.turn_on_modem()", "def mute(self, value: bool) -> None:\n self.audio_mixer.setmute(value)\n self.update_status()", "def toggle_mute(cls) -> bool:\n raise NotImplementedError" ]
[ "0.62880474", "0.59915227", "0.5709176", "0.56337523", "0.557575", "0.55689085", "0.55470765", "0.5503501", "0.54586434", "0.54444385", "0.54250693", "0.53110087", "0.5295516", "0.5264291", "0.5264291", "0.5259133", "0.5258376", "0.52491593", "0.5245353", "0.51939344", "0.5127555", "0.5122233", "0.51071376", "0.50788313", "0.502889", "0.501074", "0.500108", "0.4990796", "0.49696076", "0.49605903" ]
0.76120937
0
Enable/Disable Media Sync Volume on Mobile device ex. phone_obj.bluetooth.set_media_volume_sync(enable=True)
def set_media_volume_sync(self, dut_name, enable=True): try: if self.phone_info.phone_type == PhoneType.ANDROID and 'SM' in self._get_android_phone_model(): is_bt_connected_to_device = self.bt_is_connected_to(dut_name) if not is_bt_connected_to_device: logger.debug( 'For phone found that DUT {} is not connected with {} , ' 'So Media Volume Sync option is not available '.format( dut_name, self.phone_info.bluetooth_name)) return False self.wait_till_element_to_be_visible(self.driver.appium_driver, self.bluetooth_more_options, 5) self.find_element(self.driver.appium_driver, self.bluetooth_more_options, 0).click() self.wait_till_element_to_be_visible(self.driver.appium_driver, self.media_volume_text, 10) self.find_element(self.driver.appium_driver, self.media_volume_text, 0).click() self.wait_till_element_to_be_visible(self.driver.appium_driver, self.media_volume_sync_switch, 10) volume_sync_switch = self.find_element( self.driver.appium_driver, self.media_volume_sync_switch, 0) # Now click that button if we're in the wrong state. is_media_volume_sync = self._toggle_switch(volume_sync_switch, enable) self.driver.appium_driver.back() logger.debug( "Media Volume option is set to {} on connected bluetooth devices {}".format( enable, dut_name)) return is_media_volume_sync logger.warning( "Media Volume Sync Option is not available on {} connected bluetooth devices".format( self.phone_info.bluetooth_name)) except Exception as e: logger.warning( "Could not enable/disable Media Volume Sync on connected mobile devices {}" .format(self.phone_info.bluetooth_name)) logger.warning(repr(e)) return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def enable_disable_media_sharing(self, dut_name, enable=True):\n try:\n is_bluetooth_button__visible = self.__verify_current_screen()\n if not is_bluetooth_button__visible:\n self._go_to_bluetooth_button()\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 20)\n device_name = self.find_elements(self.driver.appium_driver,\n self.bluetooth_connected_device_list,\n 0)\n\n for index in range(len(device_name)):\n if self.is_same_bluetooth_name(device_name[index], dut_name):\n if (self.phone_info.os_version.startswith('11')) or (self.phone_info.os_version.startswith('8')):\n more_info = self.find_elements(\n self.driver.appium_driver,\n self.bluetooth_device_setting_button, 0)\n more_info[index].click()\n else:\n self.find_element(self.driver.appium_driver,\n self.bluetooth_settings_button,\n 1).click()\n break\n\n self.wait_till_element_to_be_visible(self.driver.appium_driver,\n self.media_sharing_switch, 10)\n media_sharing_element = self.find_element(\n self.driver.appium_driver, self.media_sharing_switch, 0)\n\n # Now click that button if we're in the wrong state.\n is_media_sharing_action_performed = self._toggle_switch(\n media_sharing_element, enable)\n logger.debug(\n \"Media Sharing option is set to {} on connected bluetooth devices {}\".format(\n enable, dut_name))\n return is_media_sharing_action_performed\n except Exception as e:\n logger.warning(\n \"Could not enable/disable media sharing on connected mobile devices\"\n .format(self.phone_info.bluetooth_name))\n logger.warning(repr(e))\n return False", "def _mute_toggle(self, device_path):\n device = self.pulse_bus.get_object(\"org.PulseAudio.Core1.Device\",\n device_path)\n old_mute = bool(device.Get(\"org.PulseAudio.Core1.Device\", \"Mute\"))\n device.Set(\"org.PulseAudio.Core1.Device\", \"Mute\", dbus.Boolean(not old_mute, variant_level=1))\n\n self._mute_update_handler(new_mute=device.Get(\"org.PulseAudio.Core1.Device\", \"Mute\"),\n 
dev_path=device_path)", "async def async_toggle(self):\n await self.async_mute_volume(not self._muted)", "async def async_turn_on(self):\n await self.async_mute_volume(False)", "async def async_mute_volume(self, mute: bool) -> None:\n await self._client.set_muted(mute)\n self.async_write_ha_state()", "def mute_volume(self, mute):\n mute_numeric = '1' if mute else '0'\n self._lms.query(self._id, 'mixer', 'muting', mute_numeric)\n self.update_ha_state()", "async def async_turn_on(self, **kwargs: Any) -> None:\n\n self._previous_mic_level = self.device.mic_volume\n self._previous_record_mode = self.device.recording_settings.mode\n await self.device.set_privacy(True, 0, RecordingMode.NEVER)", "def _volume(call: ServiceCall) -> None:\n mute_key_mapping = {\n ATTR_TOGGLE: KEY_MUTE_TOGGLE,\n ATTR_ON: KEY_MUTE_ON,\n ATTR_OFF: KEY_MUTE_OFF,\n }\n for cmd, att in call.data.items():\n if cmd == CMD_UP:\n _process_volume(KEY_VOLUME_UP, att)\n elif cmd == CMD_DOWN:\n _process_volume(KEY_VOLUME_DOWN, att)\n elif cmd == CMD_MUTE:\n hdmi_network.send_command(\n KeyPressCommand(mute_key_mapping[att], dst=ADDR_AUDIOSYSTEM)\n )\n hdmi_network.send_command(KeyReleaseCommand(dst=ADDR_AUDIOSYSTEM))\n _LOGGER.info(\"Audio muted\")\n else:\n _LOGGER.warning(\"Unknown command %s\", cmd)", "async def async_mute_volume(self, mute: bool) -> None:\n if mute:\n await self._volumio.mute()\n else:\n await self._volumio.unmute()", "async def async_mute_volume(self, mute):\n if not (self._slave_mode and self._multiroom_wifidirect):\n if self._is_master:\n value = await self.async_call_linkplay_httpapi(\"setPlayerCmd:slave_mute:{0}\".format(str(int(mute))), None)\n else:\n value = await self.async_call_linkplay_httpapi(\"setPlayerCmd:mute:{0}\".format(str(int(mute))), None)\n \n if value == \"OK\":\n self._muted = bool(int(mute))\n else:\n _LOGGER.warning(\"Failed mute/unmute volume. 
Device: %s, Got response: %s\", self.entity_id, value)\n else:\n value = await self._master.async_call_linkplay_httpapi(\"multiroom:SlaveVolume:{0}:{1}\".format(self._slave_ip, str(int(mute))), None)\n if value == \"OK\":\n self._muted = bool(int(mute))\n else:\n _LOGGER.warning(\"Failed mute/unmute volume. Device: %s, Got response: %s\", self.entity_id, value)", "def mute(self, status=None):\n if status is None:\n status = not self.status.volume_muted\n\n self._socket_client.receiver_controller.set_volume_muted(status)", "def set_volume(self):\n import fcntl\n import struct\n try:\n knob = struct.pack(\"III\", 0, 0, self.volume) # VOLUME_DEVICE_ID, VOLUME_KNOB_ID, volume_level\n fcntl.ioctl(self.mixer_fd, 3, knob)\n except:\n pass", "async def async_mute_volume(self, mute: bool) -> None:\n await self._group.set_muted(mute)\n self.async_write_ha_state()", "async def async_mute_volume(self, mute: bool) -> None:\n await self._projector.send_command(MUTE)", "def mute(self, status=None):\n if status is not None:\n st = status\n else:\n st = not status.volume_muted\n\n self._socket_client.receiver_controller.set_volume_muted(st)", "def mute_volume(self, mute: bool) -> None:\n self._monoprice.set_mute(self._zone_id, mute)", "def mute(self, value: bool) -> None:\n self.audio_mixer.setmute(value)\n self.update_status()", "def setSafetyMute(self, isEnabled, unitCode=0):\n resp = self.XAPCommand('SFTYMUTE', (1 if isEnabled else 0), unitCode=unitCode)\n return bool(int(resp))", "def setVolume(self):\n\n self.mediaplayer.audio_set_volume(self.volumeslider.value())", "def mute():\n request_command(tv_command=TVCommand.mute)", "def set_volume_music(self, value):\n\t\tif self._setting.get(FIFE_MODULE, \"PlaySounds\"):\n\t\t\tself.emitter['bgsound'].setGain(value)", "def on_set_volume(self, event):\n self.currentVolume = self.volumeCtrl.GetValue()\n self.mplayer.SetProperty(\"volume\", self.currentVolume)", "def setSyncMode(self, IsPauseOn = True):\n self._IsPauseOn = IsPauseOn", 
"async def async_set_muted(self, mute):\n self._muted = mute", "def set_volume(self, percent):\n self._socket_client.receiver_controller.set_volume(float(percent / 100))", "def _enable_sync(self, enable_sync: bool = True):\n self.__enable_sync = enable_sync", "def set_volume_level(self, volume):\n self._device.set_volume(mute=False, volume=int(volume * 100))\n self._volume = volume", "async def async_turn_off(self):\n await self.async_mute_volume(True)", "def enable_vmedia(self, set_vmedia_state):\n\n if not isinstance(set_vmedia_state, bool):\n msg = ('The parameter \"%(parameter)s\" value \"%(value)s\" for '\n 'vmedia is invalid. Valid values are: True/False.' %\n {'parameter': 'ServiceEnabled',\n 'value': set_vmedia_state})\n raise exception.InvalidInputError(msg)\n sushy_system = self._get_sushy_system()\n sdflex_virtual_media.VirtualMedia.enable_vmedia(sushy_system,\n set_vmedia_state)", "def OnToggleVolume(self, evt):\r\n is_mute = self.player.audio_get_mute()\r\n\r\n self.player.audio_set_mute(not is_mute)\r\n # update the volume slider;\r\n # since vlc volume range is in [0, 200],\r\n # and our volume slider has range [0, 100], just divide by 2.\r\n self.volume_var.set(self.player.audio_get_volume())" ]
[ "0.65493476", "0.63896316", "0.6337036", "0.62550724", "0.6208844", "0.6174221", "0.6143452", "0.6109035", "0.60698545", "0.60579324", "0.6046454", "0.6025239", "0.60240513", "0.6016948", "0.6011351", "0.59646314", "0.5930123", "0.5896877", "0.5860657", "0.58456665", "0.57859236", "0.5756996", "0.57409203", "0.5723778", "0.5718346", "0.57153124", "0.5687984", "0.5675387", "0.56670433", "0.5646532" ]
0.81021565
0
Returns all the rects that ants have updated on screen and clears update array. Used for screen update optimization
def GetRectUpdates(cls): temp = [] temp += cls.updateArray cls.updateArray.clear() return temp
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def refresh_statuses(self):\n if self.skill_tree_displaying:\n return\n _, self.buff_rects, self.debuff_rects = player_panel_renderer.draw_status(self.player_dict['status']['buffs'],\n self.player_dict['status']['debuffs'],\n refresh=True)", "def __update(self):\n for b in self.__borders:\n b.redraw()\n\n for w in self.__allWins:\n w.refresh()", "def update_rect(self):\n self._update_tiles()", "def redrawAll(self):\n self.canvas.delete(ALL)\n self.gridBG = []\n self.gridBusy = []\n for row in range(self.rows):\n self.gridBG.append([])\n self.gridBusy.append([])\n for col in range(self.cols):\n self.gridBG[row].append(self.drawCell(row, col, self.colors['idle'], bgPattern=\"gray75\"))\n self.gridBusy[row].append(0)\n for row in range(self.rows + 1):\n self.canvas.create_line(\n self.margin,\n self.margin + row * self.cellSize,\n self.margin + self.cols * self.cellSize,\n self.margin + row * self.cellSize,\n dash=(self.dashBlack, self.dashWhite)\n )\n for col in range(self.cols + 1):\n self.canvas.create_line(\n self.margin + col * self.cellSize,\n self.margin,\n self.margin + col * self.cellSize,\n self.margin + self.rows * self.cellSize,\n dash=(self.dashBlack, self.dashWhite)\n )", "def getDisplayRects():\n\tHANDLE_MONITOR, HDC_MONITOR, SCREEN_RECT = list(range(3))\n\n\t# My experiments show this needs to be no more than 3 (for 4 iterations\n\t# through the loop), but use 150 in case there are pathological systems.\n\t# Note that one iteration of the loop takes about 90us on a Q6600.\n\ttries = 150\n\tlastRects = None\n\tfor _ in range(tries):\n\t\ttry:\n\t\t\tmonitors = win32api.EnumDisplayMonitors(None, None)\n\t\texcept SystemError:\n\t\t\t# If you are changing your monitor configuration while EnumDisplayMonitors\n\t\t\t# is enumerating the displays, it may throw SystemError. 
We just try\n\t\t\t# again in this case.\n\t\t\tlastRects = None\n\t\telse:\n\t\t\tfor m in monitors:\n\t\t\t\tm[HDC_MONITOR].Close()\n\t\t\trects = list(m[SCREEN_RECT] for m in monitors)\n\t\t\ttry:\n\t\t\t\tfor rect in rects:\n\t\t\t\t\tcheckRect(rect)\n\t\t\texcept ValueError:\n\t\t\t\tlastRects = None\n\t\t\telse:\n\t\t\t\tif rects == lastRects:\n\t\t\t\t\treturn rects\n\t\t\t\telse:\n\t\t\t\t\tlastRects = rects\n\n\traise RectFailed(\"Could not get stable rect information after %d tries; \"\n\t\t\"last was %r.\" % (tries, lastRects))", "def _update_detections(self, boxes_contours):\n used_boxes = np.zeros(len(boxes_contours), np.bool)\n if boxes_contours is None or len(boxes_contours) == 0:\n for detection in self.detections:\n detection.update(None)\n return used_boxes\n\n for detection in self.detections:\n rd = detection.relative_distance_with(boxes_contours)\n min_rd = rd.min()\n argmin_rd = rd.argmin()\n if min_rd < self.dist_thresh:\n if used_boxes[argmin_rd]:\n detection.is_hidden = True\n\n detection.update(boxes_contours[argmin_rd])\n used_boxes[argmin_rd] = True\n else:\n detection.update(None)\n\n return used_boxes", "def update_rect(self):\n self._update_vertex_list()", "def update_all_elements(self):\n self.screen.blit(self.background_image, (0, 0))\n self.pad_sprites.draw(self.screen)\n self.obstacle_sprites.draw(self.screen)\n self.meteor_sprites.update()\n self.meteor_sprites.draw(self.screen)\n self.player_sprite.update()\n self.player_sprite.draw(self.screen)\n if not self.lander.is_controllable():\n self.screen.blit(self.alert_instruments, (0, 0))\n self.show_on_screen(\"UNCONTROLLABLE\", (120, 82))\n elif self.lander_failure():\n self.screen.blit(self.alert_instruments, (0, 0))\n self.show_on_screen(\"Failure of \" + str(self.failure), (120, 82))\n else:\n self.screen.blit(self.instruments, (0, 0))\n self.update_lander_meters()", "def draw (self):\n screen = self.screen\n dirty = False\n for z, displays in self.layers.iteritems():\n for 
display in displays:\n drew = display.draw(screen)\n # if made changes to the surface\n if drew:\n # set any displays that overlap this one dirty\n for d in display.overlapped:\n d.dirty = True\n dirty |= drew\n return dirty", "def redrawAll(screen, data):\n data.groups.terrain.draw(screen)\n data.groups.walls.draw(screen)\n data.groups.player.draw(screen)\n data.groups.projectiles.draw(screen)\n data.groups.spawners.draw(screen)\n data.groups.monsters.draw(screen)\n data.groups.items.draw(screen)\n data.groups.ui.draw(screen)\n data.groups.damagedWalls.draw(screen)\n if data.screenUI != None:\n screen.blit(data.screenUI, (0, 0))", "def model_refresh(self):\n for x in range(self._dim):\n for y in range(self._dim):\n if self._board[x][y]:\n self.canvas.itemconfig(self.rect[y,x], fill=self._secondary_color)\n else:\n self.canvas.itemconfig(self.rect[y,x], fill=self._primary_color)", "def update_screen(self):\r\n\r\n # Redraw the screen during each pass through the loop.\r\n self._screen.fill(self._bb_settings.bg_color)\r\n\r\n # Redraw all markers around edge of board\r\n\r\n # Draw the play button if the game is inactive\r\n if self._stats.get_status() == \"Start_game\":\r\n for button in self._play_mode_button_list:\r\n button.draw_button()\r\n elif self._stats.get_status() == \"replay\":\r\n for button in self._replay_button_list:\r\n button.draw_button()\r\n else:\r\n self.blitme()\r\n shoot_markers = self.get_entry_exit()\r\n atom_markers = self.get_atom_guess()\r\n for marker in shoot_markers.values():\r\n marker[1].draw_marker()\r\n for atom in atom_markers.values():\r\n atom.draw_marker()\r\n # Make the most recently drawn screen visible.\r\n pygame.display.flip()", "def update(self):\n self.detections = []\n return self.detections", "def _update_bullets(self):\n # Remove bullets that have reached top of screen\n self.bullets.update() ## MOVED HERE !!!!!!\n \n for bullet in self.bullets.copy():\n if bullet.rect.bottom <= 0:\n self.bullets.remove(bullet)\n # 
print(len(self.bullets)) \n\n self._check_bullet_alien_collisions()", "def draw (self):\n screen = self.screen\n dirty = False\n for display in self.displays:\n dirty |= display.draw(screen)\n return dirty", "def _getredrawguarantee(self, skipclear=0):\n window = self._window\n for entry in self._list:\n cmd = entry[0]\n if cmd == 'clear' and self._bgcolor != None and not skipclear:\n r = Qd.NewRgn()\n Qd.RectRgn(r, window.qdrect())\n return r\n if cmd == 'image':\n xscrolloffset, yscrolloffset = window._scrolloffset()\n mask, image, srcx, srcy, coordinates, w, h, units = entry[1:]\n dstx, dsty = self._convert_coordinates(coordinates[:2], units=units)\n dstrect = dstx, dsty, dstx+w, dsty+h\n r = Qd.NewRgn()\n Qd.RectRgn(r, dstrect)\n return r\n return None", "def glclear(self):\n self.pixels = [\n [color(self.r, self.g, self.b) for x in range(self.width)]\n for y in range(self.height)\n ]", "def drawChanges(self):\n self.draw(wait=False)\n draw(self.values,color='yellow',bbox=None,clear=False,shrink=self.shrink)", "def _reset_cache(self):\n self._tick_positions = array([], dtype=float)\n self._tick_extents = array([], dtype=float)\n self._cache_valid = False\n return", "def update_visuals(self, hit_list):\n\n for y in range(self.r):\n for x in range(self.c):\n if hit_list[y][x] == 1: # Miss\n self.tiles[y][x].miss = True\n elif hit_list[y][x] == 2: # Hit\n self.tiles[y][x].hit = True\n else:\n # So that this can reset\n self.tiles[y][x].miss = False\n self.tiles[y][x].hit = False\n \n self.redraw()", "def draw_all(self, im, idx):\n if idx in self.frames_to_boxes_dict:\n self.draw_boxes(im, self.frames_to_boxes_dict[idx])\n\n hud_info = {\"bottles\": 0, \"fragments\":0, \"others\":0}\n if idx in self.frames_to_update_hud:\n hud_info = self.frames_to_update_hud[idx].copy()\n hud_info[\"update\"] = True\n else:\n # find the last hud info\n for x in self.frames_to_update_hud.keys():\n if idx < x:\n break\n hud_info = self.frames_to_update_hud[x]\n self.draw_hud(im, 
hud_info)", "def updateWorld(self):\n\t self.screen.clear()\n self.update()\n self.screen.refresh()", "def update_invalid(self):\n self.invalid = []\n for i in range(9):\n for j in range(9):\n if not self.check_if_locked((i, j)) and not self.check_entered((i, j)) and self.grid[i][j] != 0:\n self.invalid.append((i, j))", "def updateGlobal(self):\n state = self.getState()\n n = len(self.myPlotCanvasList)\n for i in range(n):\n if self.myPlotCanvasList[i] is not None:\n self.myPlotCanvasList[i].myUpdateGlobal(state)", "def clear_annotation(self):\n\n self.xValues = []\n self.yValues = []\n self.colors = []\n\n self.stop_video()", "def _clear(self):\n self._fillitem = self._fillpath = None\n for item in self.items:\n self.screen._delete(item)\n self.currentLineItem = self.screen._createline()\n self.currentLine = []\n if self._drawing:\n self.currentLine.append(self._position)\n self.items = [self.currentLineItem]\n self.clearstamps()", "def _update_boxes(self, x,y):\n\t\talloc = self.alloc2img()\n\t\t\n\t\tif not rect_contains(alloc, x,y):\n\t\t\t# The mouse has left the widget\n\t\t\tself._changed_rect = None\n\t\t\tself._boxes_under_cursor = []\n\t\t\treturn True\n\t\t\n\t\tif self._changed_rect is None or not rect_contains(self._changed_rect, x, y):\n\t\t\tif len(self.model) == 0: return False\n\t\t\t# The mouse left the common area\n#\t\t\tif __debug__: print '(%i,%i)' % (x,y),\n\t\t\t\n#\t\t\tif __debug__: print \"Old rect:\", tuple(self._changed_rect) if self._changed_rect is not None else self._changed_rect,\n\t\t\tself._changed_rect = None\n\t\t\t\t\n\t\t\t\n\t\t\t# Calculate new boxes\n\t\t\tnewboxes = self.find_boxes_under_coord(x,y)\n\t\t\tself._boxes_under_cursor = newboxes\n#\t\t\tif __debug__: print \"newboxes:\", newboxes,\n\t\t\t\n\t\t\t# Update the caching rectangle\n\t\t\tif len(newboxes):\n\t\t\t\tchanged = newboxes[0].rect\n\t\t\telse: # Outside of any boxes, use allocation\n\t\t\t\tchanged = alloc\n\t\t\tfor b in newboxes[1:]:\n\t\t\t\tchanged = 
changed.intersect(b.rect)\n\t\t\tfor r in self.model:\n\t\t\t\tb = r[self.box_col]\n\t\t\t\tif b not in newboxes:\n\t\t\t\t\tchanged = rect_diff(changed, b.rect, (x,y))\n\t\t\tif changed == alloc: # This is so extrodinarily BAD that we should test for it.\n\t\t\t\t# It's bad because if it were true, the cache would never clear\n\t\t\t\tfrom warnings import warn\n\t\t\t\twarn(\"The chosen change rect was the allocation. THIS SHOULD'T HAPPEN.\")\n\t\t\t\tchanged = None\n\t\t\tif __debug__: print \"Change rect:\", changed\n\t\t\tself._changed_rect = changed\n\t\t\tassert changed is None or rect_contains(changed, x,y)\n\t\t\tif __debug__: self.queue_draw()\n\t\t\treturn True\n\t\telse:\n\t\t\treturn False", "def refresh(self, data):\n for region in self.loaded_regions:\n self.regions[region].unload()\n super().clear()\n self.data = data\n self.loaded_regions = set()\n self.draw_visible_area()", "def _update_screen(self):\n self.screen.fill(self.bg_colour)\n\n if not self.waiting:\n self._check_cells()\n self._update_cells()\n for row in self.cells:\n for cell in row:\n cell.draw_cell()\n \n pygame.display.flip()", "def clear_complete_lines():\n global board\n\n\n nb = []\n fn = []\n for idl, line in enumerate(board):\n if 0 in line:\n # Not full\n nb.append(line)\n else:\n fn.append(idl)\n\n if fn:\n # Update the board information\n board = new_board_lines(len(fn)) + nb\n\n # clear\n d_line = [obj for obj in scene.objects if type(obj) is box and obj.y in fn]\n for _ in xrange(10):\n rate(20)\n for obj in d_line:\n obj.opacity -= 0.1\n for obj in d_line:\n obj.visible = 0\n\n\n # decline\n for n in fn:\n for obj in (obj for obj in scene.objects if type(obj) is box and obj.y < n):\n obj.y += 1\n\n return fn" ]
[ "0.6439309", "0.6139465", "0.6006498", "0.5894537", "0.5695805", "0.56834847", "0.56322706", "0.561175", "0.5580826", "0.55793214", "0.5508129", "0.54993623", "0.5488387", "0.5486736", "0.54347324", "0.5434304", "0.54333586", "0.5427185", "0.5426566", "0.54118574", "0.5411804", "0.54056007", "0.5405587", "0.53975517", "0.5393419", "0.5376119", "0.5372158", "0.5362839", "0.5352525", "0.53452337" ]
0.7783788
0
Kills all living ants within the given Rect
def KillAntsInRect(cls, givenRect=pygame.Rect(1,1,1,1)): assert type(givenRect) == pygame.Rect for a in cls.antArray: if givenRect.x+givenRect.w > a.x >= givenRect.x and givenRect.y+givenRect.h > a.y >= givenRect.y: a.isAlive = False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def die(self):\n self.pjs.bombermen.remove(self)\n for block in self.physics.blocks[self.stype]:\n if block == self.rects[0]:\n self.physics.blocks[self.stype].remove(block)", "def kill(self, enemys):\n for enemy in enemys:\n for bullet in self.bullets:\n if enemy.rect.colliderect(bullet.rect):\n enemy.take_damage(bullet.power)\n if(enemy.hp <= 0):\n enemys.remove(enemy)\n self.bullets.remove(bullet)", "def kill(self, coord):\n if self.chart[coord] == ALIVE:\n self.create_rectangle((coord[0] + 1) * UNIT, coord[1] * UNIT,\n coord[0] * UNIT, (coord[1] + 1) * UNIT,\n fill=DEATH, width=THICKNESS, outline='black')\n self.chart[coord] = DEAD", "def kill(self):\n for piece in self.board.pieces:\n piece.destroyed = True", "def _kill_group(self, x, y):\n if self[x, y] not in self.TURNS:\n raise BoardError('Can only kill black or white group')\n\n group = self.get_group(x, y)\n score = len(group)\n\n for x1, y1 in group:\n self[x1, y1] = self.EMPTY\n\n return score", "def check_enemies(self):\n for enemy in self.pjs.enemies:\n for block in enemy.rects:\n if block.overlap(self.rects[0]):\n self.killer = enemy\n return", "def remove(self, screen_rect):\n if not self.rect.colliderect(screen_rect):\n self.kill()", "def del_rectangles(image, rects):\n for r in rects:\n cv2.rectangle(image,\n (r.x, r.y),\n (r.x + r.w - 1, r.y + r.h - 1),\n color=0,\n thickness=-1)", "def check_bounds(self):\n for i, missile in enumerate(self.missile_list):\n if missile.out_of_bounds(self.world):\n del self.missile_list[i]\n self.gameevents.add(\"bounds_remove\", \"missile\")\n for i, shell in enumerate(self.shell_list):\n if shell.out_of_bounds(self.world):\n del self.shell_list[i]\n self.gameevents.add(\"bounds_remove\", \"shell\")", "def rotateRect(rectList, breakingBrick, itemsList, enemiesList, bullets, gunsList, points):\n X, Y, ENMYVX, ENMYVY, ENMYIDLE, ENMYINFLOOR = 0, 1, 4, 5, 6, 7\n GUNSTATE, GUNCOUNT, GUNTYPE = 4, 5, 6\n # Deleting any offscreen Rects\n for index in 
range(len(breakingBrick) - 1, -1, -1): # Going through all of the brick debris and deleting them once they exit below the screen\n if breakingBrick[index][1] > 600:\n del breakingBrick[index]\n for list in range(len(itemsList)): # Going through all of the items and deleting them once they scroll off the screen\n for item in range(len(itemsList[list]) - 1, -1, -1):\n if itemsList[list][item][0] < -300 or itemsList[list][item][1] > 650:\n del itemsList[list][item]\n for list in range(len(rectList)): # Going through all of the rects and deleting them once they scroll off the screen\n for rect in range(len(rectList[list]) - 1, -1, -1):\n if rectList[list][rect][0] < -700:\n del rectList[list][rect]\n for list in range(len(enemiesList)): # Going through all of the enemies and deleting them once they scroll off the screen\n for rect in range(len(enemiesList[list]) - 1, -1, -1):\n if enemiesList[list][rect][0] < -300 or enemiesList[list][rect][1] > 650:\n del enemiesList[list][rect]\n for point in range(len(points) - 1, -1, -1): # Going through all of the points indicators and deleting them once the counter reaches zero\n if points[point][2] == 0:\n del points[point]\n # Activating and deactivating all enemies\n for list in range(len(enemiesList)): # Going through all of the enemies\n for enemy in range(len(enemiesList[list]) - 1, -1, -1):\n if enemiesList[list] == goombas or enemiesList[list] == spinys: # If they are goombas or spinys\n # Activating goombas and spinys if they get close to the screen\n if enemiesList[list][enemy][ENMYIDLE] == 0 and enemiesList[list][enemy][X] < 800:\n enemiesList[list][enemy][ENMYIDLE] = 1\n # Deleting them if they are crushed by mario and the death counter reaches zero\n elif enemiesList[list][enemy][ENMYIDLE] == 2 and enemiesList[list][enemy][ENMYINFLOOR] == 0:\n points.append([enemiesList[list][enemy][0], enemiesList[list][enemy][1], 40, 100])\n del enemiesList[list][enemy]\n elif enemiesList[list] == bullets: # If they are bullets\n # 
Deleting bullets if they are too far off screen\n if enemiesList[list][enemy][0] < -1600 or enemiesList[list][enemy][0] > 1600:\n del enemiesList[list][enemy]\n for gun in range(len(gunsList) - 1, -1, -1): # Going through all of the guns\n # Activating guns if they get close and deleting them if they get too far back\n if gunsList[gun][0] < 1600:\n gunsList[gun][GUNSTATE] = 1\n if gunsList[gun][0] < -1600:\n del gunsList[gun]", "def test_rectangle_kill(self):\n before_b = \"\"\"\\\n before\n aaaxxxbbb\n aaaxxxbbb\n aaaxxxbbb\n aaaxxxbbb\n after\n \"\"\"\n after_b = \"\"\"\\\n before\n aaabbb\n aaabbb\n aaabbb\n aaabbb\n after\n \"\"\"\n self.run_test(\n before_b=before_b,\n after_b=after_b,\n before_sel=(\"2.3\", \"5.6\"),\n after_sel=(\"5.3\", \"5.3\"),\n command_name=\"rectangle-kill\",\n )", "def still_attack_area(self):\n min_range, max_range = self.curr_unit.get_weapon_range()\n self.attack_area = []\n self.move_area = []\n self.__set_attack_area(self.curr_sel, min_range, max_range)", "def act(self):\n\n current_room = self.game_map.rooms[self.room]\n\n if self.cooldown_ctr == 0:\n num_others = sum(current_room) - 1\n\n if num_others:\n # How likely it is to kill is proportional to the amount of other people present in the room\n threshold = 1 - (num_others / len(current_room))\n\n if random.random() < threshold:\n # Kill!\n # Select the IDs of all others in the room that are present and are not one of the impostors\n # This is easily changed for multiple known impostor IDs\n # IDs from Impostors start at self.num_crew and go up to self.num_crew + self.num_imp - 1\n\n present_crewmates = [x for x in range(len(current_room)) if\n not current_room[x] == 0 and not x >= self.num_crew]\n\n # If we are in a room with only impostors, this can happen\n if len(present_crewmates) == 0:\n return\n\n to_kill = random.sample(present_crewmates, 1)[0]\n\n self.game_map.add_room_event(self.room, RoomEvent(EventType.KILL, self.agent_id, \"Kill\"))\n\n 
self.logger.log(f\"Impostor {self.agent_id} kills {to_kill}!\", Logger.LOG | Logger.PRINT_VISUAL)\n\n self.reset_cooldown()\n\n return to_kill\n else:\n self.cooldown_ctr -= 1\n\n # Only is called when no kill has occurred.\n self.__move()", "def kill(self):\n # stuff\n pygame.sprite.Sprite.kill(self)", "def kill(self):\n # stuff\n pygame.sprite.Sprite.kill(self)", "def handle_collisions():\n for sprite in sprite_group:\n for other in pygame.sprite.spritecollide(sprite, sprite_group, False):\n if sprite is not other and DO_KILL:\n sprite.kill()\n other.kill()", "def clean(self):\n for i in range(len(self.asteroid_type) - 1, -1, -1):\n x, y = self.get_coords(self.asteroid_type[i])\n if x < -self.gap:\n self.del_asteroid(i)", "def destroy_all():\n player_loc = _player_loc()\n minec = _get_mc()\n rad = 10\n for x in xrange(player_loc.x - rad, player_loc.x + rad):\n for y in xrange(player_loc.y - rad, player_loc.y + rad):\n for z in xrange(player_loc.z - rad, player_loc.z + rad):\n if minec.getBlock(x, y, z) == Bot._BOT_BLOCK:\n minec.setBlock(x, y, z, _AIR)", "def on_min_hit_points(self):\n self.kill()", "def unoccupied(self):\n self.is_occupied = 0\n for hex in self.fon:\n hex.remove_neighbor()\n hex.set_quality()", "def kill(self):\n for tlight in self.trafficLights:\n self.trafficLights[tlight].stop()", "def clear_press(self):\n\n for win in self.window.additional_windows:\n win.del_win()\n\n pos = self.window.physics_canvas.physics_objects\n self.window.physics_canvas.physics_objects = []\n\n for obj in pos:\n self.window.physics_canvas.canvas.delete(obj.canvas_id)\n\n for force in self.window.physics_canvas.interacting_forces:\n force.remove()\n\n for particle in self.window.physics_canvas.particles:\n self.window.physics_canvas.canvas.delete(particle.canvas_id)", "def kill(self):\n if self.living == True:\n self.living = False\n self.arrow_enter_callback(self)", "def destroy(explosions,inkblots,hero,deaths,stats):\n explosion_hits_inkblot = 
pygame.sprite.groupcollide(explosions,inkblots,False,True,pygame.sprite.collide_mask)\n explosion_hits_hero = pygame.sprite.spritecollideany(hero,explosions,pygame.sprite.collide_mask)\n explosion_hits_death = pygame.sprite.groupcollide(explosions,deaths,False,True,pygame.sprite.collide_mask)\n \n if explosion_hits_inkblot:\n stats.inkblot_killed()\n if explosion_hits_death:\n stats.death_killed()\n if explosion_hits_hero != None:\n hero.alive = False", "def reset(self):\n for lane in self.lanes.values():\n lane.puck_area.clear_widgets()\n lane.patrons = list()\n lane.disabled = False\n lane.beers = list()\n\n self.message_holder.remove_widget(self.you_lose_label)\n self.message_holder.remove_widget(self.you_win_label)", "def unaway(self):\n self.away()", "def pop_bubble(self):\n i = self.cursor_bubble_collide()\n if i != -1:\n bubble = self.all_bubbles.sprites()[i]\n bubble.bubblekill()\n self.increase_score(bubble.get_value() * Settings.points_multiplier)", "def remove_rect(self, r, obj):\n cells = self._cells_for_rect(r)\n for c in cells:\n self._remove(c, obj)", "def clear_trash(self):\n for ball in self.balls:\n if ball.status == 4:\n self.balls.remove(ball)\n for bull in self.p.bullets:\n if bull[0].status == 4:\n self.p.bullets.remove(bull)", "def stopMovementAll(self):\n self.stopMovementX()\n self.stopMovementY()\n self.stopMovementZ()" ]
[ "0.59094775", "0.5874609", "0.58699363", "0.58131564", "0.5798624", "0.57615143", "0.57418394", "0.57174635", "0.57074404", "0.56431526", "0.5636674", "0.5634212", "0.55774325", "0.5563028", "0.5563028", "0.5557454", "0.5555643", "0.5487772", "0.5455775", "0.5404981", "0.53954345", "0.53353614", "0.5325692", "0.53079504", "0.5299401", "0.5294203", "0.52407223", "0.52383584", "0.52334726", "0.52304864" ]
0.7688294
0
Ant turns left and moves its current step that direction
def MoveLeftStep(self): if self.facing == 0: self.facing = 3 self.x -= self.stepLeft elif self.facing == 1: self.facing = 0 self.y -= self.stepUp elif self.facing == 2: self.facing = 1 self.x += self.stepRight elif self.facing == 3: self.facing = 2 self.y += self.stepDown
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def move_left(self):\n self.yaw_motor.step_backward()", "def turn_left(self):\n temp = self.direction[0]\n self.direction[0] = self.direction[1]\n self.direction[1] = -temp", "def go_left(self):\n self.change_x = -6\n self.direction = \"L\"", "def left(self, angle):\r\n self.dir -= math.radians(angle)", "def turn_ship_left(self):\n self.degrees += movement", "def step(self):\n r, c = self.ant_r, self.ant_c\n if self.grid[r][c] == LangtonAnt.WHITE:\n self.direction = rotate_counterclock(self.direction)\n self.grid[r][c] = LangtonAnt.BLACK\n else:\n self.direction = rotate_clockwise(self.direction)\n self.grid[r][c] = LangtonAnt.WHITE\n\n self.ant_r -= self.direction[1]\n self.ant_c += self.direction[0]", "def turn_left(self):\n\t\tself.direction = (self.direction - 1)%4", "def turn_left(self):\n self.facing_direction -= self.config\n if self.facing_direction < 0:\n self.facing_direction += 8\n self.x, self.y = self.compute_positions()", "def steerleft(self):\n self.direction = self.direction+self.steering\n if self.direction > 360:\n self.direction = 0+90\n self.image, self.rect = rot_center(self.image_orig,self.rect,self.direction)", "def left():\n Robot.rotate(\"LEFT\")", "def move_left(self,distance):\n self.turn_left()\n self.move_forward(distance)\n # self.log_arr.append(\"left\")", "def leftTurn(self):\n #print('leftTurn\\r')\n self.linearVector = Vector3(x=0.0, y=0.0, z=0.0)\n self.angularVector = Vector3(x=0.0, y=0.0, z=1.0)", "def go_left(self):\n self.rect.centerx -= self.__dx", "def MoveRightStep(self):\n if self.facing == 0:\n self.facing = 1\n self.x += self.stepLeft\n elif self.facing == 1:\n self.facing = 2\n self.y += self.stepUp\n elif self.facing == 2:\n self.facing = 3\n self.x -= self.stepRight\n elif self.facing == 3:\n self.facing = 0\n self.y -= self.stepDown", "def turn_left(self, duration):\n self.set_motor(self.left_motor, 'right', 0.5)\n self.set_motor(self.right_motor, 'right', 0.5)\n time.sleep(duration)", "def turnLeft(ev3):\n 
ev3.set_angle(\"A\", \"-30\", \"-90\")\n ev3.set_angle(\"B\", \"30\", \"90\")\n ev3.set_angle(\"C\", \"-30\", \"-90\")", "def move_left(self):\n if self.change_valid(dx=-1):\n self.x -= 1", "def turn_left(self):\n self.direction_mod_offset -= 1\n self.calculate_offset_mapping()\n direction_num = self.direction_mod_offset % len(self.direction_arr)\n client.rotateToYawAsync(direction_num * 90).join()", "def left(self, param):\n\t\tglobal estop_flag, move_state\n\t\t#If input angle is zero, set angle to default\n\t\tif param:\n\t\t\tangle = param\n\t\telse:\n\t\t\tangle = riu.default_angle\n\n\t\tsignal.alarm(0) #Disable timer interrupt for the duration of the movement\n\t\t#safely grab current yaw\n\t\twith self.move_state_lock:\n\t\t\tcurrent_yaw = (math.degrees(move_state['yaw']) + 360) % 360\n\t\t#Set goal to yaw+angle. Add 360 then mod to account for negative angles but avoid going over 360\n\t\tgoal = (current_yaw + angle) % 360\n\t\thalf_goal = (current_yaw + angle/2) % 360\n\t\tif self.angle_lock:\n\t\t\tif goal >= 315 and goal < 45:\n\t\t\t\tgoal = self.zeroed_angle\n\t\t\telif goal >= 45 and goal < 135:\n\t\t\t\tgoal = self.zeroed_angle + 90\n\t\t\telif goal >= 135 and goal < 225:\n\t\t\t\tgoal = self.zeroed_angle + 180\n\t\t\telif goal >= 225 and goal < 315:\n\t\t\t\tgoal = self.zeroed_angle + 270\n\t\tgoal = goal % 360\n\t\thalf_goal = (current_yaw + angle/2) % 360\n\t\thalfway_flag = False #used to flag if we've already sent out a halfway message\n\t\t#Anonymous function that calculates the current counterclockwise distance to the goal\n\t\tchkdist = lambda pos, goal: round(goal - pos + 360 * (goal < pos), 1)\n\t\t#Gets current distance and initially sets previous distance = distance\n\t\tdistance = chkdist(current_yaw, goal)\n\t\tprev_dist = distance\n\t\t\"\"\"Continues to move while absolute distance is not within angular_error and counterclockwise\n\t\tdistance is not increasing. 
NOTE: absolute distance is the shortest distance in either direction,\n\t\twhile counterclockwise distance is the distance using only counterclockwise movement.\n\t\tThe angular_error condition was added because the movements tended to end within the first few \n\t\tcycles due to some float error. With the error condition, the movement can only end when inside\n\t\tat least the general area of the goal.\"\"\"\n\t\twhile distance <= prev_dist or self.get_abs_dist(current_yaw, goal) > riu.angular_error:\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\t#Construct and publish left turn message\n\t\t\t\ttwist_msg = Twist()\n\t\t\t\ttwist_msg.angular.z = riu.turn_rate\n\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#If distance to goal is less than half the initial distance, publish the half done message\n\t\t\t\tif distance <= half_goal and not halfway_flag:\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\t\t\t\t#Update current position\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_yaw = (math.degrees(move_state['yaw']) + 360) % 360\n\t\t\t\t#Set previous distance, then update distance based on new position\n\t\t\t\tprev_dist = distance\n\t\t\t\tdistance = chkdist(current_yaw, goal)\n\t\t\trospy.sleep(.2)\n\t\t#After loop exit, publish stop message and send done message to cmd_queue\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval) #Restart timer", "def move_left(self):\n\t\tself.set_x_vector(-1 * constants.DONKEY_SPEED)", "def left_twist(self):\n self.turn_by_deg(-179)\n #time.sleep(.1)\n self.stop()\n self.turn_by_deg(-179)\n #time.sleep(.1)\n self.stop()", "def _animateLeftAlien(self):\n incr = self.speedChange()\n max = self._maxAlien()\n min = self._minAlien()\n left = self.getLPos()\n\n if left > ALIEN_H_SEP:\n if self._direction == True:\n for a in self._aliens:\n for b in a:\n if b 
!= None:\n b.x -= ALIEN_H_WALK\n self._time = 0\n self._alienStep += 1\n n = 0\n if left <= ALIEN_H_SEP:\n if self._direction == True:\n for a in self._aliens:\n for b in a:\n if b != None:\n b.y -= ALIEN_V_SEP\n self._direction = False\n self._time = 0\n self._alienStep += 1", "def left(self,degrees):\n assert (type(degrees) in [int, float]), \"parameter degrees:%s is not a valid number\" % `distance`\n self._turtle.left(degrees)", "def rotate_left(self, angle, maze, game_display):\n for _ in range(angle):\n self.rotate(maze=maze, direction=-1, game_display=game_display)", "def go_left(self):\n self.change_x = -6", "def go_left(self):\n self.change_x = -6", "def left(self, angle):\n self.right(-angle)", "def left(self):\r\n if self.d in direction_tuple:\r\n index = direction_tuple.index(self.d)\r\n if index == 0:\r\n self.d = direction_tuple[3]\r\n else:\r\n self.d = direction_tuple[index - 1]\r\n else:\r\n print(\"NO VALID ROBOT POSITION\")", "def left_forward(self):\n self.left_motor.run_forever(speed_sp=self.MAX_SPEED)", "def turn_left(self):\n turn = self.__heading + Ship.TURN\n if turn >= Ship.MAX_HEADING:\n turn -= Ship.MAX_HEADING\n self.__heading = turn" ]
[ "0.73117554", "0.726275", "0.7228604", "0.7218252", "0.71775174", "0.71680087", "0.7089251", "0.70846677", "0.6892116", "0.6867011", "0.6843716", "0.6843561", "0.6814537", "0.68001723", "0.67373395", "0.6727266", "0.67113245", "0.66811067", "0.66772133", "0.66585416", "0.6652725", "0.6641327", "0.66228426", "0.6617733", "0.6613589", "0.6613589", "0.6589863", "0.6571981", "0.65597606", "0.6555769" ]
0.7978667
0
Ant turns right and moves its current step that direction
def MoveRightStep(self): if self.facing == 0: self.facing = 1 self.x += self.stepLeft elif self.facing == 1: self.facing = 2 self.y += self.stepUp elif self.facing == 2: self.facing = 3 self.x -= self.stepRight elif self.facing == 3: self.facing = 0 self.y -= self.stepDown
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def right(self, angle):\r\n self.dir += math.radians(angle)", "def move_right(self):\n self.yaw_motor.step_forward()", "def turn_right(self):\n temp = self.direction[0]\n self.direction[0] = -self.direction[1]\n self.direction[1] = temp", "def go_right(self):\n self.change_x = 6\n self.direction = \"R\"", "def turn_right(self):\n self.facing_direction += self.config\n if self.facing_direction > 7:\n self.facing_direction -= 8\n self.x, self.y = self.compute_positions()", "def turn_ship_right(self):\n self.degrees -= movement", "def MoveBasicRight(self):\n if self.facing == 0:\n self.facing = 1\n self.x += 1\n elif self.facing == 1:\n self.facing = 2\n self.y += 1\n elif self.facing == 2:\n self.facing = 3\n self.x -= 1\n elif self.facing == 3:\n self.facing = 0\n self.y -= 1", "def step(self):\n r, c = self.ant_r, self.ant_c\n if self.grid[r][c] == LangtonAnt.WHITE:\n self.direction = rotate_counterclock(self.direction)\n self.grid[r][c] = LangtonAnt.BLACK\n else:\n self.direction = rotate_clockwise(self.direction)\n self.grid[r][c] = LangtonAnt.WHITE\n\n self.ant_r -= self.direction[1]\n self.ant_c += self.direction[0]", "def right(self):\n self.move(1,0)", "def move_right(self,distance):\n self.turn_right()\n self.move_forward(distance)\n # self.log_arr.append(\"right\")", "def go_right(self):\n self.rect.centerx += self.__dx", "def right():\n Robot.rotate(\"RIGHT\")", "def turn(self, dir):\n if dir.upper() == 'R':\n if self.direction == 3:\n self.direction = 0\n else:\n self.direction += 1\n if dir.upper() == 'L':\n if self.direction == 0:\n self.direction = 3\n else:\n self.direction -= 1", "def rightTurn(self):\n #print('rightTurn\\r')\n #self.linearVector = Vector3(x=0.0, y=0.0, z=0.0)\n #self.angularVector = Vector3(x=0.0, y=0.0, z=-1.0)\n self.linearVector = Vector3(x=0.0, y=0.0, z=0.0)\n self.angularVector = Vector3(x=0.0, y=0.0, z=-1.0)", "def turn_right(self):\n pass", "def move_right(self):\n if self.change_valid(dx=1):\n self.x += 1", "def 
right(self, angle):\r\n self.rotation += angle", "def right(self):\r\n z = len(direction_tuple)\r\n if self.d in direction_tuple:\r\n index = direction_tuple.index(self.d)\r\n if index == (z-1):\r\n self.d = direction_tuple[0]\r\n else:\r\n self.d = direction_tuple[index + 1]\r\n else:\r\n print(\"NO VALID ROBOT POSITION\")", "def right(self, angle):\n self.matrix = matrixMultiply(yawMatrix(angle), self.matrix)\n self.directionOut()\n self.delay()", "def right(self, param):\n\t\tglobal estop_flag, move_state\n\t\t#If input angle is zero, set angle to default\n\t\tif param:\n\t\t\tangle = param\n\t\telse:\n\t\t\tangle = riu.default_angle\n\n\t\tsignal.alarm(0) #Disable timer interrupt for the duration of the movement\n\t\t#safely grab current yaw\n\t\twith self.move_state_lock:\n\t\t\tcurrent_yaw = (math.degrees(move_state['yaw']) + 360) % 360\n\t\t#Set goal to yaw+angle. Add 360 then mod to account for negative angles but avoid going over 360\n\t\tgoal = (current_yaw - angle + 360) % 360\n\t\tif self.angle_lock:\n\t\t\tif goal >= 315 and goal < 45:\n\t\t\t\tgoal = self.zeroed_angle\n\t\t\telif goal >= 45 and goal < 135:\n\t\t\t\tgoal = self.zeroed_angle + 90\n\t\t\telif goal >= 135 and goal < 225:\n\t\t\t\tgoal = self.zeroed_angle + 180\n\t\t\telif goal >= 225 and goal < 315:\n\t\t\t\tgoal = self.zeroed_angle + 270\n\t\tgoal = goal % 360\n\t\thalf_goal = (current_yaw - angle/2 + 360) % 360\n\t\thalfway_flag = False #used to flag if we've already sent out a halfway message\n\t\t#Anonymous function that calculates the current clockwise distance to the goal\n\t\tchkdist = lambda pos, goal: round(pos - goal + 360 * (goal > pos), 1)\n\t\t#Gets current distance and initially sets previous distance = distance\n\t\tdistance = chkdist(current_yaw, goal)\n\t\tprev_dist = distance\n\t\t\"\"\"Continues to move while absolute distance is not within angular_error and clockwise\n\t\tdistance is not increasing. 
NOTE: absolute distance is the shortest distance in either direction,\n\t\twhile clockwise distance is the distance using only clockwise movement.\n\t\tThe angular_error condition was added because the movements tended to end within the first few \n\t\tcycles due to some float error. With the error condition, the movement can only end when inside\n\t\tat least the general area of the goal.\"\"\"\n\t\twhile distance <= prev_dist or self.get_abs_dist(current_yaw, goal) > riu.angular_error:\n\t\t\tif estop_flag:\n\t\t\t\tself.publisher.publish(Mover.stop_msg)\n\t\t\telse:\n\t\t\t\t#Build and publish right turn message\n\t\t\t\ttwist_msg = Twist()\n\t\t\t\ttwist_msg.angular.z = -1 * riu.turn_rate\n\t\t\t\tself.publisher.publish(twist_msg)\n\t\t\t\t#If distance to goal is less than half the initial distance, publish the half done message\n\t\t\t\tif distance <= half_goal and not halfway_flag:\n\t\t\t\t\thalfway_flag = True\n\t\t\t\t\tself.status_pub.publish(String(\"half\"))\n\t\t\t\t#Update current position\n\t\t\t\twith self.move_state_lock:\n\t\t\t\t\tcurrent_yaw = (math.degrees(move_state['yaw']) + 360) % 360\n\t\t\t\t#Update previous distance, then update distance based on current position\n\t\t\t\tprev_dist = distance\n\t\t\t\tdistance = chkdist(current_yaw, goal)\n\t\t\trospy.sleep(.2)\n\t\t#After loop end, send stop message and send done message to cmd_queue\t\n\t\tself.publisher.publish(Mover.stop_msg)\n\t\tself.status_pub.publish(String(\"done\"))\n\t\tsignal.alarm(Mover.ready_message_interval) #Restart timer", "def settle(self):\n if (self.angle >= self.max_angle) or (\n self.angle <= -self.max_angle\n ): # time to reverse\n print(\"reverse\", self.angle, self.max_angle)\n self.speed *= -0.9 # damped\n self.max_angle *= 0.9\n if self.speed > 0:\n self.angle = self.max_angle\n else:\n self.angle = -self.max_angle\n\n self.angle += radians(self.speed)\n print(self.angle, self.max_angle, self.speed)\n self.x = self.cx + self.length * sin(self.angle)\n self.y = 
self.cy + self.length * cos(self.angle)", "def turn_right(self):\n self.direction_mod_offset += 1\n self.calculate_offset_mapping()\n direction_num = self.direction_mod_offset % len(self.direction_arr)\n client.rotateToYawAsync(direction_num * 90).join()", "def move_right(self):\r\n self.left += self.__speed", "def turnRight(ev3):\n ev3.set_angle(\"A\", \"30\", \"90\")\n ev3.set_angle(\"B\", \"-30\", \"-90\")\n ev3.set_angle(\"C\", \"30\", \"90\")", "def right_forward(self):\n self.right_motor.run_forever(speed_sp=self.MAX_SPEED)", "def turn(dir, speed, runtime):\n\trightMotor.run_timed(duty_cycle_sp=-dir*speed, time_sp=runtime)\n\tleftMotor.run_timed(duty_cycle_sp=dir*speed, time_sp=runtime)", "def go_right(self):\n self.change_x = 6", "def go_right(self):\n self.change_x = 6", "def step(self, action):\n # print(action)\n distances = self.agent.return_distances(self.agent.corners, self.agent.line_pos)\n\n left = distances[0]\n right = distances[1]\n self.agent.distances.append({\n 'left': left,\n 'right': right\n })\n reward = 0\n if action == 1:\n self.agent.angle -= 90\n if self.agent.angle < 0:\n self.agent.angle = 0\n self.agent.direction_history.append('left')\n self.reset_raycasts(self.agent.angle)\n self.render()\n if left > right:\n reward += 5\n else:\n reward -= 5\n\n elif action == 2:\n self.agent.angle += 90\n if self.agent.angle >= 360:\n self.agent.angle = 0\n\n self.reset_raycasts(self.agent.angle)\n self.render()\n self.agent.direction_history.append('right')\n if left < right:\n reward += 5\n else:\n reward -= 5\n\n elif action == 0:\n self.agent.direction_history.append('forward')\n if self.agent.angle >= 360: self.agent.angle == 0\n if self.agent.angle == 0 or self.agent.angle == 360:\n self.agent.agent_position['y'] -= 10\n self.reset_raycasts(self.agent.angle)\n elif self.agent.angle == 90: \n self.agent.agent_position['x'] += 10\n self.reset_raycasts(self.agent.angle)\n elif self.agent.angle == 180: \n self.agent.agent_position['y'] += 10\n 
self.reset_raycasts(self.agent.angle)\n elif self.agent.angle == 270:\n self.agent.agent_position['x'] -= 10\n self.reset_raycasts(self.agent.angle)\n \n if left + right >= 50:\n reward += 5\n\n self.render()\n\n elif action == 3:\n self.agent.direction_history.append('reverse')\n if self.agent.angle == 0:\n self.agent.agent_position['y'] += 10\n self.reset_raycasts(self.agent.angle)\n self.render()\n elif self.agent.angle == 90: \n self.agent.agent_position['x'] -= 10\n self.reset_raycasts(self.agent.angle)\n self.render()\n elif self.agent.angle == 180: \n self.agent.agent_position['y'] -= 10\n self.reset_raycasts(self.agent.angle)\n self.render()\n elif self.agent.angle == 270:\n self.agent.agent_position['x'] += 10\n self.reset_raycasts(self.agent.angle)\n self.render()\n \n if left + right <= 50:\n reward += 5\n\n \n else:\n reward -= 5\n\n if \"forward\" not in self.agent.direction_history[len(self.agent.direction_history)-6:len(self.agent.direction_history)-1]:\n reward -= 10\n\n \n info = {}\n if self.agent.check_collision():\n reward -= 10\n self.reset() \n self.agent.rewards.append({\n 'leftDistance': left,\n 'rightDistance': right,\n 'reward': reward,\n })\n self.render()\n print(f\"REWARD: {reward}\")\n # self.render()\n # print(self.agent.direction_history[-1])\n self.agent.rewards.append(reward)\n return np.array([left, right]), reward, False, info", "def moveStep(self):\n\t\tif self.pos[0] <= self.boundsX[0] or \\\n\t\t(self.pos[0]+ 2*(self.radius)) >= self.boundsX[1]:\n\t\t\tself.dir[0] *= -1\n\t\t\t\n\t\tself.pos[0] += self.dir[0]*self.speed\n\t\tself.pos[1] += self.dir[1]*self.speed" ]
[ "0.7485096", "0.7361091", "0.7325981", "0.73201174", "0.7136343", "0.69774735", "0.69543254", "0.6937253", "0.6920816", "0.6871856", "0.67709595", "0.6770527", "0.6758423", "0.6755053", "0.6742616", "0.67374164", "0.673683", "0.67274386", "0.6718059", "0.6709373", "0.6685019", "0.6671189", "0.6632991", "0.6589718", "0.65857846", "0.65723395", "0.654613", "0.654613", "0.65359974", "0.6535154" ]
0.79314137
0
Moves one space in the current ant direction facing
def MoveCurrentSpace(self): if self.facing == 0: self.y -= 1 elif self.facing == 1: self.x += 1 elif self.facing == 2: self.y += 1 elif self.facing == 3: self.x -= 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def moveForward(self):\n if self.onGround:\n self.vx = 4", "def MoveLeftStep(self):\n if self.facing == 0:\n self.facing = 3\n self.x -= self.stepLeft\n elif self.facing == 1:\n self.facing = 0\n self.y -= self.stepUp\n elif self.facing == 2:\n self.facing = 1\n self.x += self.stepRight\n elif self.facing == 3:\n self.facing = 2\n self.y += self.stepDown", "def move_turtle(self):\n self.forward(self.move_speed)", "def _move(self):\n self.pos += self.direction # add direction vector\n self.direction += self.gravity # add gravity to direction\n self.direction = self.direction.elementwise() * self.drag # apply drag to direction", "def step(self):\n r, c = self.ant_r, self.ant_c\n if self.grid[r][c] == LangtonAnt.WHITE:\n self.direction = rotate_counterclock(self.direction)\n self.grid[r][c] = LangtonAnt.BLACK\n else:\n self.direction = rotate_clockwise(self.direction)\n self.grid[r][c] = LangtonAnt.WHITE\n\n self.ant_r -= self.direction[1]\n self.ant_c += self.direction[0]", "def movement(self):\n self.rect.left -= self.speedx #to move the asteroid to the left", "def MoveRightStep(self):\n if self.facing == 0:\n self.facing = 1\n self.x += self.stepLeft\n elif self.facing == 1:\n self.facing = 2\n self.y += self.stepUp\n elif self.facing == 2:\n self.facing = 3\n self.x -= self.stepRight\n elif self.facing == 3:\n self.facing = 0\n self.y -= self.stepDown", "def turn_ship_left(self):\n self.degrees += movement", "def step(self):\r\n\r\n self.velocity = 1\r\n new_pos = self.pos\r\n self.model.space.move_agent(self, new_pos)", "def turn(self, dir):\n if dir.upper() == 'R':\n if self.direction == 3:\n self.direction = 0\n else:\n self.direction += 1\n if dir.upper() == 'L':\n if self.direction == 0:\n self.direction = 3\n else:\n self.direction -= 1", "def automove(self):\n if self.x < self.end_cinematic_x_pos:\n self.x += self.SHIP_SPEED\n if self.x > self.end_cinematic_x_pos:\n self.x -= self.SHIP_SPEED\n if self.y < self.end_cinematic_y_pos:\n self.y += 
self.SHIP_SPEED\n if self.y > self.end_cinematic_y_pos:\n self.y -= self.SHIP_SPEED", "def move_car(self):\n a = self.h / 50\n self.x += self.speed_x / FPS\n if self.x + 170 * a >= 1100:\n self.dir = -1\n self.speed_x = -self.speed_x\n if self.x - 170 * a <= 50:\n self.dir = 1\n self.speed_x = -self.speed_x", "def Move180(self):\n if self.facing == 0:\n self.facing = 1\n self.x -= self.stepLeft\n elif self.facing == 1:\n self.facing = 2\n self.y -= self.stepUp\n elif self.facing == 2:\n self.facing = 3\n self.x += self.stepRight\n elif self.facing == 3:\n self.facing = 0\n self.y += self.stepDown", "def move(self, direction):\n pass", "def MoveBasicRight(self):\n if self.facing == 0:\n self.facing = 1\n self.x += 1\n elif self.facing == 1:\n self.facing = 2\n self.y += 1\n elif self.facing == 2:\n self.facing = 3\n self.x -= 1\n elif self.facing == 3:\n self.facing = 0\n self.y -= 1", "def move(self):\n self.x += math.sin(self.angle) * self.speed\n self.y -= math.cos(self.angle) * self.speed\n # Next, account for gravity\n (self.angle, self.speed) = addVectors((self.angle, self.speed), gravity)\n # Then, friction / drag\n self.speed *= drag", "def move(self, direction):\n # replace with your code\n pass", "def move(self, direction):\n # replace with your code\n pass", "def take_step(self):\n if self.facing == 0:\n self.new_loc = (self.new_loc[0], self.new_loc[1] + 1)\n elif self.facing == 1:\n self.new_loc = (self.new_loc[0] + 1, self.new_loc[1])\n elif self.facing == 2:\n self.new_loc = (self.new_loc[0], self.new_loc[1] - 1)\n else:\n self.new_loc = (self.new_loc[0] - 1, self.new_loc[1])", "def turn_ship_right(self):\n self.degrees -= movement", "def _go(self, distance):\n ende = self._position + self._orient * distance\n self._goto(ende)", "def move(self):\n\n # get the location we WOULD go to\n newX = self.xcor() + self.dx\n newY = self.ycor() + self.dy\n while (abs (newX) > self.BOX_RANGE) or (abs(newY) > self.BOX_RANGE):\n # print(\"choosing new direction... 
\",end=\"\")\n self.chooseNewDirection()\n # print(self.dx, self.dy)\n newX = self.xcor() + self.dx\n newY = self.ycor() + self.dy\n\n # now move our monster\n super().move()", "def accelerateForwards(self,movementSpeed=0.1):\n self.xMomentum+=math.sin(self.faceHeading*(math.pi/180))*movementSpeed\n self.yMomentum+=math.cos(self.faceHeading*(math.pi/180))*movementSpeed", "def moveBackward(self):\n if self.onGround:\n self.vx = -4", "def move(self):\n \n self.position = self.wander()", "def moveStep(self):\n\t\tif self.pos[0] <= self.boundsX[0] or \\\n\t\t(self.pos[0]+ 2*(self.radius)) >= self.boundsX[1]:\n\t\t\tself.dir[0] *= -1\n\t\t\t\n\t\tself.pos[0] += self.dir[0]*self.speed\n\t\tself.pos[1] += self.dir[1]*self.speed", "def turned(self,angle: \"radians to turn\") -> Position:\n return Position(self.x, self.y, self.facing + angle)", "def move(self, direction):\n\n if direction == \"north\":\n self.go_and_update(-1, 0)\n\n elif direction == \"south\":\n self.go_and_update(1, 0)\n\n elif direction == \"east\":\n self.go_and_update(0, 1)\n\n elif direction == \"west\":\n self.go_and_update(0, -1)", "def move(self, dt):\n lims = self.settings['agent']['jointLimits']\n # print '[move] curr joint Angle:'\n # print self.jointAngle\n # print '[move] curr speed:'\n # print self.speed\n\n J = self.jointAngle + dt * np.array(self.speed)\n self.jointAngle[0] = min(max(J[0], lims[0][0]), lims[0][1])\n self.jointAngle[1] = min(max(J[1], lims[1][0]), lims[1][1])\n self.forward_kinematics()", "def move(self):\n keys = pygame.key.get_pressed()\n\n if keys[pygame.K_w]:\n self.y -= self.vel\n if keys[pygame.K_a]:\n self.x -= self.vel\n if keys[pygame.K_s]:\n self.y += self.vel\n if keys[pygame.K_d]:\n self.x += self.vel" ]
[ "0.6722689", "0.66532195", "0.657839", "0.65178674", "0.6484474", "0.6473384", "0.6455628", "0.6454375", "0.6426206", "0.64156604", "0.641183", "0.6391002", "0.6372444", "0.6338004", "0.63118315", "0.6276108", "0.6261837", "0.6261837", "0.6241037", "0.62397355", "0.62006986", "0.61929226", "0.6189241", "0.6166958", "0.6149536", "0.6143719", "0.61427635", "0.6136935", "0.61234623", "0.6121215" ]
0.762536
0
Ant will move a random direction by one space
def MoveRandom(self): r = random.randint(0,3) if r == 0: self.x += 1 elif r == 1: self.y += 1 elif r == 2: self.x -= 1 elif r == 3: self.y -= 1
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_move(turtle, distance):\n angle = uniform(-90,90)\n d = uniform(0,distance)\n turtle.left(angle)\n turtle.forward(d)", "def setRandDirection(self):\n phi = 2*math.pi*random.random()\n u = 2*random.random() - 1\n v = math.sqrt(1-u*u)*math.cos(phi)\n w = math.sqrt(1-u*u)*math.sin(phi)\n self.direction = (u,v,w)", "def move(self):\n if self._z >= 75:\n a = random.random()\n print(str(a))\n if a < 0.2:\n self._z += 1\n if a > 0.2 and a < 0.9:\n self._z -= 1\n if a > 0.9:\n self._z = self._z\n else: \n self._z -= 1\n \n b = random.random()\n print(str(b))\n if b < 0.1:\n self._y += 1\n if b > 0.1 and b < 0.2:\n self._y -= 1\n if b > 0.2 and b < 0.25:\n self._x -= 1\n if b > 0.25:\n self._x += 1", "def move(self):\n if random.random() < 0.5:\n self.y = (self.y + 1) % 100\n else:\n self.y = (self.y - 1) % 100\n if random.random() < 0.5:\n self.x = (self.x + 1) % 100\n else:\n self.x = (self.x - 1) % 100", "def move_aim(self):\n self.color = random.choice(COLORS)\n self.x += 3 * self.speed_x / FPS\n self.y += 3 * self.speed_y / FPS\n self.r -= 1\n self.draw_aim()\n if self.r <= 10:\n self.color = random.choice(COLORS)\n self.x = randint(100, 1000)\n self.y = randint(100, 800)\n self.r = randint(50, 100)\n self.speed_x = randint(-200, 200)\n self.speed_y = randint(-200, 200)\n if self.x >= 1100:\n self.speed_x = randint(-100, -10)\n if self.x <= 50:\n self.speed_x = randint(10, 100)\n if self.y >= 800:\n self.speed_y = randint(-100, -10)\n if self.y <= 50:\n self.speed_y = randint(10, 100)", "def __random_movement(self):\n\t\tself.__steps += 1 \t\t# Increment after every frame\n\t\t# When __steps greater than threshold reverse the direction\n\t\t# and set threshold to a new random value\n\t\tif self.__steps >= self.__threshold_steps:\t\n\t\t\tif self.direction == 'RIGHT':\n\t\t\t\tself.move_left()\n\t\t\t\tself.direction = 'LEFT'\n\t\t\telse:\n\t\t\t\tself.move_right()\n\t\t\t\tself.direction = 'RIGHT'\n\t\t\tself.__threshold_steps = 
random.randint(25,50)\n\t\t\tself.__steps = 0\n\t\t# Confines the Donkeys movement to within the boundary \n\t\tself.__check_boundary()", "def update(self):\n if self.x<0:\n self.x = 0\n\n if self.y <0:\n self.y = 0\n\n if bool(randint(0, 1))==True:\n if self.walker == True:\n self.x += randint(-2, 2)\n self.y += randint(-2, 2)", "def move(self, direction):\n newx = self.x\n newy = self.y\n newy += random.randint(-1, 1)\n newx += random.randint(-1, 1)\n if self.tmap.contents[newy][newx] != '#':\n self.x = newx\n self.y = newy", "def ran_direction(self,room):\r\n ran_dirt = self.random()\r\n if 1 == ran_dirt: \r\n room.front_attch(True)\r\n elif 2 == ran_dirt: \r\n room.back_attch(True)\r\n elif 3 == ran_dirt: \r\n room.left_attch(True)\r\n elif 4 == ran_dirt: \r\n room.right_attch(True)\r\n return room", "def random_walk(turtle, distance, steps):\n turtle.color(randcolor(), randcolor())\n for step in range(0,steps):\n random_move(turtle, distance)\n gohome(turtle)", "def wander(self, speed: float = 1.):\n pos = np.array(self.pos)\n\n random_heading = random.random() * 2 * np.pi\n rand_vector = speed * np.array([np.cos(random_heading), np.sin(random_heading)])\n target_location = pos + rand_vector\n target_location = np.clip(target_location, [0, 0], [99.9, 99.9])\n self.model.space.move_agent(self, target_location)\n return", "def advance(self):\n #x and y coordinates move and advance by adding the randomly generated velocity \n self.center.x += self.velocity.dx\n self.center.y += self.velocity.dy\n return", "def direction_correction(self):\n self.directions.monster = random.uniform(self.directions.monster * self.get_monster_sensitivity(),\n self.directions.monster * (1 + (1 - self.get_monster_sensitivity())))\n self.directions.food = random.uniform(self.directions.food * self.get_food_sensitivity(),\n self.directions.food * (1 + (1 - self.get_food_sensitivity())))\n self.directions.water = random.uniform(self.directions.water * self.get_water_sensitivity(),\n 
self.directions.water * (1 + (1 - self.get_water_sensitivity())))", "def move(self):\n possible_steps = self.model.grid.get_neighborhood(\n self.pos,\n moore=False, # implements Von Neumann neighborhood\n include_center=False)\n new_position = self.random.choice(possible_steps)\n self.heading = [new_position[0] - self.pos[0],\n new_position[1] - self.pos[1]]\n self.model.grid.move_agent(self, new_position)", "def random_direction():\n\n if randrange(2):\n return Direction.RIGHT\n else:\n return Direction.DOWN", "def move_to_random_pos(self):\n newpos = [(np.random.rand() - 0.5) * 0.1,\n (np.random.rand() - 0.5) * 0.1,\n np.random.rand() * 0.9 + 0.2]\n self.move_to(newpos)", "def randomWalk(t, turns, distance=20):\n for x in range(turns):\n if x % 2 == 0:\n t.left(random.randint(-180, 180))\n else:\n t.right(random.randint(-180, 180))\n t.forward(random.randint(1,distance))\n sleep(10)", "def move_random(self, board: Board) -> None:\n rnd_move_idx = randint(0,4)\n # moves: stay, up, left, right, down\n moves = [[0,0], [0,-1], [-1,0], [1,0], [0,1]]\n\n if board.can_position_at(self.x + moves[rnd_move_idx][0], self.y + moves[rnd_move_idx][1]):\n board.set_element_at_position(0, self.x, self.y)\n self.x += moves[rnd_move_idx][0]\n self.y += moves[rnd_move_idx][1]\n board.set_element_at_position(3, self.x, self.y)\n print(\"Bomberman moved to [\", self.x, \",\", self.y, \"]\")", "def draw():\n ant.move(aim)\n ant.x = wrap(ant.x)\n ant.y = wrap(ant.y)\n\n aim.move(random() - 0.5)\n aim.rotate(random() * 10 - 5)\n\n clear()\n goto(ant.x, ant.y)\n dot(4)\n\n ontimer(draw, 100)", "def _move_randomly(self):\n a, b = randint(0, len(self.state) - 1), randint(0, len(self.state) - 1)\n wiz1, wiz2 = self.state[a], self.state[b]\n self._swap_wizards(wiz1, wiz2)", "def nextmove(x, y):\n direction = rn.randrange(0, 4)\n\n if direction == 0: # move up\n y += 1\n elif direction == 1: # move down\n y -= 1\n elif direction == 2: # move right\n x += 1\n elif direction == 3: # move left\n 
x -= 1\n else:\n print(\"[ERROR] Direction isn't 0-3\")\n\n return x, y", "def move(self):\n assert self.is_alive, \"Sprite is dead, and should not be able to move\"\n if self.health > 3:\n self.y += random.randint(-1, 1) # change by -1, 0, 1\n self.x += random.randint(-1, 1) # change by -1, 0, 1\n print(self.name, \"moves to position\", str(self.x), \",\", str(self.y))", "def randomize_trajectory(self):\n self.angle = randint(-360, 360)\n self.speed = randint(1, 5)/2.5", "def go(self):\n # if we want to go to the right, we need to decrease x and increase y\n # if we want to go to the left, we need to increase x and decrease y\n h = random.randrange(2, 4)\n v = random.randrange(1, 3)\n if not bool(random.getrandbits(1)):\n h = - h\n self.velocity = [h, -v]\n self.explode.play()", "def random_step(self):\n pos = [i for i in range(9) if self.grid[i] == 0]\n move = random.choice(pos)\n return self.step(move)", "def random_step(self):\n pos = [i for i in range(9) if self.grid[i] == 0]\n move = random.choice(pos)\n return self.step(move)", "def chooseNewDirection(self, speedRange=None):\n if speedRange is None: speedRange = self.MONSTER_SPEED\n self.dx = random.randint(-speedRange, speedRange)\n self.dy = random.randint(-speedRange, speedRange)", "def random_walk(n):\n x,y = 0,0\n for i in range(n):\n (dx,dy) = random.choice([(0,1),(1,0),(0,-1),(-1,0)])\n x += dx\n y+=dy\n return(x,y)", "def move(self):\n self.x += self.speed_x / FPS\n self.y += self.speed_y / FPS\n self.draw_ball()\n if self.x >= 1100:\n self.speed_x = randint(-100, -10)\n if self.x <= 50:\n self.speed_x = randint(10, 100)\n if self.y >= 800:\n self.speed_y = randint(-100, -10)\n if self.y <= 50:\n self.speed_y = randint(10, 100)", "def _rand_direction(dim, rand):\n direction = rand.normal(size=dim)\n return direction / la.norm(direction)" ]
[ "0.78114086", "0.7343114", "0.73377156", "0.7268426", "0.7248032", "0.7119226", "0.703144", "0.70097953", "0.6923677", "0.6908911", "0.68940854", "0.68913585", "0.67954284", "0.6795343", "0.67691827", "0.6748331", "0.6713536", "0.668025", "0.6644184", "0.6639902", "0.66380334", "0.6637353", "0.65988076", "0.6596052", "0.6583045", "0.6583045", "0.65225327", "0.6497617", "0.64780134", "0.6472301" ]
0.75118154
1
Toggles the ant to show its position with a color
def ShowAnt(self, ShouldShow): if ShouldShow: self.TempScreenColor = self.display.get_at((self.x,self.y)) Ant.updateArray.append(pygame.Rect(self.x,self.y,1,1)) self.display.fill(Colors.A_red, ((self.x,self.y), (1,1))) elif pix == Colors.A_Fire: self.isAlive = False else: Ant.updateArray.append(pygame.Rect(self.x,self.y,1,1)) self.display.fill(self.TempScreenColor, ((self.x,self.y), (1,1)))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def change_color_arrow():\n global index_picker\n offset = 0\n if index_picker % 2 == 1:\n offset = 4\n for i in range(3):\n for j in range(2):\n sense.set_pixel(i+1+offset, j, [100, 100, 200])if ARROW[j][i] == 1 else sense.set_pixel(i+1+offset, j, [0, 0, 0])", "def toggle_color(self, index):\n if self.get_state(index):\n self.canvas.itemconfigure(self.cells[index], state=HIDDEN)\n else:\n self.canvas.itemconfigure(self.cells[index], state=NORMAL)", "def light_positions(turtle, color, pos, hide=0):\n if hide == 1:\n turtle.hideturtle()\n turtle.penup()\n turtle.forward(40)\n turtle.left(90)\n turtle.forward(pos)\n turtle.shape(\"circle\")\n turtle.shapesize(3)\n turtle.fillcolor(color)", "def change_color(self, color):\r\n if color == \"black\":\r\n self.color = \"white\"\r\n self.canvas.itemconfig(self.ball, fill='white')\r\n else:\r\n self.color = \"black\"\r\n self.canvas.itemconfig(self.ball, fill='black')", "def set_light_on(self):\r\n self._light = \"ON\"", "def toggle_shade(self,shade):\n\n # First toggle the user specified shade\n if self.shades[shade][0]:\n self.shades[shade][0] = 0\n else:\n self.shades[shade][0] = 1\n\n # Now draw the image with the active shades\n self.image.blit(self.pic,(0,0))\n for key in self.shades:\n if self.shades[key][0]:\n self.image.blit(self.shades[key][1],(0,0))", "def changeColor( self ):\n\t\t\n\t\tx, y = self.position.xy\n\t\tself.color = ( int((x / WINDOW_X) * 128), int((x / WINDOW_X) * 128) + int((y / WINDOW_Y) * 128 ), int((y / WINDOW_Y) * 128))", "def toggle(self, color='all'):\n if color in ['all', 'r']:\n self.__send('r', 'toggle')\n\n if color in ['all', 'g']:\n self.__send('g', 'toggle')\n\n if color in ['all', 'b']:\n self.__send('b', 'toggle')", "def toggle(self, env, pos):\n return False", "def butenex(self, evt, index, pos):\n if evt == DGG.ENTER:\n self._hilightcurrent(False)\n self.index=index\n self._hilightcurrent(True)\n elif evt == DGG.EXIT:\n self._hilightcurrent(False)", "def dimmer_switch(turtle, 
color):\n turtle.fillcolor(color + \"4\")", "def step(self):\n r, c = self.ant_r, self.ant_c\n if self.grid[r][c] == LangtonAnt.WHITE:\n self.direction = rotate_counterclock(self.direction)\n self.grid[r][c] = LangtonAnt.BLACK\n else:\n self.direction = rotate_clockwise(self.direction)\n self.grid[r][c] = LangtonAnt.WHITE\n\n self.ant_r -= self.direction[1]\n self.ant_c += self.direction[0]", "def set_light_on(self):\n self._light = \"ON\"", "def on_show(self): \n arcade.set_background_color(arcade.color.BLACK)", "def set_green(self):\n self.fill= Cell.FILLED_COLOR_BG\n self.draw()", "def switch(self, _color = 16):\n\t\tself.pointer.flip()\n\n\t\tif self.pointer.get():\n\t\t\tself.content[0][1] = 3\n\t\t\tself.content[1][1] = 16\n\t\telse:\n\t\t\tself.content[0][1] = 16\n\t\t\tself.content[1][1] = 3", "def change_to_tasks(self):\n self.ids[\"shp_btn\"].color = 1, 1, 1, 0.5", "def brighter_switch(turtle, color):\n turtle.fillcolor(color + \"1\")", "def show(self):\r\n stroke(0) # determine the color\r\n circle((self.position.x, self.position.y), radius=10) # creates a circle with defined radius\r", "def click_aim(self, pos):\n x, y = pos\n if (self.x - x) ** 2 + (self.y - y) ** 2 <= self.r ** 2:\n self.color = random.choice(COLORS)\n self.x = randint(100, 1000)\n self.y = randint(100, 800)\n self.r = randint(50, 100)\n self.speed_x = randint(-200, 200)\n self.speed_y = randint(-200, 200)\n return True\n else:\n return False", "def draw():\n ant.move(aim)\n ant.x = wrap(ant.x)\n ant.y = wrap(ant.y)\n\n aim.move(random() - 0.5)\n aim.rotate(random() * 10 - 5)\n\n clear()\n goto(ant.x, ant.y)\n dot(4)\n\n ontimer(draw, 100)", "def toggled(self, b):\n self.group.setVisible(b)\n\n for line in (self.rLine, self.gLine, self.bLine):\n line.setVisible(b)\n\n self.parent.image.timeLine.setVisible(not b)", "def toggleAnnotation(self,i=0,onoff=None):\n active = self.annotations[i][1]\n #print \"WAS\"\n #print self.annotations\n if onoff is None:\n active = not active\n elif 
onoff:\n active = True\n else:\n active = False\n self.annotations[i][1] = active\n #print \"BECOMES\"\n #print self.annotations\n if active:\n self.drawAnnotation(i)\n else:\n self.removeAnnotation(i)\n #print self._annotations", "def clickWhiteReference(self, event):\n if self.whiteReference is None:\n self.whiteReference = self.spectrometer.getSpectrum()\n self.lightBtn.color = '0.99'\n else:\n self.whiteReference = None\n self.lightBtn.color = '0.85'\n plt.pause(0.3)\n self.axes.autoscale_view()", "def highlight(self, number, state):\n\n marker = game.markers[number]\n link = game.markers[marker.link]\n board = self.ids.board\n ui_link = board.children[-link.index - 1]\n\n # Toggle highlighting on\n if state == 'on':\n ui_link.old_color = ui_link.color\n ui_link.color = scheme.white\n\n # Toggle highlighting off\n elif state == 'off':\n ui_link.color = ui_link.old_color", "def change_color(self, x, y, state):\n if state == 1:\n color = self.tile_color\n else:\n color = self.background_color\n self.canvas.itemconfig(self.board[(x, y)], fill=color)", "def on_show(self):\n arcade.set_background_color(arcade.csscolor.DARK_SLATE_BLUE)\n\n arcade.set_viewport(0, SCREEN_WIDTH - 1, 0, SCREEN_HEIGHT - 1)", "def on_show(self):\n arcade.set_background_color(arcade.csscolor.DARK_SLATE_BLUE)\n\n arcade.set_viewport(0, SCREEN_WIDTH - 1, 0, SCREEN_HEIGHT - 1)", "def _hilightcurrent(self, onoff):\n if len(self.canvas[\"items\"]):\n self.canvas[\"items\"][self.index]['frameColor']=\\\n list(self.highlight)[:3]+[self.highlight[3] if onoff else 0]", "def enableLighting(self):\r\n\t\t\r\n\t\tglEnable(GL_LIGHTING)" ]
[ "0.6351094", "0.6189249", "0.6142086", "0.58642226", "0.5844731", "0.5740332", "0.5705326", "0.5699827", "0.5691193", "0.5687006", "0.56764317", "0.5641515", "0.5641049", "0.5637626", "0.56320447", "0.5620738", "0.5613831", "0.5603704", "0.55606014", "0.5553713", "0.55270195", "0.54968864", "0.5469495", "0.5447123", "0.5439998", "0.5420445", "0.54153323", "0.54153323", "0.54037535", "0.5395636" ]
0.7177802
0
Spawns ant in game and turns the current mouse pos to color of ant
def Spawn(self): if len(Ant.antArray) < Ant.antLimit: Ant.antArray.append(self) self.display.set_at((self.x,self.y), Colors.A_Wood) pygame.display.update(pygame.Rect(self.x,self.y,1,1))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def Spawn(self):\n if len(Ant.antArray) < Ant.antLimit:\n Ant.antArray.append(self)\n self.display.set_at((self.x,self.y), Colors.A_black)\n pygame.display.update(pygame.Rect(self.x,self.y,1,1))", "def Spawn(self):\n if len(Ant.antArray) < Ant.antLimit:\n Ant.antArray.append(self)\n self.display.set_at((self.x,self.y), Colors.A_Crazy)\n pygame.display.update(pygame.Rect(self.x,self.y,1,1))", "def draw():\n ant.move(aim)\n ant.x = wrap(ant.x)\n ant.y = wrap(ant.y)\n\n aim.move(random() - 0.5)\n aim.rotate(random() * 10 - 5)\n\n clear()\n goto(ant.x, ant.y)\n dot(4)\n\n ontimer(draw, 100)", "def Spawn(self):\n if len(Ant.antArray) < Ant.antLimit:\n Ant.antArray.append(self)\n self.display.set_at((self.x,self.y), Colors.A_Plant)\n pygame.display.update(pygame.Rect(self.x,self.y,1,1))", "def Spawn(self):\n if len(Ant.antArray) < Ant.antLimit:\n Ant.antArray.append(self)\n self.display.set_at((self.x,self.y), Colors.A_Fire)\n pygame.display.update(pygame.Rect(self.x,self.y,1,1))", "def Spawn(self):\n if len(Ant.antArray) < Ant.antLimit:\n Ant.antArray.append(self)\n self.display.set_at((self.x,self.y), Colors.A_Water)\n pygame.display.update(pygame.Rect(self.x,self.y,1,1))", "def Spawn(self):\n if len(Ant.antArray) < Ant.antLimit:\n Ant.antArray.append(self)\n self.facing = random.randint(0,3)\n self.display.set_at((self.x,self.y), Colors.A_Zombie)\n pygame.display.update(pygame.Rect(self.x,self.y,1,1))", "def ShowAnt(self, ShouldShow):\n if ShouldShow:\n self.TempScreenColor = self.display.get_at((self.x,self.y))\n Ant.updateArray.append(pygame.Rect(self.x,self.y,1,1))\n self.display.fill(Colors.A_red, ((self.x,self.y), (1,1)))\n elif pix == Colors.A_Fire:\n self.isAlive = False\n else:\n Ant.updateArray.append(pygame.Rect(self.x,self.y,1,1))\n self.display.fill(self.TempScreenColor, ((self.x,self.y), (1,1)))", "def follow(self):\n\t\tpos = pygame.mouse.get_pos()\n\t\tself.x = pos[0]\n\t\tself.y = pos[1]\n\t\tself.draw()", "def draw_laser(self):\n 
pygame.draw.rect(self.screen, self.color, self.rect)", "def select_me(self, mouse_pos):\r\n\t\t#self.active = self.rect.collidepoint(mouse_pos)\r\n\t\tself.active = True", "def click_on_hero():\n mouseclick(coords_hero_button[0], coords_hero_button[1])", "def click_aim(self, pos):\n x, y = pos\n if (self.x - x) ** 2 + (self.y - y) ** 2 <= self.r ** 2:\n self.color = random.choice(COLORS)\n self.x = randint(100, 1000)\n self.y = randint(100, 800)\n self.r = randint(50, 100)\n self.speed_x = randint(-200, 200)\n self.speed_y = randint(-200, 200)\n return True\n else:\n return False", "def draw():\n screen.fill((0, 0, 0))\n alien.draw()", "def on_render(self, console):\n super().on_render(console)\n x, y = self.engine.mouse_location\n console.tiles_rgb['bg'][x, y] = color.white\n console.tiles_rgb['fg'][x, y] = color.black", "def start_cast(self, location=(0.0, 0.0, 0.0)):\r\n opengles.glClearColor(ctypes.c_float(0.0), ctypes.c_float(0.0), \r\n ctypes.c_float(0.0), ctypes.c_float(1.0))\r\n super(ShadowCaster, self)._start()\r\n self.camera.reset(is_3d=False, scale=self.scale)\r\n self.camera.position((location[0], 0, location[2]))\r\n self.location = location", "def game_click(coord):\n mouseclick(coord[0], coord[1])\n time.sleep(0.5)", "def select_me(self, mouse_pos):\r\n\t\tself.active = self.rect.collidepoint(mouse_pos)", "def draw_aim(self):\n polygon(screen, self.color, [(self.x, self.y), (self.x + self.r * 1.71 / 2, self.y - self.r / 2),\n (self.x + self.r * 1.71, self.y), (self.x + self.r * 1.71, self.y + self.r),\n (self.x + self.r * 1.71 / 2, self.y + 3 * self.r / 2), (self.x, self.y + self.r)])", "def on_mouse_press(self, x, y, button, modifiers):\n self.add_wall()", "def goto(x, y):\n turtleTmp.setposition(x, y)", "def draw_a(self):\r\n pen.down()\r\n pen.left(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.right(90)\r\n pen.forward(40)\r\n pen.up()\r\n pen.back(20)\r\n pen.right(90)\r\n pen.down()\r\n pen.forward(40)\r\n pen.up()\r\n 
pen.left(90)\r\n pen.forward(20)\r\n pen.left(90)\r\n pen.forward(50)", "def draw():\n clear()\n\n for target in targets:\n goto(target.x, target.y)\n dot(20, \"blue\")\n\n if inside(ball):\n goto(ball.x, ball.y)\n dot(6, \"red\")\n\n update()", "def update(self):\r\n self.x = 60\r\n self.y = games.mouse.y\r\n self.check_collide()", "def execute_action(self, a):\n x,y = self.agent\n self.agent = self._get_new_position(x,y,a)", "def mouse_hover(self):\n self.color1 = self.color # Color changes\n position = pygame.mouse.get_pos() # Get mouse position\n if self.rect.collidepoint(position): # If the mouse is inside the button rect\n self.color1 = LIGHT_GREEN # Change color to light green", "def click(self, pos):\n x, y = pos\n if (self.x - x) ** 2 + (self.y - y) ** 2 <= self.r ** 2:\n self.color = random.choice(COLORS)\n self.x = randint(100, 1000)\n self.y = randint(100, 800)\n self.r = randint(30, 50)\n self.speed_x = randint(-100, 100)\n self.speed_y = randint(-100, 100)\n return True\n else:\n return False", "def tir():\r\n global xTrois,yTrois,speedXTrois,speedYTrois,GO\r\n norme=sqrt((mouseX-xTrois)**2+(mouseY-yTrois)**2) \r\n xline=xTrois+(mouseX-xTrois)*1000 # on crée 2 points très lois alligné avec la boule et la souris\r\n yline=yTrois+(mouseY-yTrois)*1000\r\n strokeWeight(2*rayonBalle) \r\n stroke(200+200*(norme/400), 255-200*(norme/400), 0,90)# on fait varier la couleur du trait en fonction de la distance entre la boule et la souris \r\n line(xTrois,yTrois,xline,yline)\r\n strokeWeight(1)\r\n stroke(0)\r\n norme=sqrt((mouseX-xTrois)**2+(mouseY-yTrois)**2)\r\n if mousePressed and GO>30: #quand on clique et que le jeu a commencé depuis suffisament longtemps\r\n speedXTrois=((mouseX-xTrois)/norme)*6.5*(norme/400) #on lance la boule\r\n speedYTrois=((mouseY-yTrois)/norme)*6.5*(norme/400)", "def step(self):\n r, c = self.ant_r, self.ant_c\n if self.grid[r][c] == LangtonAnt.WHITE:\n self.direction = rotate_counterclock(self.direction)\n self.grid[r][c] = 
LangtonAnt.BLACK\n else:\n self.direction = rotate_clockwise(self.direction)\n self.grid[r][c] = LangtonAnt.WHITE\n\n self.ant_r -= self.direction[1]\n self.ant_c += self.direction[0]", "def update(self):\r\n self.x = games.mouse.x\r\n self.y = games.mouse.y\r\n self.check_collide()" ]
[ "0.6759585", "0.6687633", "0.65759635", "0.653857", "0.65238696", "0.65196604", "0.64285827", "0.64246964", "0.60423154", "0.58290994", "0.5784844", "0.5715071", "0.5691323", "0.568023", "0.56258106", "0.56255716", "0.55967313", "0.55849224", "0.55729926", "0.5561677", "0.55454886", "0.5539192", "0.5536724", "0.55288666", "0.55250853", "0.55144703", "0.5478314", "0.54656655", "0.54400504", "0.54257536" ]
0.6703439
1
Return True if `self` is compatible to `other`.
def is_compatible_to(self, other: 'Signature') -> bool: if self is other or self == other: return True # self.return_type must be compatible to other.return_type self_return_type, other_return_type = (self.return_type, other.return_type) # if self_return_type is None and other_return_type is not None: # return False # if self_return_type is not None and other_return_type is None: # return False if not _is_compatible(self_return_type, other_return_type): return False # other.var_arg_type must be compatible to self.var_arg_type self_var_arg_type, other_var_arg_type = (self.var_arg_type, other.var_arg_type) # if self_var_arg_type is None and other_var_arg_type is not None: # return False # if self_var_arg_type is not None and other_var_arg_type is None: # return False if not _is_compatible(other_var_arg_type, self_var_arg_type): return False # each type in other.arg_types must compatible the corresponding # type on self.arg_types self_arg_types, other_arg_types = self.arg_types, other.arg_types if len(self_arg_types) != len(other_arg_types): return False return (all((_is_compatible(oat, sat) for (oat, sat) in zip(other_arg_types, self_arg_types))))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def compatible(self, other: 'Reaction') -> bool:\n return self.lhs.compatible(other.lhs) and self.rhs.compatible(other.rhs)", "def is_same_type_as_other(cls, other):\r\n return isinstance(other, cls)", "def is_compatible(self, other):\n return self.intervals == other.intervals and\\\n self.nonderived_directions == other.nonderived_directions", "def matches(self, other):\n if isinstance(other, type):\n return isinstance(self, other)\n\n return self == other", "def __eq__(self, other: Any) -> bool:\n if not isinstance(other, type(self)):\n return NotImplemented\n return True", "def __eq__(self, other):\n return isinstance(other, self.__class__)", "def _is_equal_same_type(self, other):\n return True", "def compatible(self, other):\n compatible = ((self.ndim == other.ndim) and\n self.regular and\n other.regular and\n all(numpy.allclose(sw[0], ow[0]) for (sw, ow) in\n zip(self.binwidths, other.binwidths)))\n return compatible", "def __eq__(self, other) -> bool:\n if not isinstance(other, type(self)):\n return False\n for attribute in self.classes:\n if getattr(self, attribute) != getattr(other, attribute):\n return False\n return True", "def __eq__(self, other):\n if self is other:\n return True\n\n if not (type(self) == type(other)):\n return False\n\n assert self.base == other.base\n\n try:\n rands = self.rands\n except NotImplementedError:\n rands = None\n\n if rands:\n return all(s == o for s, o in zip(self.rands, other.rands))\n else:\n return NotImplemented\n\n return False", "def is_syntactically_equal(self, other):\n raise NotImplementedError() # To be subclassed", "def same_as(self, other):\n return super().__eq__(other)", "def __eq__(self, other: 'Origin') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def __eq__(self,other):\n try: return self.object==other.object and isinstance(self,type(other))\n except: return False", "def is_mutually_bit_compatible_with(self, other: 'CompositeType') -> 
bool:\n return self.bit_length_set == other.bit_length_set", "def __eq__(self, other) -> bool:\n return super().__eq__(other) and self.permeability == other.permeability", "def __eq__(self, other):\n if type(self) is not type(other):\n return NotImplemented\n \n return self._is_equal_same_type(other)", "def __eq__(self, other):\n if type(self) is not type(other):\n return NotImplemented\n \n return self._is_equal_same_type(other)", "def __eq__(self, other):\n if type(self) is not type(other):\n return NotImplemented\n \n return self._is_equal_same_type(other)", "def compatible(self, other):\n return (hasattr(other, 'map') and super(rmap, self).compatible(other))", "def __eq__(self, other: 'CrossConnectRouter') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def __eq__(self, other):\n return (isinstance(other, self.__class__) and\n self.type == other.type and\n self.data == other.data)", "def __eq__(self, other):\n if type(other) is type(self):\n return other.data == self.data\n return False", "def __eq__(self, other: 'OriginInput') -> bool:\n if not isinstance(other, self.__class__):\n return False\n return self.__dict__ == other.__dict__", "def almost_equals(self, other):\n if self.__class__ is other.__class__ and len(self) == len(other):\n for a, b in zip(self, other):\n if not a.almost_equals(b):\n return False\n return True\n else:\n return False", "def __eq__(self, other: object) -> bool:\n if isinstance(other, Lintable):\n return bool(self.name == other.name)\n return False", "def __eq__(self, other):\n if isinstance(self, other.__class__):\n return self.__dict__ == other.__dict__\n return False", "def __eq__(self, other: Any) -> bool:\n return (\n super().__eq__(other) and isinstance(other, AtomicField) and self._spark_data_type == other._spark_data_type\n )", "def __eq__(self, other: object) -> bool:\n\n if not isinstance(other, self.__class__):\n return False\n\n if not self.simctl_type == 
other.simctl_type:\n return False\n\n return self.raw_info == other.raw_info", "def equals(self, other): # -> bool:\n ..." ]
[ "0.7865186", "0.75270057", "0.7411802", "0.73771805", "0.7337953", "0.7326996", "0.7268032", "0.72549725", "0.71684146", "0.7143496", "0.70590407", "0.70508957", "0.6964649", "0.69521296", "0.6904362", "0.6858526", "0.6841164", "0.6841164", "0.6841164", "0.68307346", "0.6821179", "0.6817714", "0.6812071", "0.67961425", "0.6794651", "0.6793793", "0.6785849", "0.6771743", "0.67696005", "0.676185" ]
0.7725469
1
Retrieve the type signature of the callable `obj`.
def signature(obj: Callable) -> Signature: return Signature.from_callable(obj)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _make_callable_signature(obj):\n if inspect.isclass(obj) or inspect.isfunction(obj):\n if obj.__name__ == \"<lambda>\":\n return _make_lambda_name(obj)\n return obj.__name__\n elif inspect.ismethod(obj):\n obj_self = obj.__self__\n if isinstance(obj_self, type):\n cls_name = obj_self.__name__\n else:\n cls_name = obj_self.__class__.__name__\n return f\"{cls_name}.{obj.__name__}\"\n else:\n raise TypeError(\n f\"Only class/function/methods are valid inputs, got {type(obj)}\")", "def getargspec(self,obj):\n\n if inspect.isfunction(obj):\n func_obj = obj\n elif inspect.ismethod(obj):\n func_obj = obj.im_func\n else:\n raise TypeError, 'arg is not a Python function'\n args, varargs, varkw = inspect.getargs(func_obj.func_code)\n return args, varargs, varkw, func_obj.func_defaults", "def from_callable(cls, obj: Callable) -> 'Signature':\n namespace = None\n arg_types = []\n var_arg_type = None\n if isinstance(obj, type): # it's a class?\n try:\n sig = inspect.signature(_get_constructor(obj) or obj)\n except (TypeError, ValueError):\n sig = None\n skip_arg = 1\n else:\n try:\n sig = inspect.signature(obj)\n except TypeError:\n sig = None\n skip_arg = 0\n if sig:\n return_type = sig.return_annotation\n if return_type is sig.empty:\n if isinstance(obj, type):\n return_type = obj\n else:\n return_type = Any\n elif return_type is None and isinstance(obj, type):\n return_type = obj\n elif isinstance(return_type, Text):\n namespace = (namespace or\n importlib.import_module(obj.__module__).__dict__)\n return_type = eval(return_type, namespace)\n non_def_args = [(p.name, p.kind, p.annotation)\n for p in list(sig.parameters.values())[skip_arg:]\n if p.default is sig.empty]\n for name, kind, annotation in non_def_args:\n if kind in (_KEYWORD_ONLY, _VAR_KEYWORD):\n raise ValueError(\"'\" + repr(obj) + \"' has keyword-only \"\n \"arg w/o default value: '\" + name + \"'.\")\n if annotation is sig.empty:\n raise ValueError(\"'\" + repr(obj) + \"' has arg\"\n \" w/o type hint: 
'\" + name + \"'.\")\n if isinstance(annotation, Text):\n namespace = (namespace or\n importlib.import_module(obj.__module__)\n .__dict__)\n try:\n annotation = eval(annotation, namespace)\n except NameError:\n annotation = None\n if isinstance(annotation, (type, TypingMeta, _TypingBase)):\n if kind == _VAR_POSITIONAL:\n var_arg_type = annotation\n else:\n arg_types.append(annotation)\n else:\n raise ValueError(\"'\" + repr(obj) + \"' has arg\"\n \" w/o type hint: '\" + name + \"'.\")\n if arg_types or var_arg_type or return_type:\n return cls(arg_types, var_arg_type=var_arg_type,\n return_type=return_type)\n raise ValueError(\"Can't retrieve signature of '\" + repr(obj) + \"'.\")", "def typename(obj):\n return obj.__name__ if hasattr(obj, '__name__') else type(obj).__qualname__", "def __call__(self, obj: Any) -> CallableDetails:\n try:\n return CallableInspector.inspect(obj)\n except TypeError as e:\n raise IncompatibleHandlerFactoryError(f\"{obj!r}: {e}\")", "def get_func_ast(obj : types.FunctionType):\n return get_ast(obj).body[0]", "def __call__(self, obj: Any) -> CallableDetails:\n if self.subtype_of:\n if not isinstance(obj, self.subtype_of):\n raise IncompatibleHandlerFactoryError(\n f\"Object {obj!r} is not type of {self.subtype_of}\"\n )\n\n try:\n attr = getattr(obj, self.name)\n except AttributeError:\n raise IncompatibleHandlerFactoryError(\n f\"Object {obj!r} has no attribute named {self.name}\"\n )\n\n try:\n return CallableInspector.inspect(attr)\n except TypeError as e:\n raise IncompatibleHandlerFactoryError(f\"{obj!r} attribute {self.name}: {e}\")", "def typename ( o ) :\n return type ( o ) .__name__", "def print_type(obj: object) -> None:\n print(f'{type(obj)}')", "def info(obj):\n if type(obj) is tuple:\n return '({})'.format(', '.join(map(TypeTool.info,obj)))\n elif type(obj) is list:\n return 'List[{}]'.format(TypeTool.info(obj[0]))\n else:\n ctype_name = type(obj).__name__\n if ctype_name == 'ndarray': return 
'{}[{}]{}'.format(ctype_name,obj.dtype, obj.shape)\n elif ctype_name == 'str': return 'string'\n elif ctype_name == 'bytes': return 'List[byte]'\n else: return ctype_name", "def get_func_type(self, *args):\n return _ida_hexrays.cfuncptr_t_get_func_type(self, *args)", "def get_func_type(self, *args):\n return _ida_hexrays.cfunc_t_get_func_type(self, *args)", "def get_bytes(obj):\n try:\n obj = obj.read(_NUM_SIGNATURE_BYTES)\n except AttributeError:\n # duck-typing as readable failed - we'll try the other options\n pass\n\n kind = type(obj)\n\n if kind is bytearray:\n return signature(obj)\n\n if kind is str:\n return get_signature_bytes(obj)\n\n if kind is bytes:\n return signature(obj)\n\n if kind is memoryview:\n return signature(obj).tolist()\n\n raise TypeError('Unsupported type as file input: %s' % kind)", "def fl_get_object_type(ptr_flobject):\n _fl_get_object_type = library.cfuncproto(\n library.load_so_libforms(), \"fl_get_object_type\", \\\n cty.c_int, [cty.POINTER(xfdata.FL_OBJECT)], \\\n \"\"\"int fl_get_object_type(FL_OBJECT * obj) \"\"\")\n library.check_if_flinitialized()\n library.verify_flobjectptr_type(ptr_flobject)\n library.keep_elem_refs(ptr_flobject)\n retval = _fl_get_object_type(ptr_flobject)\n return retval", "def _get_arity(callable):\n return len(inspect.signature(callable).parameters)", "def signature(function: model.Function) -> str:\n return str(function.signature)", "def get_python_type(obj, format_type):\n t = type(obj)\n\n return t if format_type is None else t.__name__", "def is_callable(obj):\n return callable(obj)", "def ftype(obj):\n if isinstance(obj, np.ndarray) or isinstance(obj, torch.Tensor):\n types = \" \".join([str(x) for x in [type(obj), obj.dtype, list(obj.shape)]])\n if isinstance(obj, torch.Tensor):\n types = \" \".join([obj.device.type, types])\n else:\n types = str(type(obj))\n\n for s in [\"class \", \"'\", \"<\", \">\", \"(\", \")\", \"torch.\", \"numpy.\" ]:\n types = types.replace(s, \"\")\n return types", "def 
_get_signature(self):\n if hasattr(self, '_signature'):\n return self._signature\n fullargspec = inspect.getargspec(self.callable)\n argspec = fullargspec[0]\n assert argspec[0:2] == ['self', 'req'] or argspec[0] == 'req', \\\n 'Invalid argspec %s for %s' % (argspec, self.name)\n while argspec and (argspec[0] in ('self', 'req')):\n argspec.pop(0)\n argspec.reverse()\n defaults = fullargspec[3]\n if not defaults:\n defaults = []\n else:\n defaults = list(defaults)\n args = []\n sig = []\n for sigcand in self.xmlrpc_signatures():\n if len(sig) < len(sigcand):\n sig = sigcand\n sig = list(sig)\n for arg in argspec:\n if defaults:\n value = defaults.pop()\n if type(value) is str:\n if '\"' in value:\n value = \"'%s'\" % value\n else:\n value = '\"%s\"' % value\n arg += '=%s' % value\n args.insert(0, RPC_TYPES[sig.pop()] + ' ' + arg)\n self._signature = '%s %s(%s)' % (RPC_TYPES[sig.pop()], self.name, ', '.join(args))\n return self._signature", "def _qualname(obj):\n return obj.__qualname__", "def _get_argspec(self, obj, node, name=None):\n try:\n return self.arg_spec_cache.get_argspec(obj, name=name, logger=self.log)\n except TypeError as e:\n self._show_error_if_checking(node, e, ErrorCode.not_callable)\n return None", "def kind_of(obj):\n # why don't I use isinstance - it saves us big time\n\n # dict, list, and tuple are differianted from str, unicode, int, bool, and float\n # because they have special treatment and simple `==` or `is` is not enough to\n # prove them valid.\n obj_type = type(obj)\n if obj_type is dict:\n return TYPE_DICTIONARY\n elif obj_type is list:\n return TYPE_LIST\n elif obj_type is tuple:\n return TYPE_TUPLE\n elif obj in ATOMIC_TYPES:\n return TYPE_TYPE\n elif obj is object:\n return TYPE_OBJECT\n elif getattr(obj, \"__class__\", False) and issubclass(obj.__class__, BaseValidator):\n return TYPE_VALIDATOR\n elif callable(obj):\n return TYPE_FUNCTION\n # this f##king SRE_Pattern, why can't I f##king kill it\n elif getattr(obj, \"match\", False) 
and getattr(obj, \"search\", False):\n return TYPE_REGEX\n else:\n return TYPE_UNKNOWN", "def is_function(obj):\n return isinstance(obj, (types.FunctionType, types.MethodType,\n types.LambdaType))", "def _type_as_str(obj: Instrument) -> str:\n # type().__name__ will return something like: '_Counter',\n # this drops the leading underscore for cleaner logging.\n\n return type(obj).__name__[1:]", "def getargspec(func):\n if isinstance(func, partial):\n return inspect.getargspec(func.func)\n else:\n if isinstance(func, type):\n return inspect.getargspec(func.__init__)\n else:\n return inspect.getargspec(func)", "def get_type_name_value(obj):\n return None if obj is None else obj.GetTypeName()", "def lookup(obj):\n\n return (dir(obj))", "def lookup(obj):\n return dir(obj)", "def lookup(obj):\n return dir(obj)" ]
[ "0.7464089", "0.6583049", "0.6352262", "0.6304821", "0.6268376", "0.6045959", "0.60015917", "0.5922175", "0.5805204", "0.57128704", "0.56448716", "0.56355196", "0.5625885", "0.560357", "0.5551908", "0.55509865", "0.55469924", "0.55452144", "0.55344373", "0.54780686", "0.54219574", "0.5378657", "0.53590554", "0.53577656", "0.5346298", "0.5273047", "0.524071", "0.52340925", "0.52335906", "0.52335906" ]
0.8067756
0
Enable automatic upgrade of Sql IaaS extension Agent.
def enable_automatic_upgrade(self) -> Optional[bool]: return pulumi.get(self, "enable_automatic_upgrade")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def upgrade(self):", "def upgrade(self):", "def upgrade():\n op.add_column(\n 'assessments',\n sa.Column(\n 'assessment_type',\n sa.String(length=250),\n nullable=False,\n server_default=\"Control\",\n )\n )\n # Change CA help text \"Assessment type\" to \"Assessment Category\"\n op.execute(\n 'UPDATE custom_attribute_definitions '\n 'SET helptext = \"Assessment Category\" '\n 'WHERE helptext = \"Assessment type\" '\n 'AND definition_type = \"assessment\" AND title = \"Type\";'\n )", "def test_upgrade_with_auto_upgrade_latest_engine_enabled():", "def upgrade():\r\n current_context = op.get_context()\r\n meta = current_context.opts['target_metadata']\r\n user = sa.Table('users', meta, autoload=True)\r\n\r\n # Add the initial admin user account.\r\n op.bulk_insert(user, [{\r\n 'username': u'admin',\r\n 'password': u'$2a$10$LoSEVbN6833RtwbGQlMhJOROgkjHNH4gjmzkLrIxOX1xLXNvaKFyW',\r\n 'email': u'[email protected]',\r\n 'activated': True,\r\n 'is_admin': True,\r\n 'api_key': u'123456',\r\n }\r\n ])", "def auto_upgrade(self) -> bool:\n return pulumi.get(self, \"auto_upgrade\")", "async def on_upgrade_complete(self, upgrade: UpgradeId):", "def upgradedb(ctx):\n path = Path(__file__).resolve().parent.parent\n conf = Config(str(path / \"migrations\" / \"alembic.ini\"))\n conf.set_main_option(\"script_location\", str(path / \"migrations\"))\n command.upgrade(conf, \"heads\")", "def pre_upgrade(self, upgrade_specs):\n pass", "def run_migration(env, upgrade_type):\n pass", "def enable():\n configdb = ConfigDBConnector()\n configdb.connect()\n tunnel_info = {}\n tunnel_info['FLEX_COUNTER_STATUS'] = ENABLE\n configdb.mod_entry(\"FLEX_COUNTER_TABLE\", \"TUNNEL\", tunnel_info)", "def upgrade_environment(self, db):\n\n pass", "def upgrade(revision, sql):\n alembic_command.upgrade(alembic_config, revision, sql=sql)", "def execute():\n\n\tfrappe.db.sql(\n\t\t\"\"\"\n\t\tUPDATE `tabClient Script` SET enabled=1\n\t\"\"\"\n\t)", "def enable():\n if not _status_apf():\n return 
__apf_cmd(\"-s\")", "def environment_needs_upgrade(self, db):\n\n return False", "def enable(cls):\r\n cls.disable()\r\n sys.meta_path.insert(0, cls())", "def upgrade_script():\n if postgres.db_exists(env.db):\n with cd(path()):\n sudo('bin/upgrade_{odoo} -d {db} '.format(**env), user=env.account)", "def upgrade():\n op.add_column(\n 'share_instances',\n Column('access_rules_status', String(length=255))\n )\n\n connection = op.get_bind()\n share_instances_table = utils.load_table('share_instances', connection)\n instance_access_table = utils.load_table('share_instance_access_map',\n connection)\n\n # NOTE(u_glide): Data migrations shouldn't be performed on live clouds\n # because it will lead to unpredictable behaviour of running operations\n # like migration.\n instances_query = (\n share_instances_table.select()\n .where(share_instances_table.c.status == constants.STATUS_AVAILABLE)\n .where(share_instances_table.c.deleted == 'False')\n )\n\n for instance in connection.execute(instances_query):\n\n access_mappings_query = instance_access_table.select().where(\n instance_access_table.c.share_instance_id == instance['id']\n ).where(instance_access_table.c.deleted == 'False')\n\n status = constants.STATUS_ACTIVE\n\n for access_rule in connection.execute(access_mappings_query):\n\n if (access_rule['state'] == constants.STATUS_DELETING or\n access_rule['state'] not in priorities):\n continue\n\n if priorities[access_rule['state']] > priorities[status]:\n status = access_rule['state']\n\n # pylint: disable=no-value-for-parameter\n op.execute(\n share_instances_table.update().where(\n share_instances_table.c.id == instance['id']\n ).values({'access_rules_status': upgrade_data_mapping[status]})\n )\n\n op.drop_column('share_instance_access_map', 'state')", "def package_upgrade():\n\n if (do_action_package_upgrade('nova-common',\n do_openstack_upgrade,\n CONFIGS)):\n # we should restart the container scoped (subordinate) plugins after a\n # managed openstack upgrade see: 
BUG#1835557\n for rid in relation_ids('neutron-plugin'):\n neutron_plugin_joined(rid, remote_restart=True)\n for rid in relation_ids('nova-ceilometer'):\n nova_ceilometer_joined(rid, remote_restart=True)\n for rid in relation_ids('nova-vgpu'):\n nova_vgpu_joined(rid, remote_restart=True)\n # NOTE(ajkavanagh) - if unit is paused (usually true for managed\n # upgrade) then the config_changed() function is a no-op\n config_changed()", "def switchToAppInstaller(self):\n self._sendCommand(self.SONY_CMD_ScalarExtCmdPlugIn_NotifyScalarDlmode, bufferSize=0)", "def __init__(__self__, *,\n agent_auto_upgrade: Optional[pulumi.Input[Union[str, 'AutoUpgradeOptions']]] = None,\n agent_version: Optional[pulumi.Input[str]] = None):\n if agent_auto_upgrade is None:\n agent_auto_upgrade = 'Enabled'\n if agent_auto_upgrade is not None:\n pulumi.set(__self__, \"agent_auto_upgrade\", agent_auto_upgrade)\n if agent_version is not None:\n pulumi.set(__self__, \"agent_version\", agent_version)", "def enable_audit_monitoring():\n __enable_data_access_logging()\n __enable_log_streaming()\n __create_audit_alerts()\n __get_incidents_history()", "def do_upgrade(env, ver, cursor):\n cursor.execute('UPDATE system SET name=%s WHERE name=%s',\n (\"agiletools_version\", \"taskboard_schema\"))", "def agent_auto_upgrade(self) -> Optional[pulumi.Input[Union[str, 'AutoUpgradeOptions']]]:\n return pulumi.get(self, \"agent_auto_upgrade\")", "def enable(ctx):\n\n config_db = ConfigDBConnector()\n config_db.connect()\n config_db.mod_entry(\"NAT_GLOBAL\", \"Values\", {\"admin_mode\": \"enabled\"})", "def install_step(self):\n silent_cfg_names_map = None\n\n if LooseVersion(self.version) < LooseVersion('2013_sp1'):\n # since icc v2013_sp1, silent.cfg has been slightly changed to be 'more standard'\n\n silent_cfg_names_map = {\n 'activation_name': ACTIVATION_NAME_2012,\n 'license_file_name': LICENSE_FILE_NAME_2012,\n }\n\n super(EB_icc, self).install_step(silent_cfg_names_map=silent_cfg_names_map)", "def 
__upgrade(self):", "def enable():\n configdb = ConfigDBConnector()\n configdb.connect()\n fc_info = {}\n fc_info['FLEX_COUNTER_STATUS'] = 'enable'\n configdb.mod_entry(\"FLEX_COUNTER_TABLE\", \"QUEUE_WATERMARK\", fc_info)\n configdb.mod_entry(\"FLEX_COUNTER_TABLE\", \"PG_WATERMARK\", fc_info)\n configdb.mod_entry(\"FLEX_COUNTER_TABLE\", BUFFER_POOL_WATERMARK, fc_info)", "def enable(self):\n self._installed_apps_add()" ]
[ "0.5746555", "0.5746555", "0.5360772", "0.525915", "0.5241179", "0.5205984", "0.5202186", "0.51439685", "0.5128792", "0.51103", "0.5100761", "0.5082117", "0.50677305", "0.5032857", "0.50267804", "0.5008855", "0.50035113", "0.49937436", "0.4992535", "0.49824744", "0.4973609", "0.49569327", "0.49400526", "0.4921251", "0.49035868", "0.4902366", "0.49022266", "0.48955148", "0.48866004", "0.48618186" ]
0.6036892
0
Key vault credential settings.
def key_vault_credential_settings(self) -> Optional['outputs.KeyVaultCredentialSettingsResponse']: return pulumi.get(self, "key_vault_credential_settings")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def credentials(self):\n return CurrentProject().config.credentials[self.key]", "def set_credentials():", "def _get_credentials(self):\n if self.config_file:\n with open(self.config_file) as f:\n config_str = f.read()\n credentials_dict = json.loads(config_str)\n self.credentials = credentials_dict[self.account][self.auth_type]\n else:\n self.credentials = {\n \"account\": os.environ.get('SNOWSQL_ACCOUNT'),\n \"user\": os.environ.get('SNOWSQL_USER'),\n \"password\": os.environ.get('SNOWSQL_PWD')\n }", "def credentials():\n\n username = os.environ.get('OS_USERNAME')\n password = os.environ.get('OS_PASSWORD')\n tenant_name = (os.environ.get('OS_TENANT_NAME') or\n os.environ.get('OS_PROJECT_NAME'))\n auth_url = os.environ.get('OS_AUTH_URL')\n\n config = configparser.RawConfigParser()\n if config.read(_CREDS_FILE):\n username = username or config.get('admin', 'user')\n password = password or config.get('admin', 'pass')\n tenant_name = tenant_name or config.get('admin', 'tenant')\n auth_url = auth_url or config.get('auth', 'uri')\n\n return {\n 'username': username,\n 'password': password,\n 'tenant_name': tenant_name,\n 'uri': auth_url\n }", "def _set_credentials():\n # Override credentials here if necessary\n if env.user == 'ubuntu':\n env.key_filename = [\n os.path.expanduser('~/.ssh/ubuntu-id_dsa')]\n env.abort_on_prompts = True\n env.disable_known_hosts = True\n env.use_shell = False", "def get_key_vault_credentials():\n if \"APPSETTING_WEBSITE_SITE_NAME\" in os.environ:\n return MSIAuthentication(\n resource='https://vault.azure.net'\n )\n else:\n return ServicePrincipalCredentials(\n client_id=os.environ['AZURE_CLIENT_ID'],\n secret=os.environ['AZURE_CLIENT_SECRET'],\n tenant=os.environ['AZURE_TENANT_ID'],\n resource='https://vault.azure.net'\n )", "def _add_cred_variables(self):\n self.credentialKey = {}\n authInfo = None\n if self.client:\n try:\n authInfo = self.client.getAuthenticatorInfo()\n except VersionMethodError:\n pass\n authArgOpts = 
dict(help=\"authentication plugin\")\n if authInfo:\n self.authenticatorInfo = AuthenticatorInfo(authInfo)\n authArgOpts['choices'] = self.authenticatorInfo.getAuthNames()\n else:\n self.authenticatorInfo = LegacyAuthenticatorInfo()\n\n var = self.add_variable('auth', (\"-a\", \"--auth\"), authArgOpts,\n envvar='ICAT_AUTH')\n var.postprocess = _post_auth\n for key in self.authenticatorInfo.getCredentialKeys(hide=False):\n self._add_credential_key(key)\n hidden = self.authenticatorInfo.getCredentialKeys(hide=True)\n if hidden:\n var = self.add_variable('promptPass', (\"-P\", \"--prompt-pass\"), \n dict(help=\"prompt for the password\", \n action='store_const', const=True), \n type=boolean, default=False)\n var.postprocess = _post_promptPass\n for key in hidden:\n self._add_credential_key(key, hide=True)", "def set_credentials(self, *args, **kwargs):\n pass", "def config(self):\n credentials = dict(\n key_id=LocalConfig.AWS_IAM_KEY_ID,\n access_secret=LocalConfig.AWS_IAM_ACCESS_SECRET,\n bucket=LocalConfig.AWS_S3_BUCKET,\n region=LocalConfig.AWS_S3_REGION\n )\n return credentials", "def get_credentials(self):\n #\n # Why is this not read from the yaml file?\n path = Path(path_expand(self.credential_file)).resolve()\n if not os.path.exists(path):\n os.makedirs(path)\n\n credentials_path = (path / 'google-drive-credentials.json').resolve()\n print(credentials_path)\n\n store = Storage(credentials_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(self.client_secret_file,\n self.scopes)\n flow.user_agent = self.application_name\n #\n # SHOUDL THE FLAGS NOT BE SET IN THE YAML FILE OR DOCOPTS OFTHE COMMAND?\n #\n if self.flags:\n credentials = tools.run_flow(flow, store, self.flags)\n\n return credentials", "def cfg_credentials(context):\n arguments = {\n '--config': context.config_file,\n 'authorize': False,\n 'account_summary': False\n }\n pychex_cli = PychexCli(arguments)\n pychex_cli.read_config()\n # 
Check that the values pulled from the read_config method match what we\n # know\n print(pychex_cli.username)\n assert pychex_cli.username == context.username\n assert pychex_cli.security_image_path == context.security_image_path\n assert pychex_cli.password == context.password\n # Check that the unencrypted values are not present\n with open(arguments['--config']) as cfg:\n cfg_txt = cfg.read()\n assert cfg_txt.find(context.username) == -1\n assert cfg_txt.find(context.security_image_path) == -1\n assert cfg_txt.find(context.password) == -1", "def __get_credentials_from_config(self):\n cr = ConfigFileReader()\n\n self.username = cr.get_value(Config.EDUROAM_USER)\n debug(\"Username set to : \" + self.username)\n self.password = cr.get_value(Config.EDUROAM_PWD)", "def _connect_azure_vault(self):\n credentials = ServicePrincipalCredentials(\n client_id=self.config.azure_client_id,\n secret=os.environ['AZURE_CLIENT_SECRET'],\n subscription=self.config.azure_subscription_id,\n tenant=self.config.azure_tenant_id\n )\n\n self.vault = KeyVaultClient(credentials)\n self.default_azure_key_version = KeyVaultId.version_none", "def get_credentials(self):\n home_dir = os.path.expanduser(\"~\")\n credential_dir = os.path.join(home_dir, \".credentials\")\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir, \"autoto.json\")\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n credentials = tools.run_flow(flow, store, self.auth_flags)\n print(\"Storing credentials to \" + credential_path)\n return credentials", "def get_credentials():\n home_dir = os.path.expanduser('~')\n credential_dir = os.path.join(home_dir, '.credentials')\n if not os.path.exists(credential_dir):\n os.makedirs(credential_dir)\n credential_path = os.path.join(credential_dir,\n 
'credentialv_modify.json')\n\n store = Storage(credential_path)\n credentials = store.get()\n if not credentials or credentials.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)\n flow.user_agent = APPLICATION_NAME\n if flags:\n credentials = tools.run_flow(flow, store, flags)\n else: # Needed only for compatibility with Python 2.6\n credentials = tools.run(flow, store)\n print('Storing credentials to ' + credential_path)\n return credentials", "def _make_sure_credentials_are_set(self):\n if self.backend_options:\n if not os.environ.get('APCA_API_KEY_ID') and \\\n self.backend_options['key_id']:\n os.environ['APCA_API_KEY_ID'] = self.backend_options['key_id']\n if not os.environ.get('APCA_API_SECRET_KEY') and \\\n self.backend_options['secret']:\n os.environ['APCA_API_SECRET_KEY'] = self.backend_options[\n 'secret']\n if not os.environ.get('APCA_API_BASE_URL') and \\\n self.backend_options['base_url']:\n os.environ['APCA_API_BASE_URL'] = self.backend_options[\n 'base_url']", "def get_credential(self, key):\n return self.creds.get(key, '')", "def credentials(self) -> Mapping:", "def get_host_vars(self, host, vault_password=None):\n use_keychain = host.get_variables().get(\"use_keychain\")\n hostname = host.get_variables().get('inventory_hostname')\n if '-l' in sys.argv:\n # Check if only limited set of hosts is required for this run and get password only for them\n # quite a dirty way to accomplish that...\n found = False\n for limit in sys.argv[sys.argv.index('-l')+1].split(\",\"):\n m = re.match(limit.replace(\"*\", \".*\"), hostname)\n if m is not None:\n found = True\n break\n if not found:\n return\n if use_keychain and use_keychain.lower() in ['true', 'yes']:\n if VarsModule.sudo_password_cache.get(hostname) is None:\n user, passwd = KeyChain.get_credentials(host.get_variables()['inventory_hostname'])\n if not user:\n # Maybe short hostname then?\n user, passwd = 
KeyChain.get_credentials(host.get_variables()['inventory_hostname_short'])\n\n if not passwd:\n print(\"Cannot get password for host %s from keychain\" % hostname)\n passwd = getpass.getpass(\"Password for host %s: \"% hostname)\n VarsModule.remote_username_cache[hostname] = user\n VarsModule.sudo_password_cache[hostname] = passwd\n if VarsModule.remote_username_cache[hostname]:\n host.set_variable('ansible_ssh_user', VarsModule.remote_username_cache[hostname])\n host.set_variable('ansible_sudo_pass', VarsModule.sudo_password_cache[hostname])", "def get_client_settings_env(**_):\r\n username = os.environ.get('SL_USERNAME')\r\n api_key = os.environ.get('SL_API_KEY')\r\n proxy = os.environ.get('https_proxy')\r\n\r\n config = {'proxy': proxy}\r\n if username and api_key:\r\n config['auth'] = BasicAuthentication(username, api_key)\r\n return config", "def credentials(self):\n return self._credentials", "def credential(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"credential\")", "def get_credentials(self):\r\n home_dir = os.path.expanduser('~')\r\n credential_dir = os.path.join(home_dir, '.credentials')\r\n if not os.path.exists(credential_dir):\r\n os.makedirs(credential_dir)\r\n credential_path = os.path.join(credential_dir, self.CRED_FILENAME)\r\n \r\n store = Storage(credential_path)\r\n credentials = store.get()\r\n if not credentials or credentials.invalid:\r\n flow = client.flow_from_clientsecrets(self.CLIENT_SECRET_FILE, self.SCOPES)\r\n flow.user_agent = self.APPLICATION_NAME\r\n if flags:\r\n credentials = tools.run_flow(flow, store, flags)\r\n else: # Needed only for compatibility with Python 2.6\r\n credentials = tools.run(flow, store)\r\n print('Storing credentials to ' + credential_path)\r\n return credentials", "def __init__(self, config: Union[str, Path, TextIOWrapper] = None):\n if not isinstance(config, TextIOWrapper):\n config = Path(config) if config else Path(self._DEFAULT_LOCATION)\n config = config.expanduser().absolute()\n with 
open(config, 'r') as fp:\n self._config = json.load(fp)\n else:\n self._config = json.load(config)\n self._store = self._config.get('credsStore', None)\n if self._store not in self._SUPPORTED_STORES:\n raise UnsupportedStore(f'Credential store \"{self._store}\" not supported')\n # TODO: Support the other methods besides secretservice when we can actually test with them\n self._cmd = ['docker-credential-secretservice', 'get']", "def aws_credentials():\n os.environ['AWS_ACCESS_KEY_ID'] = 'testing'\n os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'\n os.environ['AWS_SECURITY_TOKEN'] = 'testing'\n os.environ['REGION'] = 'region'", "def aws_credentials():\n os.environ['AWS_ACCESS_KEY_ID'] = 'testing'\n os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing'\n os.environ['AWS_SECURITY_TOKEN'] = 'testing'\n os.environ['AWS_SESSION_TOKEN'] = 'testing'\n os.environ['AWS_DEFAULT_REGION'] = 'us-east-1'", "def credential(self):\n return self._credential", "def credential(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"credential\")", "def credentials(self):\n return True", "def get_credentials(self):\n return self.credentials" ]
[ "0.67248553", "0.66756296", "0.65379584", "0.6494901", "0.6491272", "0.6403726", "0.6381683", "0.6038071", "0.60170984", "0.60099757", "0.59500206", "0.59424424", "0.5904099", "0.5865562", "0.5857952", "0.58470386", "0.5843469", "0.5818919", "0.5775693", "0.5767203", "0.57606214", "0.57508445", "0.57447684", "0.57394063", "0.5738865", "0.5710766", "0.5710032", "0.57095736", "0.5695578", "0.567955" ]
0.681181
0
SQL IaaS Agent least privilege mode.
def least_privilege_mode(self) -> Optional[str]: return pulumi.get(self, "least_privilege_mode")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def policy(agent):", "def which_security(self):\n # Tools and services\n if self.details[\"owner\"] in self.tas_override:\n # setup owner and oncall privs\n if self.details[\"platform\"] == \"prod\":\n self.details[\"security\"][\"role.service_owner\"] = \"%s-prod\" % self.details[\"owner\"]\n self.details[\"security\"][\"role.authorized_operator\"].append(\"team-toolsandservices-prod\")\n else:\n self.details[\"security\"][\"role.service_owner\"] = self.details[\"owner\"]\n self.details[\"security\"][\"role.authorized_operator\"].append(\"team-toolsandservices\")\n\n # by function\n if self.details[\"function\"] in [\"linjump\", \"loga\", \"log\"]:\n if self.details[\"platform\"] == \"prod\":\n self.details[\"security\"][\"role.authorized\"].append('prod')\n # If the host is not in prod, it will inherit engineering acess\n # from below.\n\n elif self.details[\"function\"] == \"repo\":\n self.details[\"security\"][\"role.authorized\"].append('engineering')\n\n # Engineering should be able to log into our hosts that aren't in prod\n if self.details[\"platform\"] != \"prod\":\n self.details[\"security\"][\"role.authorized\"].append('engineering')\n\n # Infosec\n elif self.details[\"owner\"] == \"team-infosec\":\n if self.details[\"platform\"] == \"prod\":\n self.details[\"security\"][\"role.service_owner\"] = \"%s-prod\" % self.details[\"owner\"]\n else:\n self.details[\"security\"][\"role.service_owner\"] = self.details[\"owner\"]\n\n # Other hosts should get the legacy access policy\n else:\n if self.details[\"platform\"] == \"prod\":\n self.details[\"security\"][\"role.service_owner\"] = 'prod'\n else:\n self.details[\"security\"][\"role.service_owner\"] = 'ops'\n self.details[\"security\"][\"role.authorized\"].append('engineering')", "def __acl__(self):\n yield 'Allow', 'system.Everyone', 'none'\n yield security.DENY_ALL", "def get_everyone_granted(self):", "def has_super_access():\n current_user = frappe.get_doc('User', frappe.session.user)\n roles = 
set([role.role for role in current_user.roles])\n return bool(roles & {'Administrator', 'Instructor', 'Education Manager', 'System Manager', 'Academic User'})", "def authorization():\n pass", "def privilege(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"privilege\")", "def role_command():", "def give_permissions(self):\n self._activate()\n self.configure(state=\"enabled\")", "def enter_maintenance_mode(self):\n cmd = self._cmd('enterMaintenanceMode')\n if cmd.success:\n self._update(_get_role(self._get_resource_root(), self._path()))\n return cmd", "def default_allow_privilege_escalation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"default_allow_privilege_escalation\")", "def default_allow_privilege_escalation(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"default_allow_privilege_escalation\")", "def administrator(_) -> int:\n return 1 << 3", "def administrator(_) -> int:\n return 1 << 3", "def authorization(cls):\n\n return PyFunceble.CONFIGURATION.db_type in [\"mariadb\", \"mysql\"]", "def default_capabilities(self):", "def test_superuser_permission_with_super_user(self):\n with self.settings(MAINTENANCE_MODE_PERMISSION_PROCESSORS=(\n 'maintenancemode.permission_processors.is_superuser',\n )):\n self.client.login(username='super_user', password='maintenance_pw')\n response = self.client.get('/')\n self.assertNormalMode(response)", "def enable_primary_site_administrator(self):\n eURL = self._url + \"/psa/enable\"\n params = {\n \"f\" : \"json\"\n }\n return self._con.post(path=eURL, postdata=params)", "def _onchange_restrict_access(self, stage_id):\n print('----------',self.env.uid)\n # if self.env.uid != 1 :\n raise exceptions.Warning('You are not allowed to change the stages, Please contact the Administrator')\n return True\n return {}", "def RequestedPermissions(self) -> _n_6_t_0:", "def server_agent():", "def set_admin():\n try:\n ctypes.windll.shell32.ShellExecuteW(None, \"runas\", sys.executable, 
__file__, None, 1)\n except:\n print(\"Could not set the UAC level.\")", "def is_administrator(self):\n return False", "def role(self):\n return ['Server', 'Client'][self.is_client()]", "def permissions():\n pass", "def show_privileges(self):\n print(\"\\nAdministrator privileges: \")\n for privilege in self.privileges:\n print(\"- \" + privilege)", "def check_permission():\n if IS_ADMIN:\n out_info(\"Running as Root/Admin\")\n else:\n out_warning(\"Running without root/admin privileges\")", "def supportedPrivileges(self):\n return allPrivilegeSet", "def get_accessible_user_id(self):\n ### DATABASE CODE GOES HERE\n return 1", "def check_enable_mode(self, *args, **kwargs):\n pass" ]
[ "0.5651815", "0.5552572", "0.53834474", "0.53759897", "0.5337457", "0.53322214", "0.52849346", "0.521477", "0.5178467", "0.517436", "0.5171991", "0.5171991", "0.5162061", "0.5162061", "0.5161537", "0.5125376", "0.5106921", "0.50724584", "0.50508195", "0.5049469", "0.5044538", "0.5034205", "0.50227815", "0.5019157", "0.5014796", "0.50106025", "0.49729404", "0.49575183", "0.49503943", "0.49441284" ]
0.6251558
0
SQL image offer. Examples include SQL2016WS2016, SQL2017WS2016.
def sql_image_offer(self) -> Optional[str]: return pulumi.get(self, "sql_image_offer")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def queryImage(name):\n\n width, height = queryDimensions(name)\n header, rows = querySciDB(\"scan(%s)\" % name)\n\n return render.renderPng(width, height, rows)", "def get_image(id_num):\n return sqldb.get_image(id_num)", "def sql_image_sku(self) -> Optional[str]:\n return pulumi.get(self, \"sql_image_sku\")", "def return_image(val, model_id, message_name, field_name, mime, sind):\n column_data_source = curdoc().get_model_by_name(sind)\n index = column_data_source.tags[0]\n url = \"http://{0}/image/\".format(_host) + \"---\".join([model_id, message_name, field_name, mime, sind, str(index)])\n return url", "def get_dataframe_from_db( input_file, conn, sources: list=None, image_min: int=128 ):\n\n if sources is None and input_file is None:\n names = \"\"\n elif input_file is not None:\n if isinstance(input_file, str):\n input_file = [input_file]\n filelist = preprocessFileList( input_file )\n names = [ f'\"{Path(f).name}\"' for f in filelist if '\"' not in f ]\n names = f'AND Sources.name in ({\", \".join(names)})'\n else:\n names = [ f'\"{s}\"' for s in sources ]\n names = f'AND Sources.name in ({\", \".join(names)})'\n\n if image_min is not None:\n images = f\"\"\"AND Sources.image_width >= {image_min}\n AND Sources.image_height >= {image_min}\"\"\"\n else:\n images = \"\"\n\n# Edited values for angle and throttle override all others.\n# Otherwise user overrides pilot. 
But there's no way to know if the user overrode the pilot if the user value is zero.\n sql=f\"\"\"SELECT Sources.full_path || '/' || '{Tub.images()}' || '/' || TubRecords.image_path as \"cam/image_array\",\n-- edit > user > pilot\ncase when edit_angle is not null then edit_angle\n when pilot_angle is not null and user_angle == 0.0 then pilot_angle\n else user_angle end as \"user/angle\",\ncase when edit_throttle is not null then edit_throttle\n when pilot_throttle is not null and user_throttle == 0.0 then pilot_throttle\n else user_throttle end as \"user/throttle\"\n FROM TubRecords, Sources\n WHERE TubRecords.source_id = Sources.source_id\n{names}\n{images}\nAND TubRecords.deleted = 0;\"\"\"\n\n df = pd.read_sql_query(sql, conn)\n\n return df", "def image(self, name=None):\n raise NotImplementedError", "def test_Image():\n assert Image(cur, \"Simple_Linear\").detect_image() == True\n assert Image(cur, \"Logistic_Linear\").detect_image() == False\n assert Image(cur, \"Simple_Linear\").date == \"2021-04-20\"\n assert Image(cur, \"Breslow-Day_Test\").source == \"Course BIOSTAT703 slide\"", "def _generateImageDescription(self, obj, **args ):\n result = []\n try:\n image = obj.queryImage()\n except NotImplementedError:\n pass\n else:\n description = image.imageDescription\n if description and len(description):\n result.append(description)\n return result", "def s2eexi_image(node, key_image, _paren_if_fun, _paren_if_app):\n return quantified_exp_image(node, key_image, open_close=(\"[\", \"]\"))", "def image_selection_change():\n\n def return_image(val, model_id, message_name, field_name, mime, sind):\n \"\"\"Returns a URL resolvable by the probe\"\"\"\n column_data_source = curdoc().get_model_by_name(sind)\n index = column_data_source.tags[0]\n url = \"http://{0}/image/\".format(_host) + \"---\".join([model_id, message_name, field_name, mime, sind, str(index)])\n return url\n\n d = curdoc()\n _remove_fig(d)\n model_id, message_name, _ = 
run_handlers.get_modelid_messagename_type(d)\n image_field = d.get_model_by_name(IMAGE_SELECTION).value.split(\" :\")[0]\n mime = d.get_model_by_name(MIME_SELECTION).value\n\n if image_field != DEFAULT_UNSELECTED and mime != DEFAULT_UNSELECTED:\n plot = figure(plot_width=500, plot_height=500, title=\"\", x_range=Range1d(start=0, end=1), y_range=Range1d(start=0, end=1), name=FIGURE_MODEL)\n sind = run_handlers.get_source_index(d.session_context.id, model_id, message_name, image_field + mime)\n\n _install_callback_and_cds(sind, model_id, message_name,\n {image_field: [return_image, {\"model_id\": model_id,\n \"message_name\": message_name,\n \"field_name\": image_field,\n \"mime\": mime,\n \"sind\": sind}]},\n stream_limit=1)\n plot.image_url(url=image_field, x=0, y=1, h=1, w=1, source=d.get_model_by_name(sind))\n d.add_root(plot)", "def visualize(self):\n return \"https://neuroglancer.bossdb.io/#!{'layers':{'image':{'source':'boss://__replace_me__'}}}\".replace(\n \"__replace_me__\",\n f\"{self.volume_provider.boss._project._base_protocol}://{self.volume_provider.boss._project._base_url}/{self.collection_name}/{self.experiment_name}/{self.channel_name}\",\n )", "def get_latest_image():\n return sqldb.get_latest_image()", "def get_hd_image_url(image_url):\n query_params = ['?w=2000', '?w=1800', '?w=1480', '?w=1380']\n for param in query_params:\n hd_image_url = image_url + param\n response = requests.get(hd_image_url)\n if response.status_code == 200:\n return hd_image_url\n return image_url", "def small_image(self):\n pass", "def get_dataframe_from_db_with_aux( input_file, conn, sources: list=None, image_min: int=128 ):\n\n if sources is None and input_file is None:\n names = \"\"\n elif input_file is not None:\n if isinstance(input_file, str):\n input_file = [input_file]\n filelist = preprocessFileList( input_file )\n names = [ f'\"{Path(f).name}\"' for f in filelist if '\"' not in f ]\n names = f'AND Sources.name in ({\", \".join(names)})'\n else:\n names = [ 
f'\"{s}\"' for s in sources ]\n names = f'AND Sources.name in ({\", \".join(names)})'\n\n if image_min is not None:\n images = f\"\"\"AND Sources.image_width >= {image_min}\n AND Sources.image_height >= {image_min}\"\"\"\n else:\n images = \"\"\n\n# Edited values for angle and throttle override all others.\n# Otherwise user overrides pilot. But there's no way to know if the user overrode the pilot if the user value is zero.\n# select t.source_id, pos_cte, sm.value, tr.track_id from TubRecords t, Sources s, SourceMeta sm, Tracks tr where t.source_id = s.source_id and s.source_id = sm.source_id and sm.key=\"DONKEY_GYM_ENV_NAME\" AND sm.value=tr.gym_name ORDER BY RANDOM() LIMIT 10;\n\n sql=f\"\"\"SELECT Sources.full_path || '/' || '{Tub.images()}' || '/' || TubRecords.image_path as \"cam/image_array\",\n case when edit_angle is not null then edit_angle\n when pilot_angle is not null and user_angle == 0.0 then pilot_angle\n else user_angle end as \"user/angle\",\n case when edit_throttle is not null then edit_throttle\n when pilot_throttle is not null and user_throttle == 0.0 then pilot_throttle\n else user_throttle end as \"user/throttle\",\n TubRecords.pos_cte,\n Tracks.track_id\n FROM TubRecords, Sources, SourceMeta, Tracks\n WHERE TubRecords.source_id = Sources.source_id\n AND Sources.source_id = SourceMeta.source_id\n AND SourceMeta.key = \"DONKEY_GYM_ENV_NAME\"\n AND SourceMeta.value = Tracks.gym_name\n AND TubRecords.pos_cte is not null\n {names}\n {images}\nAND TubRecords.deleted = 0;\"\"\"\n\n df = pd.read_sql_query(sql, conn)\n df['user/angle'] = df['user/angle'].astype(np.float32)\n df['user/throttle'] = df['user/throttle'].astype(np.float32)\n df['pos_cte'] = df['pos_cte'].astype(np.float32)\n df['track_id'] = df['track_id'].astype(np.int64)\n\n return df", "def visualize(self, sql: str, filename=\"mydask.png\") -> None: # pragma: no cover\n result = self.sql(sql, return_futures=True)\n (result,) = optimize(result)\n\n result.visualize(filename)", "def 
get_platform_image_output(location: Optional[pulumi.Input[str]] = None,\n offer: Optional[pulumi.Input[str]] = None,\n publisher: Optional[pulumi.Input[str]] = None,\n sku: Optional[pulumi.Input[str]] = None,\n version: Optional[pulumi.Input[Optional[str]]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetPlatformImageResult]:\n ...", "async def olá(self):\r\n\t\tawait self.client.say('© Maddie 2017')\r\n\t\te = Embed()\r\n\t\te.set_image(url='https://cdn.discovery.pgsitecore.com/en-us/-/media/Olay_PathFinder/Images/a/OLAY%20TE%207IN1%20DEEP%20PENETRATING%20MOISTURE%20BODY%20WASH_Front.png?w=460&v=1-201705260605')\r\n\t\tawait self.client.say(embed=e)", "def docker_image_tag(self, app):\n return f\"briefcase/{app.bundle}.{app.app_name.lower()}:{app.target_vendor}-{app.target_codename}\"", "def do_command(self, args):\n imageops = dbops.Images()\n imageops.add(args)", "def get_image_url():", "def get_adoption_image(self, obj):\n adoption_image = 'Sin imagen'\n if AdoptionImage.objects.filter(adoption_proposal=obj).exists():\n adoption_image = (AdoptionImage.objects\n .filter(adoption_proposal=obj)\n .order_by('-upload_date').first())\n adoption_image = (settings.IMAGE_HOST + adoption_image.image.url)\n return adoption_image", "def get_images_by_vulnerability(self, **kwargs):\n ...", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")", "def image(self) -> Optional[str]:\n return pulumi.get(self, \"image\")" ]
[ "0.5541182", "0.5481096", "0.54725695", "0.52279735", "0.510517", "0.5102536", "0.5078968", "0.5022758", "0.49956042", "0.49684694", "0.49386504", "0.49383932", "0.49115917", "0.4904313", "0.4878855", "0.48654482", "0.486533", "0.48480713", "0.48383203", "0.4795004", "0.47740936", "0.47652513", "0.47640505", "0.47397366", "0.47397366", "0.47397366", "0.47397366", "0.47397366", "0.47397366", "0.47397366" ]
0.70571125
0
SQL Server license type.
def sql_server_license_type(self) -> Optional[str]: return pulumi.get(self, "sql_server_license_type")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def lic_type():\n return VocabularyType.create(id='licenses', pid_type='lic')", "def get_license_type():\n sql = \"\"\"\n SELECT license_type license, description\n FROM license_types_codes\n WHERE can_use = 'Y'\n \"\"\"\n\n # [('P', 'Permanent'), ... ]\n record_set = get_record(sql)\n result = dict(record_set)\n return result", "def License(self, default=None):\n return self.data.get('license', default)", "def license_counting_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"license_counting_type\")", "def is_valid_license_type(self):\n clean = self.license_type.lower().replace('-', ' ')\n return clean not in INVALID_LICENSE_TYPE", "def license_counting_type(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"license_counting_type\")", "def license_counting_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"license_counting_type\")", "def software_license(self) -> str:\n return self.random.choice(LICENSES)", "def license(self): # noqa: A003\n logger.debug(\"Get license\")\n return self._raw_api.license.get()", "def licence_code(self):\r\n return get_licence_code(self.key2, self.pre_code)", "def custom_licenses(self):\n buf = (ctypes.c_char * self.MAX_BUF_SIZE)()\n result = self._dll.JLINK_EMU_GetLicenses(buf, self.MAX_BUF_SIZE)\n if result < 0:\n raise errors.JLinkException(result)\n return ctypes.string_at(buf).decode()", "def license_plate(self) -> str:\n return self.numerify(self.generator.parse(self.random_element(self.license_formats)))", "def get_type(self):\n return \"TDS2024C\"", "def license_key(self):\n # type: () -> string_types\n return self._license_key", "def get_license():\n repo_fs()\n return LICENSE", "def product_type(self):\n pass", "async def get_license(self) -> APIReturn:\n return await self._request(\"GET\", \"/getLicense\")", "def license_number(self):\n return self._license_number", "def LicenseServer(self):\n return self._get_attribute('licenseServer')", "def qs_license():\r\n paragraph = 
document.add_paragraph('')\r\n document.add_heading('License', level=1)\r\n lic_metric = ['lef', 'serial', 'name', 'organization', 'product', 'numberOfCores', 'isExpired', 'expiredReason', 'isBlacklisted', 'isInvalid']\r\n qs_lic = get_qlik_sense.get_license()\r\n num_of_metric = len(qs_lic)\r\n table = document.add_table(rows=num_of_metric+1, cols=2)\r\n table.style = 'Grid Table 1 Light Accent 1'\r\n row = table.rows[0]\r\n row.cells[0].text = 'details'\r\n\r\n for metric in range(len(qs_lic)):\r\n row = table.rows[metric+1]\r\n row.cells[0].text = str(lic_metric[metric])\r\n row.cells[1].text = str(qs_lic[metric][0])\r\n document.add_page_break()", "def getLicense(self, resource):\n\n if isinstance(resource, int):\n resource = 'licenses/{0}'.format(resource)\n\n res = self.getRequest(resource)\n if res:\n license = vsdModels.License(**res)\n\n return license\n else:\n return None", "def get_licence(self, _return):\n return _return.licence.licence_number", "def get_license_string(self):\n output = ''\n if self.license_id:\n output += '{}'.format(self.license_id)\n if self.license_creation_date:\n output += ' (Created {})'.format(self.license_creation_date)\n if self.license_type:\n output += ' {}'.format(self.license_type)\n if self.license_status:\n output += ' - {}'.format(self.license_status)\n return output", "def show_license(ctx, param, value):\n if not value or ctx.resilient_parsing:\n return\n click.echo(lic)\n ctx.exit()", "def get_license_fullname(license_type):\n lic = get_license_type()\n return lic[license_type]", "def license(*args, borrow: bool=True, info: bool=True, isBorrowed: bool=True, isExported:\n bool=True, isTrial: bool=True, licenseMethod: bool=True, productChoice: bool=True,\n r: bool=True, showBorrowInfo: bool=True, showProductInfoDialog: bool=True, status:\n bool=True, usage: bool=True, **kwargs)->AnyStr:\n pass", "def validate_license_model(license_model):\n\n VALID_LICENSE_MODELS = (\n \"license-included\",\n 
\"bring-your-own-license\",\n \"general-public-license\",\n \"postgresql-license\",\n )\n\n if license_model not in VALID_LICENSE_MODELS:\n raise ValueError(\n \"DBInstance LicenseModel must be one of: %s\"\n % \", \".join(VALID_LICENSE_MODELS)\n )\n return license_model", "def licenses(self) -> Sequence[str]:\n return pulumi.get(self, \"licenses\")", "def license(self, license):\n\n self._license = license", "def isLicensed(self):\n\t\treturn True" ]
[ "0.7388876", "0.6914536", "0.65520567", "0.6494581", "0.64759284", "0.6462194", "0.64542824", "0.61641365", "0.61452985", "0.6031865", "0.5985203", "0.59722406", "0.59193486", "0.5915004", "0.5913919", "0.5904301", "0.5891602", "0.5890338", "0.5859811", "0.5828259", "0.5734647", "0.57031435", "0.5694209", "0.5681191", "0.56379426", "0.5637394", "0.56319577", "0.56251264", "0.556714", "0.5564699" ]
0.83057636
0
ARM resource id of the SQL virtual machine group this SQL virtual machine is or will be part of.
def sql_virtual_machine_group_resource_id(self) -> Optional[str]: return pulumi.get(self, "sql_virtual_machine_group_resource_id")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> str:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group_id\")", "def resource_group(self) -> str:\n return pulumi.get(self, \"resource_group\")", "def virtual_machine_resource_id(self) -> Optional[str]:\n 
return pulumi.get(self, \"virtual_machine_resource_id\")", "def resource_group(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group\")", "def resource_group(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"resource_group\")", "def get_resource_group_name(self):\n return self.instance_metadata.resource_group_name", "def resource_group(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_group\")", "def node_resource_group(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"node_resource_group\")", "def resource_group_name(self) -> str:\n return pulumi.get(self, \"resource_group_name\")", "def group_id(self) -> str:\n return pulumi.get(self, \"group_id\")", "def group_id(self) -> str:\n return pulumi.get(self, \"group_id\")", "def id(self):\n return self._group", "def get_resource_group_name(self) -> str:\n # read the original value passed by the command\n resource_group_name = self.raw_param.get(\"resource_group_name\")\n\n # this parameter does not need dynamic completion\n # this parameter does not need validation\n return resource_group_name" ]
[ "0.776278", "0.776278", "0.776278", "0.776278", "0.776278", "0.75804204", "0.7398866", "0.7398866", "0.7398866", "0.73647386", "0.7292113", "0.7292113", "0.7292113", "0.7292113", "0.7292113", "0.7292113", "0.7292113", "0.7292113", "0.71181333", "0.68674904", "0.67758405", "0.67758405", "0.6764885", "0.6712512", "0.663038", "0.65331", "0.65239626", "0.65239626", "0.65018976", "0.63363224" ]
0.8492074
0
ARM Resource id of underlying virtual machine created from SQL marketplace image.
def virtual_machine_resource_id(self) -> Optional[str]: return pulumi.get(self, "virtual_machine_resource_id")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def sql_virtual_machine_group_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"sql_virtual_machine_group_resource_id\")", "def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> str:\n return pulumi.get(self, \"resource_id\")", "def resourceid(self):", "def id(self):\n return self.raw_resource.uuid", "def get_image_ref() -> str:\n images_rq = request(\n method=\"GET\", url=app.config[\"IMAGE_REF\"], headers=build_header(),\n )\n if not images_rq.ok:\n HTTPError(f\"Can not get image id for virtual machine: {images_rq.status_code}\")\n\n [image] = images_rq.json()[\"images\"]\n return image[\"id\"]", "def remote_volume_resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"remote_volume_resource_id\")", "def get_self_instance_id():\n\n logging.debug('get_self_instance_id()')\n response = urllib2.urlopen('http://169.254.169.254/1.0/meta-data/instance-id')\n instance_id = response.read()\n return instance_id", "def resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"resource_id\")", "def runtime_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"runtime_id\")", "def get_vm_id(self):\n return self.instance_metadata.vm_id", "def vm_id(self):\n return self.vm_info.get('id', 'Error retrieving ID')", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, 
\"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"resource_id\")", "def getId(self):\n return self.__vmId", "def target_resource_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"target_resource_id\")", "def target_resource_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"target_resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")", "def resource_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"resource_id\")" ]
[ "0.67559993", "0.6314548", "0.6314548", "0.6314548", "0.62932324", "0.6195527", "0.61022866", "0.60891443", "0.6060182", "0.60245854", "0.59726673", "0.5938371", "0.5903513", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.58927864", "0.5865102", "0.5850114", "0.5850114", "0.58449525", "0.58449525", "0.58449525", "0.58449525" ]
0.77778864
0
Gets a SQL virtual machine.
def get_sql_virtual_machine(expand: Optional[str] = None, resource_group_name: Optional[str] = None, sql_virtual_machine_name: Optional[str] = None, opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSqlVirtualMachineResult: __args__ = dict() __args__['expand'] = expand __args__['resourceGroupName'] = resource_group_name __args__['sqlVirtualMachineName'] = sql_virtual_machine_name opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts) __ret__ = pulumi.runtime.invoke('azure-native:sqlvirtualmachine/v20230101preview:getSqlVirtualMachine', __args__, opts=opts, typ=GetSqlVirtualMachineResult).value return AwaitableGetSqlVirtualMachineResult( assessment_settings=pulumi.get(__ret__, 'assessment_settings'), auto_backup_settings=pulumi.get(__ret__, 'auto_backup_settings'), auto_patching_settings=pulumi.get(__ret__, 'auto_patching_settings'), enable_automatic_upgrade=pulumi.get(__ret__, 'enable_automatic_upgrade'), id=pulumi.get(__ret__, 'id'), identity=pulumi.get(__ret__, 'identity'), key_vault_credential_settings=pulumi.get(__ret__, 'key_vault_credential_settings'), least_privilege_mode=pulumi.get(__ret__, 'least_privilege_mode'), location=pulumi.get(__ret__, 'location'), name=pulumi.get(__ret__, 'name'), provisioning_state=pulumi.get(__ret__, 'provisioning_state'), server_configurations_management_settings=pulumi.get(__ret__, 'server_configurations_management_settings'), sql_image_offer=pulumi.get(__ret__, 'sql_image_offer'), sql_image_sku=pulumi.get(__ret__, 'sql_image_sku'), sql_management=pulumi.get(__ret__, 'sql_management'), sql_server_license_type=pulumi.get(__ret__, 'sql_server_license_type'), sql_virtual_machine_group_resource_id=pulumi.get(__ret__, 'sql_virtual_machine_group_resource_id'), storage_configuration_settings=pulumi.get(__ret__, 'storage_configuration_settings'), system_data=pulumi.get(__ret__, 'system_data'), tags=pulumi.get(__ret__, 'tags'), troubleshooting_status=pulumi.get(__ret__, 'troubleshooting_status'), 
type=pulumi.get(__ret__, 'type'), virtual_machine_resource_id=pulumi.get(__ret__, 'virtual_machine_resource_id'), wsfc_domain_credentials=pulumi.get(__ret__, 'wsfc_domain_credentials'), wsfc_static_ip=pulumi.get(__ret__, 'wsfc_static_ip'))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def get_vm(**kwargs):\n model = self.db.vm_table_from_provider('openstack')\n vm = self.db.select(model, **kwargs).all()\n assert len(vm) == 1, vm\n vm = vm[0]\n return vm", "def get_sql_virtual_machine_output(expand: Optional[pulumi.Input[Optional[str]]] = None,\n resource_group_name: Optional[pulumi.Input[str]] = None,\n sql_virtual_machine_name: Optional[pulumi.Input[str]] = None,\n opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSqlVirtualMachineResult]:\n ...", "def virtual_machine(self) -> pulumi.Output['outputs.VirtualMachineResponse']:\n return pulumi.get(self, \"virtual_machine\")", "def virtual_machine(self) -> Optional[pulumi.Input['VirtualMachineArgs']]:\n return pulumi.get(self, \"virtual_machine\")", "def default_virtual_machine(self):\n return self._default_virtual_machine", "def _get_vm(name=None, session=None):\n if session is None:\n session = _get_session()\n vms = session.xenapi.VM.get_by_name_label(name)\n vms = [x for x in vms if not session.xenapi.VM.get_is_a_template(x)]\n if len(vms) == 1:\n return vms[0]\n else:\n log.error(\"VM %s returned %s matches. 
1 match expected.\", name, len(vms))\n return None", "def get_vm_by_name(self, name=None):\n\n vm_obj = self.get_obj(self.connect.RetrieveContent(), [vim.VirtualMachine], name)\n if vm_obj:\n return vm_obj\n else:\n print(\"VMUNAVAILABLE(NAME)\")\n # raise VMUnavaiable(name)", "def get_vm(client, resource_group_name, vm_name):\n return client.get(resource_group_name, vm_name)", "def get_vm_by_name(options, vm_name):\n dc = get_datacenter(options)\n vmFolder = dc.GetVmFolder()\n users_folder = find_entity_by_name(vmFolder, 'users')\n current_user = os.environ[\"USER\"]\n #current_user = pwd.getpwuid(os.getuid())[0]\n user_folder = find_entity_by_name(users_folder, current_user)\n vm = find_entity_by_name(user_folder, vm_name)\n if vm is not None:\n print \"Found VM with name: %s\" % vm_name\n return vm", "def microvm(self):\n return self._context.get(\"microvm\", None)", "def get_backend_vm(self, vm):\n return self._get_backend_vm(vm.backend_id)", "def get_vm(client, vm_name):\n names = set([vm_name])\n vms = client.vcenter.VM.list(VM.FilterSpec(names=names))\n if len(vms) == 0:\n print(\"VM with name ({}) not found\".format(vm_name))\n return None\n vm = vms[0].vm\n print(\"Found VM '{}' ({})\".format(vm_name, vm))\n return vm", "def get_machine(self, name):\n\n return self._machine_manager.get_machine(name)", "def refresh_vm(context, vm):\n vapp = vm.getVirtualAppliance()\n return vapp.getVirtualMachine(vm.getId())", "def get_virtual_machines(_id):\n url = '/%s' % str(_id)\n return atmosphere.tools.create_req(url=url)", "def get_vm(self, vm_name):\n matches = self.find_vms(name=vm_name)\n if not matches:\n raise VMInstanceNotFound('vm with name {}'.format(vm_name))\n if len(matches) > 1:\n raise MultipleItemsError('multiple VMs with name {}'.format(vm_name))\n return matches[0]", "def sql_virtual_machine_group_resource_id(self) -> Optional[str]:\n return pulumi.get(self, \"sql_virtual_machine_group_resource_id\")", "def AptGetServiceName(vm):\n del vm\n return 
'mysql'", "def select_vm_id(uuid):\n return IMPL.select_vm_id(uuid)", "def get_vm_ip(options, name):\n try:\n vm = get_vm_by_name(options, name)\n except Exception, error:\n vm = None\n print \"get_vm_by_name errored: %r\" % error\n return None\n if vm is None:\n print \"Not able to find vm %s, probably never deployed\" % name\n return None\n summary = vm.summary\n # check if the vm is powered on\n if summary.runtime.powerState != 'poweredOn':\n print (\"VM %s not in powerdOn state: %s. Powering on the VM\" %\n (name, vm.runtime.powerState))\n vm.PowerOnVM_Task()\n timeout = 1200\n sleepTime = 10\n while timeout > 0:\n print (\"Getting vmware tools status for vm %s\\n\" % name)\n if vm.guest.toolsRunningStatus == 'guestToolsRunning':\n break\n time.sleep(sleepTime)\n timeout = (timeout - sleepTime)\n\n # look for all ip addresses exposed by the guest\n # and select the one that is reachable\n for net_info in vm.guest.net:\n if net_info == None or net_info.ipConfig == None:\n # TODO: using print as this module is yet to be integrated with\n # logger\n print \"net_info not defined, probably tools not running\\n\"\n break\n for entry in net_info.ipConfig.ipAddress:\n match = re.match(\"\\d+.\\d+.\\d+.\\d+\", entry.ipAddress)\n if match:\n result = os.system(\"ping -c 1 -t 60 %s 2>&1 > /dev/null\" \\\n % entry.ipAddress)\n if result == 0:\n print \"Found IP in vm.guest.net.ipConfig=%s\" % entry.ipAddress\n return entry.ipAddress\n\n # check if summary has ip address information\n if summary.guest.ipAddress and summary.guest.ipAddress != '127.0.0.1':\n print \"Found IP in vm.summary.guest.ipAddress=%s\" % summary.guest.ipAddress\n return summary.guest.ipAddress\n # as last option, rely on ip address written under annotation\n note = yaml.load(summary.config.annotation)\n if 'ip' in note:\n print \"Found IP in vm.summary.config.annotation=%s\" % note['ip']\n return note['ip']\n\n print \"No ip address found for vm %s\\n\" % name\n return None", "def get(self, 
vm_name=None):\n data = []\n #if no vm name get all\n if vm_name == None:\n #get list of all inactive and active\n vm_defined_list = self.conn.listDefinedDomains()\n vm_active_list = self.conn.listDomainsID()\n #iterate over these lists and get some info on each domain\n for vmid in vm_active_list:\n dom = self.conn.lookupByID(vmid)\n data.append(dom.XMLDesc(3))\n for name in vm_defined_list:\n dom = self.conn.lookupByName(name)\n data.append(dom.XMLDesc(3))\n else:\n vm_name = common.validate_hostname(vm_name)\n try:\n dom = self.conn.lookupByName(vm_name)\n except libvirt.libvirtError:\n result = common.process_results(data, 'VM')\n self.log.debug('Result: %s' % result)\n return result\n info = dom.XMLDesc(3)\n data.append(info)\n #self.conn.close() # Connection closing left to calling app - bad?\n result = common.process_results(data, 'VM')\n self.log.debug('Result: %s' % result)\n return result", "def get_vm_executor(vm_name):\n logger.info(\"Get IP from VM %s\", vm_name)\n vm_ip = get_vm_ip(vm_name)\n logger.info(\"Create VM instance with root user from vm with ip %s\", vm_ip)\n return rhevm_helpers.get_host_executor(\n ip=vm_ip, password=config.VMS_LINUX_PW\n )", "def get(self, psvm):\n return self._get('/os-psvm/%s' % (base.getid(psvm)), \"psvm\")", "def machine():\n return uname().machine", "def machine():\n return uname().machine", "def select_sql(command):\n logging.debug(\"Running Select sql \"+str(command))\n try:\n## host, userid, password, database instance\n con = mdb.connect(serverip, username, userpass, schema);\n cursor = con.cursor()\n \n sql = command\n cursor.execute(sql)\n return cursor.fetchall()\n \n con.close()\n\n except mdb.Error, e:\n logger.error(e)", "def create_virtual_machine(self, vm):\n if vm.template:\n backend_id = self.create_virtual_machine_from_template(vm)\n else:\n backend_id = self.create_virtual_machine_from_scratch(vm)\n\n try:\n backend_vm = self.client.get_vm(backend_id)\n except VMwareError as e:\n raise 
VMwareBackendError(e)\n\n vm.backend_id = backend_id\n vm.runtime_state = backend_vm['power_state']\n vm.save(update_fields=['backend_id', 'runtime_state'])\n\n for disk in backend_vm['disks']:\n disk = self._backend_disk_to_disk(disk['value'], disk['key'])\n disk.vm = vm\n disk.service_settings = vm.service_settings\n disk.project = vm.project\n disk.save()\n\n # If virtual machine is not deployed from template, it does not have any networks.\n # Therefore we should create network interfaces manually according to VM spec.\n if not vm.template:\n for network in vm.networks.all():\n try:\n self.client.create_nic(vm.backend_id, network.backend_id)\n except VMwareError as e:\n raise VMwareBackendError(e)\n\n signals.vm_created.send(self.__class__, vm=vm)\n return vm", "def virtual_machines(self):\n return self._virtual_machines", "def create_vm(self):\n\t\treturn handle_to_object(call_sdk_function('PrlSrv_CreateVm', self.handle))", "def vmware(self) -> Optional[pulumi.Input['VirtualNetworksPropertiesVmwareArgs']]:\n return pulumi.get(self, \"vmware\")" ]
[ "0.7051682", "0.6846531", "0.65177065", "0.6314297", "0.6176782", "0.6015212", "0.5942766", "0.58022934", "0.5735303", "0.5688736", "0.5669816", "0.5563412", "0.5531041", "0.5528453", "0.548521", "0.54187983", "0.53782344", "0.53499717", "0.52770954", "0.52686584", "0.52650195", "0.52603865", "0.5258824", "0.5208759", "0.5208759", "0.51744944", "0.517098", "0.5150188", "0.5149406", "0.5144729" ]
0.7139321
0
Densitydependent downsampling to a sample size smaller or equal to a specified value. This approach downsamples a point cloud such that regions of higher density are less likely to be sampled from. The resulting cloud therefore has a more uniform density. The approach used here is directly inspired by SPADE [ref 1]. Note that the number of points sampled by this approach scales with the size of the input cloud. A random subsampling step is included at the end of the process to bring the result down to the specified sample size. However, if the specified sample size is not small compared to the input cloud size, the downsampling may result in fewer points, so the final size of the output cloud is not guaranteed to be the specified sample size.
def ddds(cloud, sample_size, presample=None, processes=10):
    """Density-dependent downsampling of a point cloud.

    Points in dense regions are less likely to be kept, which yields a more
    uniform density in the output (approach directly inspired by SPADE).
    A final random subsampling step caps the result at ``sample_size``
    points; note the output may legitimately contain fewer points.

    Parameters
    ----------
    cloud : numpy.ndarray, shape (n_points, n_dims)
        Input point cloud.
    sample_size : int
        Upper bound on the number of points returned.
    presample : int or None, optional
        If given, nearest-neighbor distances are estimated on a random
        subsample of this size (for speed/memory efficiency); otherwise the
        full cloud is used.
    processes : int, optional
        Number of workers used for the KD-tree queries.

    Returns
    -------
    numpy.ndarray
        Downsampled point cloud with at most ``sample_size`` points.
    """

    #--------------------------------------------------------------------------

    ### Prep

    # Handle small point clouds: nothing to do if already small enough.
    if cloud.shape[0] <= sample_size:
        warn("(code 1) Point cloud is already <= desired sample size. " +
             "No subsampling is performed.")
        return cloud

    #--------------------------------------------------------------------------

    ### Compute per-landmark local densities

    # Subsample randomly (for speed/memory efficiency)
    if presample is not None:
        cloud_presubs = random_subsample(cloud, presample)
    else:
        cloud_presubs = np.copy(cloud)

    # Compute distance of each subsampled point to the closest other point
    # Note: `k=2` is necessary since `k=1` is the point itself.
    # FIX: `workers` replaces the `n_jobs` keyword, which was deprecated in
    # SciPy 1.6.0 and removed in SciPy 1.9.0 (the old spelling raises a
    # TypeError on current SciPy).
    tree = cKDTree(cloud)
    NN_dists = tree.query(cloud_presubs, k=2, workers=processes)[0][:,1]

    # Get the size of the local neighborhood
    # which is `alpha * median(smallest_distances)`,
    # where a good value for alpha is 5 according to SPADE
    alpha = 5
    NN_size = alpha * np.median(NN_dists)

    # Get the local density (LD) of each landmark
    # ...which is the number of other landmarks in its local neighborhood
    LDs = tree.query_ball_point(cloud, NN_size, workers=processes)  # Get indices
    LDs = np.vectorize(len)(LDs)                                    # Count

    # Define the target density (TD)
    # Note: Good values according to SPADE: the 3rd or 5th percentile of LDs
    # Note: This effectively defines how strongly the data will be subsampled
    TD_percentile = 3
    TD = np.percentile(LDs, TD_percentile)

    #--------------------------------------------------------------------------

    ### Perform density-dependent subsampling

    # Create p(keep_lm) probability vector
    # Note: For each point i, the probability of keeping it is
    #       { 1         if LD_i < TD
    #       { TD / LD_i otherwise
    p_keep = TD / LDs
    p_keep[LDs<TD] = 1

    # Randomly decide if a landmark should be kept according to p(keep_lm)
    rand = np.random.uniform(size=cloud.shape[0])
    keep = p_keep >= rand

    # Index the lms to be kept
    cloud_ddds = cloud[keep,:]

    #--------------------------------------------------------------------------

    ### Further random downsampling

    # Note: This ensures that the downsampled cloud does not grow with the
    #       input data and instead is of the specified sample_size or smaller.

    if cloud_ddds.shape[0] > sample_size:
        cloud_ddds = random_subsample(cloud_ddds, sample_size)

    #--------------------------------------------------------------------------

    ### Return result

    return cloud_ddds
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def random_subsample(cloud, sample_size, replace=False):\n\n # Handle small point clouds\n if cloud.shape[0] <= sample_size:\n warn(\"(code 1) Point cloud is already <= desired sample size. \" +\n \"No subsampling is performed.\")\n return cloud\n\n # Perform subsamping\n sample_indices = np.random.choice(np.arange(cloud.shape[0]),\n sample_size, replace=False)\n cloud_subs = cloud[sample_indices]\n\n # Return result\n return cloud_subs", "def upsample_nearest(input, size=None, scale_factor=None):\n return interpolate(input, size, scale_factor, 'nearest')", "def upsampleImage( arr, kernelSize ):\n return scipy.ndimage.zoom( arr, kernelSize )", "def downsample_x2(x, fsize=5, sigma=1.4):\n\n\n #\n # Your code here\n #\n\n G = gaussian_kernel(fsize, sigma)\n g_img = convolve2d(x, G, mode='same', boundary='symm')\n x = g_img[0::2, 0::2]\n\n return x", "def downsample_sam(self, factor):", "def upsample_filt(size):\r\n factor = (size + 1) // 2\r\n if size % 2 == 1:\r\n center = factor - 1\r\n else:\r\n center = factor - 0.5\r\n og = np.ogrid[:size, :size]\r\n return (1 - abs(og[0] - center) / factor) * \\\r\n (1 - abs(og[1] - center) / factor)", "def _down_sample(self):\n self._subsamples = self._raw_data.samples[::self._down_sample_factor]\n # Neglects the redundant subsamples in the tails.\n if len(self._subsamples) >= self._number_of_subsamples:\n self._subsamples = self._subsamples[:self._number_of_subsamples]\n if not len(self._subsamples) == self._number_of_subsamples:\n raise WaveformError(\n 'Number of subsample is %r, while %r is expected' % (\n len(self._subsamples), self._number_of_subsamples))\n logging.debug('down-samples: %r', self._subsamples)", "def test_get_mesh_grid_as_point_cloud_downsample() -> None:\n min_x = -3 # integer, minimum x-coordinate of 2D grid\n max_x = 0 # integer, maximum x-coordinate of 2D grid\n min_y = 2 # integer, minimum y-coordinate of 2D grid\n max_y = 5 # integer, maximum y-coordinate of 2D grid\n\n # return pts, a Numpy array 
of shape (N,2)\n pts = mesh_grid_utils.get_mesh_grid_as_point_cloud(\n min_x, max_x, min_y, max_y, downsample_factor=3.0\n )\n\n assert pts.shape == (4, 2)\n\n # fmt: off\n gt_pts: List[List[float]] = [\n [-3.0, 2.0],\n [0.0, 2.0],\n [-3.0, 5.0],\n [0.0, 5.0]\n ]\n # fmt: on\n assert np.allclose(gt_pts, pts)", "def upsample(\n input,\n size=None,\n scale_factor=None,\n mode='nearest',\n align_corners=False,\n):\n return interpolate(input, size, scale_factor, mode, align_corners)", "def get_upsample_filter(size):\n factor = (size + 1) // 2\n if size % 2 == 1:\n center = factor - 1\n else:\n center = factor - 0.5\n og = np.ogrid[:size, :size]\n filter = (1 - abs(og[0] - center) / factor) * \\\n (1 - abs(og[1] - center) / factor)\n return torch.from_numpy(filter).float()", "def get_crop_numpy(im: np.ndarray, pos: np.ndarray, sample_sz: np.ndarray, output_sz: np.ndarray = None,\n mode: str = 'constant', avg_chans=(0, 0, 0), max_scale_change=None):\n\n # if mode not in ['replicate', 'inside']:\n # raise ValueError('Unknown border mode \\'{}\\'.'.format(mode))\n\n # copy and convert\n posl = pos.astype(np.int).copy()\n\n # Get new sample size if forced inside the image\n if mode == 'inside' or mode == 'inside_major':\n pad_mode = 'replicate'\n # im_sz = torch.tensor([im.shape[2], im.shape[3]], device=im.device)\n # shrink_factor = (sample_sz.float() / im_sz)\n im_sz = np.array([im.shape[0], im.shape[1]])\n shrink_factor = (sample_sz.astype(np.float) / im_sz)\n if mode == 'inside':\n shrink_factor = shrink_factor.max()\n elif mode == 'inside_major':\n shrink_factor = shrink_factor.min()\n shrink_factor.clamp_(min=1, max=max_scale_change)\n # sample_sz = (sample_sz.float() / shrink_factor).long()\n sample_sz = (sample_sz.astype(np.float) / shrink_factor).astype(np.int)\n\n # Compute pre-downsampling factor\n if output_sz is not None:\n # resize_factor = torch.min(sample_sz.float() / output_sz.float()).item()\n resize_factor = np.min(sample_sz.astype(np.float) / 
output_sz.astype(np.float)).item()\n df = int(max(int(resize_factor - 0.1), 1))\n else:\n df = int(1)\n\n # sz = sample_sz.float() / df # new size\n sz = sample_sz.astype(np.float) / df\n\n # Do downsampling\n if df > 1:\n os = posl % df # offset\n posl = (posl - os) // df # new position\n im2 = im[os[0].item()::df, os[1].item()::df, :] # downsample\n else:\n im2 = im\n\n # compute size to crop\n # szl = torch.max(sz.round(), torch.tensor([2.0], dtype=sz.dtype, device=sz.device)).long()\n szl = np.maximum(np.round(sz), 2.0).astype(np.int)\n\n # Extract top and bottom coordinates\n tl = posl - (szl - 1) // 2\n br = posl + szl // 2 + 1\n\n # Shift the crop to inside\n if mode == 'inside' or mode == 'inside_major':\n # im2_sz = torch.LongTensor([im2.shape[2], im2.shape[3]])\n # shift = (-tl).clamp(0) - (br - im2_sz).clamp(0)\n im2_sz = np.array([im2.shape[0], im2.shape[1]], dtype=np.int)\n shift = np.clip(-tl, 0) - np.clip(br - im2_sz, 0)\n tl += shift\n br += shift\n\n # outside = ((-tl).clamp(0) + (br - im2_sz).clamp(0)) // 2\n # shift = (-tl - outside) * (outside > 0).long()\n outside = (np.clip(-tl, 0) - np.clip(br - im2_sz, 0)) // 2\n shift = (-tl - outside) * (outside > 0).astype(np.int)\n tl += shift\n br += shift\n\n # Get image patch\n # im_patch = im2[...,tl[0].item():br[0].item(),tl[1].item():br[1].item()]\n\n crop_xyxy = np.array([tl[1], tl[0], br[1], br[0]])\n # warpAffine transform matrix\n M_13 = crop_xyxy[0]\n M_23 = crop_xyxy[1]\n M_11 = (crop_xyxy[2] - M_13) / (output_sz[0] - 1)\n M_22 = (crop_xyxy[3] - M_23) / (output_sz[1] - 1)\n mat2x3 = np.array([\n M_11,\n 0,\n M_13,\n 0,\n M_22,\n M_23,\n ]).reshape(2, 3)\n im_patch = cv2.warpAffine(im2,\n mat2x3, (output_sz[0], output_sz[1]),\n flags=(cv2.INTER_LINEAR | cv2.WARP_INVERSE_MAP),\n borderMode=cv2.BORDER_CONSTANT,\n borderValue=tuple(map(int, avg_chans)))\n # Get image coordinates\n patch_coord = df * np.concatenate([tl, br]).reshape(1, 4)\n scale = output_sz / (np.array([br[1] - tl[1] + 1, br[0] - 
tl[0] + 1]) * df)\n return im_patch, patch_coord, scale", "def downsample(self, number):\n for num, ss in enumerate(self.samples):\n self.samples[num], self.extra_kwargs[num] = _downsample(\n ss, number, extra_kwargs=self.extra_kwargs[num]\n )", "def down_sampling(record, down_sampling_factor=16):\n\n if len(record.shape) == 1:\n return record[slice(0, record.shape[0], down_sampling_factor)]\n else:\n row_idx = np.arange(record.shape[0])\n col_idx = np.arange(0, record.shape[1], down_sampling_factor)\n\n return record[np.ix_(row_idx, col_idx)]", "def downsample2d(inputArray, kernelSize):\n average_kernel = np.ones((kernelSize,kernelSize))\n\n blurred_array = sig.convolve2d(inputArray, average_kernel, mode='same')\n downsampled_array = blurred_array[::kernelSize,::kernelSize]\n return downsampled_array", "def _downsample(x):\n return nn.AvgPool2d(kernel_size=2)(x)", "def upsample(x):\n return F.interpolate(x, scale_factor=2, mode=\"nearest\")", "def sample(self, size):\n if len(self.observations) == 0:\n raise ValueError(\"need to fit KDE with observations\")\n\n samples = np.zeros(size)\n for i in range(size):\n \n # randomly select an anchor point for a gaussian\n anchor_point = np.random.choice(self.observations)\n\n # then sample from that gaussian\n sample = np.random.normal(loc=anchor_point, scale=self.bandwidth)\n samples[i] = sample\n return samples", "def resample(self, size, prior=None,\n xrange=None, rand_nsample=1e3, **kwargs):\n if prior is None:\n from scipy.stats import rv_continuous\n return self.rvdist.rvs(size) if rv_continuous in self.rvdist.__class__.__mro__ \\\n else self.rvdist.rvs(size, xrange=xrange, nsample=rand_nsample)\n\n # -----------\n # - Inputs\n if xrange is None or len(xrange) != 2:\n # -- it is not a kde\n xrange = self._default_sampling_xrange\n \n # - The random sampler seed\n x = np.linspace(xrange[0],xrange[1], int(rand_nsample))\n # the new pdf\n \n pdf = self.pdf(x) * prior(x, **kwargs)\n return np.random.choice(x, p= pdf / 
pdf.sum(), size=size)", "def rvs(kde, size, xrange=None, nsample=1e3):\n # faster than resample\n if xrange is None:\n scale = np.nanmax(kde.dataset) - np.nanmin(kde.dataset)\n xrange = np.nanmin(kde.dataset) - scale*0.1, np.nanmax(kde.dataset) + scale*0.1\n \n x = np.linspace(xrange[0], xrange[1], int(nsample))\n return np.random.choice(x, p= kde.pdf(x) / kde.pdf(x).sum(), size=size)", "def crop_and_downsample(originalX, downsample_size=32):\n current_dim = 250\n target_dim = 128\n margin = int((current_dim - target_dim) / 2)\n left_margin = margin\n right_margin = current_dim - margin\n\n # newim is shape (6, 128, 128)\n newim = originalX[:, left_margin:right_margin, left_margin:right_margin]\n\n # resized are shape (feature_width, feature_height, 3)\n feature_width = feature_height = downsample_size\n resized1 = imresize(newim[0:3, :, :], (feature_width, feature_height), interp=\"bicubic\", mode=\"RGB\")\n resized2 = imresize(newim[3:6, :, :], (feature_width, feature_height), interp=\"bicubic\", mode=\"RGB\")\n\n # re-packge into a new X entry\n newX = np.concatenate([resized1, resized2], axis=2)\n\n # the next line is EXTREMELY important.\n # if you don't normalize your data, all predictions will be 0 forever.\n newX = newX / 255.0\n\n return newX", "def downsample(state):\n return state[::2, ::2, :]", "def downsample_segmentation(data, factor, sparse=False):\n if len(factor) == 4:\n assert factor[3] == 1\n factor = factor[:3]\n\n factor = np.array(factor)\n if np.all(np.array(factor, int) == 1):\n return data\n\n if data.dtype.kind not in ('u', 'i'): # integer types\n return downsample_with_striding(data, tuple(factor))\n\n is_pot = lambda x: (x != 0) and not (x & (x - 1)) # is power of two\n is_twod_pot_downsample = np.any(factor == 1) and is_pot(\n reduce(operator.mul, factor))\n is_threed_pot_downsample = not np.any(factor == 1) and is_pot(\n reduce(operator.mul, factor))\n\n shape3d = np.array(data.shape[:3])\n modulo_shape = shape3d % 2\n # it's possible 
to write a 3d even to odd to make this\n # work for all data shapes.\n if is_threed_pot_downsample and sum(\n modulo_shape) == 0: # power of two downsample on an even shape\n return downsample_segmentation(countless3d(data), factor / 2)\n\n if not is_twod_pot_downsample:\n return downsample_with_striding(data, tuple(factor))\n\n return downsample_segmentation_2d(data, factor, sparse)", "def get_deconv_outsize(size, k, s, p, cover_all=False, d=1):\r\n dk = (k - 1) * d + 1\r\n if cover_all:\r\n return s * (size - 1) + dk - s + 1 - 2 * p\r\n else:\r\n return s * (size - 1) + dk - 2 * p", "def sample_kde(self, kde, num_samples=100, seed=0):\n return kde.sample(num_samples, random_state=seed)", "def downsample(self, number):\n self.samples, self.extra_kwargs = _downsample(\n self.samples, number, extra_kwargs=self.extra_kwargs\n )", "def shrink_kernel(self, kernel, up_scale):\n up_scale = torch.tensor(up_scale).float()\n # boundary padding based on the scaling law\n pad_in = (torch.ceil(up_scale**2).int())*((kernel.shape[2]-1)//2)\n pad_h = (torch.ceil(up_scale).int())*((kernel.shape[3]-1)//2)\n pad_w = (torch.ceil(up_scale).int())*((kernel.shape[4]-1)//2)\n padded_kernel = F.pad(kernel, (pad_w, pad_w, pad_h, pad_h, pad_in, pad_in))\n delta = up_scale%1\n \n if delta == 0:\n shrink_factor = 1\n else:\n # shrink_factor for coordinates.\n shrink_factor = (((kernel.shape[4]-1))/(padded_kernel.shape[-1]-1)*(up_scale+1))\n \n # Adjustment to deal with weird filtering on the grid sample function.\n shrink_factor = 1.5*(shrink_factor-0.5)**3 + 0.57 \n\n grid = torch.meshgrid(torch.linspace(-1, 1, kernel.shape[2])*(shrink_factor**2),\n torch.linspace(-1, 1, kernel.shape[3])*shrink_factor, \n torch.linspace(-1, 1, kernel.shape[4])*shrink_factor)\n\n grid = torch.cat([grid[2].unsqueeze(0).unsqueeze(-1), \n grid[1].unsqueeze(0).unsqueeze(-1), \n grid[0].unsqueeze(0).unsqueeze(-1)], dim = -1).repeat(kernel.shape[0],1,1,1,1)\n\n new_kernel = F.grid_sample(padded_kernel, 
grid.to(device))\n if kernel.shape[-1] - 2*up_scale > 0:\n new_kernel = new_kernel * (kernel.shape[-1]**2/((kernel.shape[-1] - 2*up_scale)**2 + 0.01))\n return new_kernel", "def upsample_2d(x, size=(2, 2)):\n h, w, _ = x.get_shape().as_list()[1:]\n size_x, size_y = size\n output_h = h * size_x\n output_w = w * size_y\n return tf.image.resize_bilinear(x, (output_h, output_w), align_corners=None, name='upsampling')", "def rvs(kde, size, xrange=None, nsample=1e3):\n if xrange is None:\n scale = np.nanmax(kde.dataset) - np.nanmin(kde.dataset)\n xrange = np.nanmin(kde.dataset) - scale*0.1, np.nanmax(kde.dataset) + scale*0.1\n \n x = np.linspace(xrange[0], xrange[1], int(nsample))\n return np.random.choice(x, p= kde.pdf(x) / kde.pdf(x).sum(), size=size)", "def straight_prune_subsample(neuron, number_of_nodes):\n if(neuron.n_node > 200):\n neuron, distance = straight_subsample_with_fixed_number(neuron, 200)\n sp_neuron, state = prune(neuron=neuron,\n threshold=2*distance,\n lowest_number=number_of_nodes)\n while(~state):\n distance += 1\n sp_neuron = straigh_subsample(neuron, distance)\n sp_neuron, state = prune(neuron=sp_neuron,\n threshold=2*distance,\n lowest_number=number_of_nodes)\n return sp_neuron", "def _downsample(samples, number, extra_kwargs=None):\n from pesummary.utils.utils import resample_posterior_distribution\n import copy\n\n _samples = np.array(samples).T\n if number > len(_samples[0]):\n raise ValueError(\n \"Failed to downsample the posterior samples to {} because \"\n \"there are only {} samples stored in the file.\".format(\n number, len(_samples[0])\n )\n )\n _samples = np.array(resample_posterior_distribution(_samples, number))\n if extra_kwargs is None:\n return _samples.T.tolist()\n _extra_kwargs = copy.deepcopy(extra_kwargs)\n _extra_kwargs[\"sampler\"][\"nsamples\"] = number\n return _samples.T.tolist(), _extra_kwargs" ]
[ "0.6413715", "0.59297705", "0.5833065", "0.58302635", "0.57907945", "0.57134163", "0.5706828", "0.56789315", "0.5579106", "0.55292034", "0.55220985", "0.5508379", "0.55038553", "0.5503162", "0.5483587", "0.5474814", "0.54742444", "0.54477376", "0.5436643", "0.54324836", "0.5417326", "0.540844", "0.53983593", "0.5394064", "0.5387921", "0.5378048", "0.5371443", "0.53680354", "0.5357885", "0.5347049" ]
0.666516
0
Copy the input content object into the session directory using archive naming conventions without version details. Return the full path of the session file, or None on failure.
def copyToSession(self, contentType, formatType, version="latest", partitionNumber=1):
    """Copy the input content object into the session directory using archive
    naming conventions without version details.

    Returns the full path of the session file on success, or None when the
    input file is missing or the copy fails.
    """
    inpFilePath = self.__getFilePath(fileSource=self.__fileSource, contentType=contentType, formatType=formatType, version=version, partitionNumber=partitionNumber)
    if self.__debug:
        self.__lfh.write("+DataExchange.copyToSession() source file type %s format %s version %s path %s\n" % (contentType, formatType, version, inpFilePath))

    try:
        outFilePath = None
        if os.access(inpFilePath, os.R_OK):
            # Session copies are stored under a version-less archive file name.
            fn = self.__getArchiveFileName(contentType, formatType, version="none", partitionNumber=partitionNumber)
            outFilePath = os.path.join(self.__sessionPath, fn)
            if self.__verbose:
                self.__lfh.write("+DataExchange.copyToSession() content type %s format %s copied to session path %s\n" % (contentType, formatType, outFilePath))
            shutil.copyfile(inpFilePath, outFilePath)
            return outFilePath
        else:
            if self.__debug:
                self.__lfh.write("+DataExchange.copyToSession() missing input file at path %s\n" % inpFilePath)
            return None
    except:  # noqa: E722 pylint: disable=bare-except
        # Best-effort copy: log and return None.
        # FIX: removed a duplicated nested `if self.__verbose:` check that the
        # original accidentally contained in this handler.
        if self.__verbose:
            self.__lfh.write("+DataExchange.copyToSession() Failing for content type %s format %s with session path %s\n" % (contentType, formatType, outFilePath))
            traceback.print_exc(file=self.__lfh)
        return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def updateArchiveFromSession(self, contentType, formatType, version=\"next\", partitionNumber=1):\n fn = self.__getArchiveFileName(contentType, formatType, version=\"none\", partitionNumber=partitionNumber)\n inpFilePath = os.path.join(self.__sessionPath, fn)\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.updateArchiveDromSession() source file type %s format %s path %s\\n\" % (contentType, formatType, inpFilePath))\n\n try:\n if os.access(inpFilePath, os.R_OK):\n outFilePath = self.__getFilePath(fileSource=\"archive\", contentType=contentType, formatType=formatType, version=version, partitionNumber=partitionNumber)\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.updateArchiveFromSession() archive destination file path %s\\n\" % outFilePath)\n shutil.copyfile(inpFilePath, outFilePath)\n return outFilePath\n else:\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.updateArchiveFrom() missing session input file at path %s\\n\" % inpFilePath)\n return None\n except: # noqa: E722 pylint: disable=bare-except\n if self.__verbose:\n traceback.print_exc(file=self.__lfh)\n return None", "def _copy_to_media(self, template_name, source=''):\n dirpath = os.path.join(self.cache_root, os.path.dirname(template_name))\n filename = os.path.basename(template_name)\n fullpath = os.path.join(dirpath, filename)\n\n if not os.path.isfile(fullpath) or settings.DEBUG:\n if not os.path.exists(dirpath):\n os.makedirs(dirpath)\n\n f = open(fullpath, 'w')\n f.write(source)\n f.close()\n\n return urljoin(self.cache_url, template_name), filename", "def _expand_archive(self, name):\r\n target = path(self.temp_dir) / uuid.uuid4().hex\r\n os.mkdir(target)\r\n with tarfile.open(self.data_dir / name) as tar_file:\r\n tar_file.extractall(path=target)\r\n\r\n return target", "def copy_os_release_file(dut, fname=default_os_release_file):\n # src = os.path.join(os.path.sep, 'shared', os_release_files_dir, fname)\n dst = os.path.join(os.path.sep, 'etc', 'os-release')\n 
dut(\"/bin/cp /tmp/files/os_releases/\" + fname + \" \" + dst, shell=\"bash\")", "def file_copy_form_bcdbfs(self, path, dest):\n source_file = self._file_model.get_by_name(name=path)[0]\n if self.is_dir(dest):\n dest = j.sal.fs.joinPaths(dest, j.sal.fs.getBaseName(path))\n dest_file = self.file_create_empty(dest)\n if source_file.blocks:\n dest_file.blocks = source_file.blocks\n elif source_file.content:\n dest_file.content = source_file.content\n\n dest_file.save()\n return dest_file", "def copy(self, src_path: str, tgt_path: str) -> None:", "def copy(self, des=None, name=None):\n if des is None:\n des = self.source_path\n if name is None:\n name = self.name\n location = os.path.join(des, name)\n while os.path.isfile(location) is True:\n self.base.reset_random()\n self.name = self.base.random\n name = self.name\n location = os.path.join(des, name)\n with open(location, 'wb') as f:\n f.write(self.stream)\n return location", "def get_copy(self) -> Path:\n snapshot_source_dir = PERSISTENCE_SNAPSHOTS_DIR / self.version\n snapshot_copy_dir = Path(TemporaryDirectory().name) / self.version\n copytree(src=snapshot_source_dir, dst=snapshot_copy_dir)\n return snapshot_copy_dir", "def _create_session_data(self, abs_path, sess_root):\n sess_path = os.path.join(abs_path, sess_root)\n if not os.path.exists(sess_path):\n os.makedirs(sess_path)\n sess_id = len(os.listdir(sess_path))\n sess_path = os.path.join(sess_path, str(sess_id))\n print(\"SESSION PATH:\", sess_path)\n print(\"SESSION ID:\", sess_id) \n return sess_id, sess_path", "def extract(cls, path, outdir):\r\n raise NotImplementedError()", "def copyFile(self, *args):\n return _libSALOME_LifeCycleCORBA.SALOME_LifeCycleCORBA_copyFile(self, *args)", "def copyDirToSession(self, dirName):\n try:\n if self.__fileSource in [\"archive\", \"wf-archive\"]:\n pth = self.__pI.getArchivePath(self.__depDataSetId)\n elif self.__fileSource in [\"deposit\"]:\n pth = self.__pI.getDepositPath(self.__depDataSetId)\n elif self.__fileSource 
in [\"wf-instance\"]:\n pth = self.__pI.getInstancePath(self.__depDataSetId, self.__wfInstanceId)\n else:\n return False\n\n srcPath = os.path.join(pth, dirName)\n if not os.access(srcPath, os.R_OK):\n return False\n\n dstPath = os.path.join(self.__sessionPath, dirName)\n if not os.path.isdir(dstPath):\n os.makedirs(dstPath, 0o755)\n #\n fPattern = os.path.join(srcPath, \"*\")\n fpL = filter(os.path.isfile, glob.glob(fPattern))\n for fp in fpL:\n _dN, fN = os.path.split(fp)\n oP = os.path.join(dstPath, fN)\n shutil.copyfile(fp, oP)\n\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.copyDirToSession() successful session copy of dirName %s\\n\" % (dirName))\n return True\n except: # noqa: E722 pylint: disable=bare-except\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.copyDirToSession() fails for dirName %s\\n\" % (dirName))\n traceback.print_exc(file=self.__lfh)\n return False\n\n return True", "def copy(self, src, dest):\n\n src = os.path.join(os.path.dirname(__file__), \"collections\", \"kitchensink\", src)\n dest = os.path.join(self.checkout, dest)\n if os.path.isdir(src):\n shutil.copytree(src, dest)\n else:\n shutil.copy(src, dest)\n return dest", "def copy(location):\n\tcopyData = settings.getDataFile()\n\tcopyFileLocation = os.path.abspath(location)\n\tcopy = {\"copyLocation\": copyFileLocation}\n\tdataFile = open(copyData, \"wb\")\n\tpickle.dump(copy, dataFile)\n\tspeech.speak(location + \" copied successfully!\")\n\tspeech.speak(\"Tip: use 'hallie paste' to paste this file.\")", "def archive_path(self):\n return os.path.join(self.destination_directory, self.__archive_name__)", "def extract_to_disk(self):\n archive_name, extension = os.path.splitext(os.path.basename(self.file.name))\n if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):\n os.mkdir(archive_name)\n os.chdir(archive_name)\n for filename, data in self.extract().items():\n f = open(filename, 'wb')\n f.write(data or b'')\n f.close()", "def 
test_profile_copy_file(profile_manager, test_profile,\n tmpdir, inventory_content):\n\n myfile = tmpdir.mkdir(\"ir_dir\").join(\"fake_hosts_file\")\n myfile.write(inventory_content)\n org_inventory = myfile.strpath\n\n target_path = test_profile.copy_file(org_inventory)\n assert target_path == os.path.join(\n test_profile.path, os.path.basename(org_inventory))\n\n profile_inventory = py.path.local(target_path)\n assert profile_inventory.check(file=1)\n assert inventory_content == profile_inventory.read()", "def create_content(\n copy_from: Optional[str] = None,\n ext: Optional[str] = None,\n type: Optional[str] = None,\n path: str = \"\",\n) -> str:\n ...", "def export_file(self):\n\n if not self.session_filename:\n return\n\n data = {\n \"session_filename\": self.session_filename,\n \"index_start\": self.total_mutant_index,\n \"sleep_time\": self.sleep_time,\n \"restart_sleep_time\": self.restart_sleep_time,\n \"restart_interval\": self.restart_interval,\n \"web_port\": self.web_port,\n \"crash_threshold\": self._crash_threshold_node,\n \"total_num_mutations\": self.total_num_mutations,\n \"total_mutant_index\": self.total_mutant_index,\n \"netmon_results\": self.netmon_results,\n \"procmon_results\": self.procmon_results,\n \"is_paused\": self.is_paused\n }\n\n fh = open(self.session_filename, \"wb+\")\n fh.write(zlib.compress(cPickle.dumps(data, protocol=2)))\n fh.close()", "def upload_file(self, session, output, serverdir):\n name = output.metadata['filename']\n self.log.debug(\"uploading %r to %r as %r\",\n output.file.name, serverdir, name)\n\n kwargs = {}\n if self.blocksize is not None:\n kwargs['blocksize'] = self.blocksize\n self.log.debug(\"using blocksize %d\", self.blocksize)\n\n upload_logger = KojiUploadLogger(self.log)\n session.uploadWrapper(output.file.name, serverdir, name=name,\n callback=upload_logger.callback, **kwargs)\n path = os.path.join(serverdir, name)\n self.log.debug(\"uploaded %r\", path)\n return path", "def load_game_session(self, 
game_name, session_name):\n self.file_path = os.path.join(self.file_directory % game_name, '%s.%s' % (session_name, self.file_extension))\n self.file_path = os.path.expanduser(self.file_path)\n self.load()", "def clone_data(self,req):\n # source folder\n source=req.source or \"/media/howie/archive/data/music/\"\n # destination folder\n dest=req.dest or \"/home/howie/data/music/\"\n # clone the music files\n c=0\n for i in self.list(isin={'kind':(\"track\",\"image\",\"file\")},orderby=\"uid\"):\n c+=1\n# print c,\" uid:\",i.uid,\" kind:\",i.kind,\" loc:\",i.file_folder(),\" name:\",i.name\n subfolder=i.file_folder()\n destfolder=dest+subfolder\n if not os.path.exists(destfolder):\n os.makedirs(destfolder)\n shutil.copy2(source+subfolder+\"/\"+i.code,destfolder)\n print(\"added %s\" % (dest+subfolder+\"/\"+i.code,))\n return \"clone completed: %s files added\" % c", "def fetch(self, dest=None):\n if dest is None:\n name = self.physical_key.basename()\n dest = PhysicalKey.from_path('.').join(name)\n else:\n dest = PhysicalKey.from_url(fix_url(dest))\n\n copy_file(self.physical_key, dest)\n\n # return a package reroot package physical keys after the copy operation succeeds\n # see GH#388 for context\n return self.with_physical_key(dest)", "def extract(self, path, archivecontentmaxsize):\n return None", "async def copy_snapshot(self, name, slug, backup_path):\n\n # ensure the name is a valid filename.\n if name:\n filename = slugify(name, lowercase=False, separator=\"_\")\n else:\n filename = slug\n\n # ensure the filename is a tar file.\n if not filename.endswith(\".tar\"):\n filename += \".tar\"\n\n destination = join(backup_path, filename)\n\n # check if file already exists\n if isfile(destination):\n destination = join(backup_path, f\"{slug}.tar\")\n\n await self.download_snapshot(slug, destination)", "def _get_session_dir(self):\n\n fnd = os.path.join(self.config.capture_dir, self.timestamp.date_string(), self.timestamp.time_string())\n if not os.path.isdir(fnd):\n 
os.makedirs(fnd)\n\n return fnd", "def copy_to_path(object, path):\n output = open(path, 'wb')\n _copy_and_close(object, output)", "def archive_backup(self):\n\n # Archiving the Training script\n shutil.copyfile(self.script_path, self.save_path + '/0-' + os.path.basename(self.script_path))\n os.chmod(self.save_path + '/0-' + os.path.basename(self.script_path), 0o755)\n # Archiving the src folder\n pkg_path = os.path.dirname(arch_src)\n backup_path = os.path.join(self.save_path, 'src_backup')\n shutil.make_archive(backup_path, 'gztar', pkg_path)\n\n # Archiving the Environment Info\n env_info = collect_env.get_pretty_env_info()\n with open(self.save_path + '/env_info.txt', 'w') as f:\n f.write(env_info)", "def copy(ctx, token, path):\n try:\n container_id = path[\"container_id\"]\n container_path = path[\"container_path\"]\n host_path = path[\"host_path\"]\n host_to_container = path[\"host_to_container\"]\n out = ctx.obj.copy_to_from_container(token,\n container_id,\n container_path,\n host_path,\n host_to_container)\n print_message(out)\n except BaseException as e:\n print_error(e.message)", "def move_from_temp_directory(self):" ]
[ "0.6401436", "0.5103961", "0.5062158", "0.5057045", "0.5040292", "0.5029681", "0.5028371", "0.49832347", "0.49700403", "0.496026", "0.4940959", "0.4940322", "0.49357715", "0.49029315", "0.49014482", "0.4897062", "0.48897016", "0.48732707", "0.4855773", "0.4842807", "0.48423657", "0.48371717", "0.48305207", "0.48138887", "0.4810515", "0.47996655", "0.47914526", "0.47732562", "0.47719944", "0.47598755" ]
0.6456665
0
Copy the input content object (stored in the session directory using archive naming conventions without version details) to archive storage. Return the full path of the archive file, or None on failure.
def updateArchiveFromSession(self, contentType, formatType, version="next", partitionNumber=1): fn = self.__getArchiveFileName(contentType, formatType, version="none", partitionNumber=partitionNumber) inpFilePath = os.path.join(self.__sessionPath, fn) if self.__verbose: self.__lfh.write("+DataExchange.updateArchiveDromSession() source file type %s format %s path %s\n" % (contentType, formatType, inpFilePath)) try: if os.access(inpFilePath, os.R_OK): outFilePath = self.__getFilePath(fileSource="archive", contentType=contentType, formatType=formatType, version=version, partitionNumber=partitionNumber) if self.__verbose: self.__lfh.write("+DataExchange.updateArchiveFromSession() archive destination file path %s\n" % outFilePath) shutil.copyfile(inpFilePath, outFilePath) return outFilePath else: if self.__verbose: self.__lfh.write("+DataExchange.updateArchiveFrom() missing session input file at path %s\n" % inpFilePath) return None except: # noqa: E722 pylint: disable=bare-except if self.__verbose: traceback.print_exc(file=self.__lfh) return None
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def extract(self, path, archivecontentmaxsize):\n return None", "def archive_path(self):\n return os.path.join(self.destination_directory, self.__archive_name__)", "def _expand_archive(self, name):\r\n target = path(self.temp_dir) / uuid.uuid4().hex\r\n os.mkdir(target)\r\n with tarfile.open(self.data_dir / name) as tar_file:\r\n tar_file.extractall(path=target)\r\n\r\n return target", "def _archive(self):\n if self.__archive is None:\n t = time.time()\n try:\n self.__archive = file_util.read_npz(self.npz_path)\n except:\n raise ValueError('Failed to read %s' % self.npz_path)\n log('Time loading input archive: %0.2f' % (time.time() - t))\n return self.__archive", "def extract_to_disk(self):\n archive_name, extension = os.path.splitext(os.path.basename(self.file.name))\n if not os.path.isdir(os.path.join(os.getcwd(), archive_name)):\n os.mkdir(archive_name)\n os.chdir(archive_name)\n for filename, data in self.extract().items():\n f = open(filename, 'wb')\n f.write(data or b'')\n f.close()", "def copy_original_to_archives( self, date_stamp ):\n original_archive_file_path = '%s/REQ-ORIG_%s.dat' % ( self.PATH_TO_ARCHIVES_ORIGINALS_DIRECTORY, date_stamp ) # i.e. 
'/path/REQ-ORIG_2005-05-19T15/08/09.dat'\n try:\n shutil.copyfile( self.PATH_TO_SOURCE_FILE, original_archive_file_path )\n os.chmod( original_archive_file_path, 0640 )\n log.debug( 'source file copied to original archives' )\n except Exception, e:\n message = 'copy of original file from \"%s\" to \"%s\" unsuccessful; exception is: %s' % ( self.PATH_TO_SOURCE_FILE, original_archive_file_path, e )\n log.error( message )\n sys.exit( message )\n copy_check = utility_code.checkFileExistence( original_archive_file_path )\n if copy_check == 'exists':\n log.info( 'original file copied to: %s' % original_archive_file_path )\n else:\n message = 'copy of original file from \"%s\" to \"%s\" unsuccessful; exception is: %s' % ( self.PATH_TO_SOURCE_FILE, original_archive_file_path, copy_check )\n log.error( message )\n sys.exit( message )\n return", "def extract_one(self, archive: Path, dest: Path):\n if dest.exists():\n shutil.rmtree(dest)\n\n dest.mkdir(parents=True)\n\n if self.should_use_libarchive_c:\n import libarchive\n\n old_cwd = os.getcwd()\n os.chdir(str(dest))\n try:\n libarchive.extract_file(str(archive))\n finally:\n os.chdir(old_cwd)\n return\n\n if archive.name.endswith(EXTENSION_ZIP):\n with zipfile.ZipFile(archive) as zf:\n zf.extractall(dest)\n elif archive.name.endswith(EXTENSION_TAR):\n mode = \"r:bz2\" if archive.name.endswith(\".bz2\") else \"r:gz\"\n with tarfile.open(archive, mode) as tf:\n self.safe_extract_all(tf, dest)\n else:\n raise ValueError(f\"Unrecognized archive format {archive.name}\")\n\n for path in [dest, *dest.rglob(\"*\")]:\n path.chmod(MOD_DIRECTORY if path.is_dir() else MOD_FILE)", "def restore(self, archive):\n logger.info(\"Restoring an old archive run from {}\".format(archive))\n if os.path.isabs(archive):\n restorefile = archive\n else:\n restorefile = os.path.join(self.containerpath, const.ARCHIVEDIR, archive)\n with ignored(OSError):\n shutil.rmtree(os.path.join(self.rundir))\n with tarfile.open(restorefile, \"r:gz\") as f:\n def 
is_within_directory(directory, target):\n \n abs_directory = os.path.abspath(directory)\n abs_target = os.path.abspath(target)\n \n prefix = os.path.commonprefix([abs_directory, abs_target])\n \n return prefix == abs_directory\n \n def safe_extract(tar, path=\".\", members=None, *, numeric_owner=False):\n \n for member in tar.getmembers():\n member_path = os.path.join(path, member.name)\n if not is_within_directory(path, member_path):\n raise Exception(\"Attempted Path Traversal in Tar File\")\n \n tar.extractall(path, members, numeric_owner=numeric_owner) \n \n \n safe_extract(f, self.rundir)\n self._refreshconfig()", "def archive(mongo_backup_file):\r\n filename = get_archive_filename()\r\n tar = tarfile.open(filename, \"w|gz\")\r\n tar.add(mongo_backup_file)\r\n tar.close()\r\n\r\n return filename", "def copyToSession(self, contentType, formatType, version=\"latest\", partitionNumber=1):\n inpFilePath = self.__getFilePath(fileSource=self.__fileSource, contentType=contentType, formatType=formatType, version=version, partitionNumber=partitionNumber)\n if self.__debug:\n self.__lfh.write(\"+DataExchange.copyToSession() source file type %s format %s version %s path %s\\n\" % (contentType, formatType, version, inpFilePath))\n\n try:\n outFilePath = None\n if os.access(inpFilePath, os.R_OK):\n fn = self.__getArchiveFileName(contentType, formatType, version=\"none\", partitionNumber=partitionNumber)\n outFilePath = os.path.join(self.__sessionPath, fn)\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.copyToSession() content type %s format %s copied to session path %s\\n\" % (contentType, formatType, outFilePath))\n shutil.copyfile(inpFilePath, outFilePath)\n return outFilePath\n else:\n if self.__debug:\n self.__lfh.write(\"+DataExchange.copyToSession() missing input file at path %s\\n\" % inpFilePath)\n return None\n except: # noqa: E722 pylint: disable=bare-except\n if self.__verbose:\n if self.__verbose:\n self.__lfh.write(\"+DataExchange.copyToSession() 
Failing for content type %s format %s with session path %s\\n\" % (contentType, formatType, outFilePath))\n traceback.print_exc(file=self.__lfh)\n return None", "def remote_archiveUpload(self, talk_id, upload_id, role):\n source = yield self.getUpload(upload_id)\n extension = source.splitext()[1]\n\n # TODO: Check if the talk identified by talk_id exists and bind the\n # document to it.\n\n # TODO: Validate the given ``role`` argument (either strictly against a\n # list of known roles or loosely for sanity).\n\n # 2. Construct the final pathname\n version_id = ObjectId()\n basename = str(version_id) + extension\n destination = settings.data_root.child(talk_id).child(role)\n if not destination.exists():\n destination.makedirs()\n destination = destination.child(basename)\n\n # 3. move the file to its destination\n yield threads.deferToThread(source.moveTo, destination)\n\n # 2. Save the info to the database\n asset = Asset(\n _id=version_id,\n archiver_id=self.getID(),\n talk_id=talk_id,\n role=role\n )\n version = AssetVersion(\n version_id=version_id,\n filename=destination\n )\n asset.versions.append(version)\n\n yield asset.save()\n\n # 5. Start the upload triggers\n task = self.processAsset(asset)\n\n # TODO: Define the return value of this method. 
Shall it be the task,\n # the version_id/asset_id or both?\n defer.returnValue((str(version_id), task.id))", "def _archiveData(self, src, dest):\n \troot = os.getcwd()\n \tsrcPath = join(root,src)\n \tdestPath = join(root,dest)\n \tf = [] #Array with list of files to copy\n \ts = [] #Array with list of files successfully copied\n \tfor (dirpath, dirnames, filenames) in walk(srcPath):\n \t\tf.extend(filenames)\n \t\tif len(f) > 0:\n \t\t\tfor i in f:\n \t\t\t\tif str(i) != 'archiving_log.txt' and str(i) != 'archiving_log.txt~' and str(i) != 'archivingScript.py':\n \t\t\t\t\ttry:\n \t\t\t\t\t\tbuffer_size = int(20000)\n \t\t\t\t\t\tfileSrcPath = join(dirpath, i)\n \t\t\t\t\t\tfileDestPath = join(destPath, i)\n \t\t\t\t\t\twith open(fileSrcPath, 'rb') as fsrc:\n \t\t\t\t\t\t\twith open(fileDestPath, 'wb') as fdest:\n \t\t\t\t\t\t\t\tcopy = shutil.copyfileobj(fsrc,fdest,buffer_size)\n \t\t\t\t\t\t\t\tcopy\n \t\t\t\t\t\t\t\tself._backupLog('Copy Operation File: '+str(i)+ '\\t' + 'Time: '+ str(time.strftime(\"%H:%M:%S\")) + '\\t'+ 'Path: '+ str(srcPath)+'\\n')\n \t\t\t\t\t\t\t\ts.append(i)\n \t\t\t\t\texcept shutil.Error as e:\n \t\t\t\t\t\tself._backupLog('Error: %s' % e + '\\t' + 'File: '+str(i)+ '\\t' + 'Time: '+ str(time.strftime(\"%H:%M:%S\")) + '\\n')\n \t\t\t\t\texcept IOError as e:\n \t\t\t\t\t\tself._backupLog('Error: %s' % e.strerror + '\\t' + 'File: '+str(i)+ '\\t' + 'Time: '+ str(time.strftime(\"%H:%M:%S\")) + '\\n')\n \tif len(s) >0:\n for (dirpath,dirnames,filenames) in walk(srcPath):\n for cfile in f:\n for sfile in s:\n if cfile == sfile:\n try:\n filetoDelete = join(srcPath, cfile)\n os.remove(filetoDelete)\n self._backupLog('Delete Operation File: '+str(cfile)+ '\\t' + 'Time: '+ str(time.strftime(\"%H:%M:%S\")) + '\\n')\n except OSError, e:\n self._backupLog('Error deleting file: %s - %s.' 
% (e.filename, e.strerror) + '\\t' + 'Time: '+ str(time.strftime(\"%H:%M:%S\")) + '\\n')", "def extract(self, path, archivecontentmaxsize):\n if archivecontentmaxsize is not None and self.filesize(path) > archivecontentmaxsize:\n return None\n\n initial_position = self._handle.fileobj.tell()\n filecontent = self._handle.read()\n self._handle.fileobj.seek(initial_position)\n return filecontent", "async def copy_snapshot(self, name, slug, backup_path):\n\n # ensure the name is a valid filename.\n if name:\n filename = slugify(name, lowercase=False, separator=\"_\")\n else:\n filename = slug\n\n # ensure the filename is a tar file.\n if not filename.endswith(\".tar\"):\n filename += \".tar\"\n\n destination = join(backup_path, filename)\n\n # check if file already exists\n if isfile(destination):\n destination = join(backup_path, f\"{slug}.tar\")\n\n await self.download_snapshot(slug, destination)", "def extract(self, path, archivecontentmaxsize):\n if archivecontentmaxsize is not None and self.filesize(path) > archivecontentmaxsize:\n return None\n\n arinfo = self._handle.getmember(path)\n if not arinfo.isfile():\n return None\n x = self._handle.extractfile(path)\n extracted = x.read()\n x.close()\n return extracted", "def unpack(backend_name, archive_id):\n backend = get_backend(backend_name)\n click.echo(f\"Retrieving archive {archive_id}\")\n backend.archive_retrieve(config.root_path, archive_id)", "def test_history_import_abspath_in_archive():\n dest_parent = mkdtemp()\n arcname_prefix = os.path.abspath(os.path.join(dest_parent, 'insecure'))\n\n with HistoryArchive(arcname_prefix=arcname_prefix) as history_archive:\n history_archive.write_metafiles()\n history_archive.write_file('datasets/Pasted_Entry_1.txt', 'foo')\n history_archive.finalize()\n _run_unpack(history_archive, dest_parent, 'Absolute path in import archive allowed')", "def get_archive_file_path(self,results):\n path = os.path.join(self.archive_path,results.version)\n if not os.path.exists(path):\n 
os.makedirs(path)\n return os.path.join(path,self.get_archive_filename(results))", "def path_as_archived( wav_file_path, archive_dir ):\n return os.path.join( archive_dir, os.path.basename( wav_file_path ) )", "def extract(cls, path, outdir):\r\n raise NotImplementedError()", "def _GetLocalArchive(self):\n if FLAGS.perfspect_tarball:\n logging.info('perfspect_tarball specified: %s', FLAGS.perfspect_tarball)\n local_archive_path = FLAGS.perfspect_tarball\n else:\n url = FLAGS.perfspect_url or PERFSPECT_ARCHIVE_URL\n logging.info('downloading PerfSpect from: %s', url)\n filename = os.path.basename(urlparse(url).path)\n local_archive_path = posixpath.join(vm_util.GetTempDir(), filename)\n vm_util.IssueCommand(['curl', '-k', '-L', '-o', local_archive_path, url],\n timeout=None)\n return local_archive_path", "def extract_file(self):\n# path_destination = os.path.join(\n# self.root, self.resources.replace(\".zip\", \"\"))\n# os.makedirs(path_destination, exist_ok=True)\n shutil.unpack_archive(os.path.join(\n self.root, self.resources), self.root)\n os.remove(os.path.join(self.root, self.resources))", "def default_archiver(random, population, archive, args):\r\n return archive", "def put(self, obj):\n\n if obj is None:\n return\n\n assert os.path.exists(obj), f'path {obj} does not exist.'\n\n return shutil.make_archive(obj, 'tar', obj)", "def _archive(self):\n # LOG: change this to something archive specific\n self.set_property('processing_type', 'archive')\n self.should_copy = False\n self.is_recursive = True", "def put_archive(self, path, data):\n return self.client.api.put_archive(self.id, path, data)", "def get_archive(*args, **kwargs):\n return get_archive_async(*args, **kwargs).get_result()", "def archive_backup(self):\n\n # Archiving the Training script\n shutil.copyfile(self.script_path, self.save_path + '/0-' + os.path.basename(self.script_path))\n os.chmod(self.save_path + '/0-' + os.path.basename(self.script_path), 0o755)\n # Archiving the src folder\n pkg_path 
= os.path.dirname(arch_src)\n backup_path = os.path.join(self.save_path, 'src_backup')\n shutil.make_archive(backup_path, 'gztar', pkg_path)\n\n # Archiving the Environment Info\n env_info = collect_env.get_pretty_env_info()\n with open(self.save_path + '/env_info.txt', 'w') as f:\n f.write(env_info)", "def extract(self, path, archivecontentmaxsize):\n if archivecontentmaxsize is not None and self.filesize(path) > archivecontentmaxsize:\n return None\n return self._handle.read(path)", "def extract(self, path, archivecontentmaxsize):\n if archivecontentmaxsize is not None and self.filesize(path) > archivecontentmaxsize:\n return None\n return self._handle.read(path)" ]
[ "0.61428714", "0.611737", "0.60615754", "0.59071267", "0.57440823", "0.5698574", "0.5692508", "0.5683624", "0.56766564", "0.5667239", "0.56335604", "0.55756545", "0.5565427", "0.55461234", "0.5537251", "0.5460467", "0.545186", "0.5447522", "0.5439493", "0.54372364", "0.5425858", "0.5398837", "0.53920084", "0.5383544", "0.5367535", "0.53657216", "0.5365531", "0.53651196", "0.5362652", "0.5362652" ]
0.66665554
0
Decipher given text using Caesar method. Note you should use the same charset that ciphering end did.
def decipher(ciphered_text: str, key: int, charset: str = DEFAULT_CHARSET) -> str: deciphered_text = _offset_text(ciphered_text, key, False, Ciphers.CAESAR, charset) return deciphered_text
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def decrypt_caesar(ciphertext: str) -> str:\n plaintext = ''\n for ab in ciphertext:\n if ('a' <= ab <= 'z') or ('A' <= ab <= 'Z'):\n ans = ord(ab) - 3\n if (ans < ord('a')) and (ans > ord('Z')) or (ans < ord('A')):\n ans += 26\n plaintext += chr(ans)\n else:\n plaintext += ab\n return plaintext", "def decipher(self):\n plaintext = \"\"\n for ct, key_char in zip(self.text, self.key):\n char_index = self.char_block.rows[key_char].index(ct)\n plaintext += self.char_block.alphabet[char_index]\n print(plaintext)", "def decrypt_caesar(ciphertext):\n plaintext = \"\"\n for c in ciphertext:\n if (ord(c) <= 64) or (91 <= ord(c) <= 96) or (123 <= ord(c)):\n plaintext += chr(ord(c))\n elif (97 <= ord(c) - 3 <= 122) or (65 <= ord(c) - 3 <= 90):\n plaintext += chr(ord(c) - 3)\n else:\n plaintext += chr(ord(c) - 3 + 26)\n return plaintext", "def decrypt_caesar(ciphertext):\n return ''.join([cipher_to_plain[old] for old in ciphertext.upper()])", "def decrypt(self, text):\n\n output = []\n text = text.upper()\n for char in text:\n try:\n text_index = self.combined.index(char)\n output.append(self.alphabet[text_index])\n except ValueError:\n output.append(char)\n\n return ''.join(output)", "def decrypt(text,key):\r\n aes = pyaes.AESModeOfOperationCTR(key)\r\n decrypted = aes.decrypt(text)\r\n return decrypted", "def decrypt(\r\n key: bytes,\r\n cipher_text: bytes,\r\n) -> str:\r\n block_size = 16\r\n iv = cipher_text[:block_size]\r\n cipher = AES.new(key, AES.MODE_CBC, iv)\r\n plain_text = cipher.decrypt(cipher_text[block_size:]).decode('utf-8')\r\n return _unpad(plain_text)", "def decrypt(self, text):\n\n output = []\n text = text.upper()\n\n for char in text:\n try:\n index = self.alpha.index(char)\n except ValueError:\n output.append(char)\n else:\n output.append(self.alpha[21 * (index - 8) % 26])\n return \"\".join(output)", "def decrypt_caesar(ciphertext: str, shift: int = 3) -> str:\n plaintext = \"\"\n low_first = ord(\"a\")\n low_last = ord(\"z\")\n high_first = 
ord(\"A\")\n high_last = ord(\"Z\")\n eng_alp = 26\n for i in ciphertext:\n if i.isalpha():\n if low_first <= ord(i) <= low_last:\n a = chr((((ord(i) - low_first) - shift) % eng_alp) + low_first)\n plaintext += a\n elif high_first <= ord(i) <= high_last:\n a = chr((((ord(i) - high_first) - shift) % eng_alp) + high_first)\n plaintext += a\n else:\n plaintext += i\n return plaintext", "def decrypt(self, text):\n return self.encrypt(text)", "def decrypt_caesar(ciphertext: str, shift: int = 3) -> str:\n plaintext = \"\"\n alph = [chr(letter) for letter in range(ord(\"a\"), (ord(\"a\") + shift))]\n alph.extend([chr(letter) for letter in range(ord(\"A\"), (ord(\"A\") + shift))])\n for letter in ciphertext:\n if (\"a\" <= letter <= \"z\") or (\"A\" <= letter <= \"Z\"):\n if letter in alph:\n plaintext = plaintext + chr(ord(letter) + 26 - shift)\n else:\n plaintext = plaintext + chr(ord(letter) - shift)\n else:\n plaintext += letter\n return plaintext", "def decipher(self,string): \n string = self.remove_punctuation(string)\n ret = ''\n for (i,c) in enumerate(string):\n if i<len(self.key): offset = self.a2i(self.key[i])\n else: offset = self.a2i(ret[i-len(self.key)]) \n ret += self.i2a(self.a2i(c)-offset)\n return ret", "def decrypt(self, text):\n\n decrypted_word = []\n for letter in text:\n try:\n index = self.alpha.index(letter)\n except ValueError:\n decrypted_word.append(letter)\n else:\n # Uses Affine decryption function to decrypt the word\n new_index = ((21*(index-self.b)) % self.m)\n decrypted_word.append(self.alpha[new_index])\n return \"\".join(decrypted_word)", "def decrypt(self, ciphertext):\n text = []\n # ciphertext = ciphertext.upper()\n for char in ciphertext:\n try:\n key = math_utils.mult_mod_inv(self.a, len(self.characters)) * (self.characters.index(char) - self.b) % len(self.characters)\n # If character is not in set for cipher,\n # directly append it without transformation\n except ValueError:\n text.append(char)\n else:\n 
text.append(self.characters[key])\n return ''.join(text)", "def decrypt(text, offset):\n decrypted_text = \"\"\n for char in text:\n if ord(char) <= 64:\n decrypted_character = chr(ord(char))\n elif ord(char) <= 90:\n decrypted_character = ord(char) - offset\n if decrypted_character < 65:\n decrypted_character += 26\n decrypted_character = chr(decrypted_character)\n else:\n decrypted_character = ord(char) - offset\n if decrypted_character < 97:\n decrypted_character += 26\n decrypted_character = chr(decrypted_character)\n decrypted_text += decrypted_character\n\n return decrypted_text", "def weaksauce_decrypt(text, password):\n offset = sum([ord(x) for x in password])\n decoded = ''.join(\n chr(max(ord(x) - offset, 0))\n for x in text\n )\n return decoded", "def decrypt(self, message):\n output = []\n for letter in message:\n # preventing white spaces and numbers\n if letter == ' ' or isinstance(letter, int):\n output.append(letter)\n else:\n idx_in_plain = self.CIPHER_TEXT_ALPH.index(letter.upper())\n output.append(self.PLAIN_TEXT_ALPH[idx_in_plain])\n return \"\".join(output)", "def decrypt_vigenere(ciphertext: str, keyword: str) -> str:\n plaintext = \"\"\n # PUT YOUR CODE HERE\n key_lenght = len(keyword)\n text_lenght = len(ciphertext)\n\n while key_lenght != text_lenght:\n keyword += keyword\n key_lenght = len(keyword)\n if key_lenght > text_lenght:\n keyword = keyword[:text_lenght]\n key_lenght = len(keyword)\n code_key = []\n ord_a = ord('a')\n ord_A = ord('A')\n\n if ciphertext.islower():\n for i in range(key_lenght):\n if ciphertext[i] == \" \":\n code_key.append(\" \")\n else:\n code_key.append(ord(keyword[i]) - ord_a)\n code_text = []\n for n in range(text_lenght):\n if ciphertext[n] == \" \":\n code_text.append(\" \")\n else:\n code_text.append(ord(ciphertext[n]) - ord_a)\n for u in range(text_lenght):\n if ciphertext[u] == \" \":\n value = ord(\" \")\n else:\n\n value = ((code_text[u] - code_key[u] + 26) % 26) + ord_a\n plaintext += chr(value)\n 
else:\n for i in range(key_lenght):\n if ciphertext[i] == \" \":\n code_key.append(\" \")\n else:\n code_key.append(ord(keyword[i]) - ord_A)\n code_text = []\n for n in range(text_lenght):\n if ciphertext[n] == \" \":\n code_text.append(\" \")\n else:\n code_text.append(ord(ciphertext[n]) - ord_A)\n for u in range(text_lenght):\n if ciphertext[u] == \" \":\n value = ord(\" \")\n else:\n value = ((code_text[u] - code_key[u] + 26) % 26) + ord_A\n plaintext += chr(value)\n\n return plaintext", "def caesar_cipher_decrept(string, key):\n try:\n string_to_return = \"\"\n for l in string:\n if not(l >= 'A'and l <= 'Z' or l >= 'a'and l <= 'z'):\n string_to_return += l\n elif ord(l.upper()) - key < ord('A'):\n string_to_return += chr(ord('Z') - ord('A') + ord(l.upper()))\n else:\n string_to_return += chr(ord(l.upper())-key)\n return string_to_return\n except (ValueError, IndexError, Exception) as ex:\n print(EXCEPTION_MESSAGE, ex)", "def decrypt(text: str, key: str = None):\n if not text.isdecimal():\n raise ValueError(\"Encrypted text must contain only numbers.\")\n tmpres = []\n lkey = []\n if key is not None:\n lkey = list(key.encode(\"utf-8\"))\n i = 0\n counter = 0\n while i < len(text):\n l = int(text[i])\n tmp = text[i + 1:i + l + 1]\n i += l + 1\n if not tmp:\n break\n if lkey:\n c = int(tmp) - lkey[counter % len(lkey)]\n else:\n pm = 1 if tmp[0] == \"0\" else -1\n ri = int(tmp[1]) * pm\n c = int(tmp[2:]) - ri\n tmpres.append(c)\n counter += 1\n return bytes(tmpres).decode(\"utf8\")", "def decrypt(text, offset):\r\n return format_text(text, -offset)", "def decrypt(self, text):\n\t\tif self.offsets != self.start_off:\n\t\t\traise Exception(\"Current offset != starting offset. 
Use the reset\"+\\\n\t\t\t\t\t\t\t\" method before decrypting.\")\n\t\treturn self.encrypt(text)", "def caesar_encryption(text):\n result = ''\n for char in text:\n if char.isdigit():\n i = (num_key.index(char) - 4) % 10\n result += num_key[i]\n elif not char.isdigit() and char.lower() in alpha_key:\n i = (alpha_key.index(char.lower()) - 4) % 26\n result += alpha_key[i]\n else:\n result += char\n return result", "def decrypt(self, data):", "def AES_decrypt(ciphertext: bytes) -> Text:\n text = b64decode(ciphertext)\n cipher = AES.new(secret_key, mode, IV)\n return Padding.unpad(cipher.decrypt(text), bs).decode('utf-8')", "def decrypt(self, ciphertext):\n return self._transform(ciphertext, self._backward)", "def decrypt_vigenere(cipehrtext: str, keyword: str) -> str:\n plaintext = \"\"\n if len(keyword) < len(cipehrtext):\n for j in range(len(cipehrtext) - len(keyword)):\n keyword += keyword[j]\n for i in range(len(cipehrtext)):\n n = ord(cipehrtext[i])\n m = ord(keyword[i])\n if (n >= ord('A')) and (n <= ord('Z')):\n if n >= m:\n plaintext += chr(n - m + ord('A'))\n else:\n plaintext += chr(ord('Z') + 1 - (m - n))\n else:\n if n >= m:\n plaintext += chr(n - m + ord('a'))\n else:\n plaintext += chr(ord('Z') + 1 - (m - n))\n return plaintext", "def decrypt_vigenere(ciphertext, keyword):\n list = []\n index = 0\n for char in ciphertext:\n new_char_val = ord(char) - (ord(keyword[index]) - ord('A'))\n if new_char_val < ord('A'):\n new_char_val += 26\n list.append(chr(new_char_val))\n index += 1\n index %= len(keyword)\n return ''.join(list)", "def caesar_cipher_decode(n: int, text: str, p: str) -> str:\n lookup_table = str.maketrans(p, p[-n:] + p[:-n])\n\n return text.translate(lookup_table)", "def decryptionSelfMadeFunction(text,index):\n s = text\n transformedChar = \"\"\n\n transformedChar = s[:index] + s[-1] + s[index:len(s)-1]\n\n print(\"Decrypted Transformed text : \" )\n return transformedChar" ]
[ "0.73526025", "0.7299322", "0.72758853", "0.71435684", "0.70446986", "0.69707316", "0.69267696", "0.692425", "0.68940175", "0.68750525", "0.67252564", "0.67230713", "0.6704006", "0.66632414", "0.6645716", "0.65805125", "0.64702874", "0.6456406", "0.6453587", "0.6432239", "0.64105946", "0.6383451", "0.6373935", "0.62932336", "0.6276572", "0.62733984", "0.6260698", "0.62511575", "0.62342834", "0.6230407" ]
0.8169176
0
show progress if self.__is_show_proegress True. \param[in] _cur_file_idx current processing file index \param[in] _file_count number of total files
def __show_progress(self, _cur_file_idx, _file_count): if (self.__is_show_proegress == False): return if(_file_count == 0): raise StandardError('no file found.') # show progress for each 5% (20 steps) digit = math.modf(math.log10(_file_count))[1] if(digit < 3): print "prog: [{0}%] {1}/{2}".format((100 * _cur_file_idx) /_file_count, _cur_file_idx, _file_count) else: digit = digit - 2 skipstep10 = math.pow(10, digit) if ((_cur_file_idx % skipstep10) == 0): print "prog: [{0}%] {1}/{2}".format((100 * _cur_file_idx) /_file_count, _cur_file_idx, _file_count)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def copy_progress(self, percentage_complete, filecount, filecomplete):\n ##TODO: display the current transfer rate\n ##TODO: display the current file being transferred and possibly the progress thereof.\n ##Perhaps use the statusbar method for this\n self.progress.setValue(int(percentage_complete))", "def _bar_progress(self, count, done=False):\n if self.blank:\n return\n self.current_count = count\n count = min(count, self.total)\n if self.total == count or not self.total:\n complete = 100\n else:\n complete = int(floor(100.0*count/self.total))\n if complete <= self.last_percent:\n return\n self.last_percent = complete\n if self.view_type is self.PERCENT:\n self.f.write('\b\b\b\b%3d%%' % complete)\n elif self.view_type is self.BAR:\n blockcount = int(complete//2)\n if blockcount <= self.blockcount:\n return\n for i in range(self.blockcount, blockcount):\n self.f.write(self.bar_char)\n self.blockcount = blockcount\n else:\n raise Exception('unknown value for view_type: %r' % self.view_type)\n if complete == 100:\n self.f.write('\\n')\n self.f.flush()", "def progress_bar(self, count, total, status):\n\n bar_len = 50\n filled_len = int(round(bar_len * count / float(total)))\n\n file_size_bytes = f\"{count:,}/{total:,} Bytes\"\n transfer_percent = round(100.0 * count / float(total), 2)\n file_bar = '=' * filled_len + '-' * (bar_len - filled_len)\n\n prefix = f\"[{self.LOGGER.host}:{self.LOGGER.port}]\"\n sys.stdout.write(f\"{prefix} -> |{file_bar}| {file_size_bytes} | {transfer_percent}% | {status}...\\r\")\n sys.stdout.flush()\n\n if count >= total: print()", "def progress(count, total):\r\n bar_len = 45\r\n filled_len = int(round(bar_len * count / float(total)))\r\n\r\n percents = round(100 * count / float(total), 1)\r\n p_bar = '=' * filled_len + '.' 
* (bar_len - filled_len)\r\n try:\r\n sys.stdout.write(' File {} of {} [{}] {}{}\\r'.format(count, total, p_bar, percents, '%'))\r\n except:\r\n pass\r\n sys.stdout.flush()", "def print_progress(self, index):\r\n if not self.verbose:\r\n return\r\n elapsed_time = time.time() - self._start_time\r\n\r\n # This is heuristic code to print only 'verbose' times a messages\r\n # The challenge is that we may not know the queue length\r\n if self._original_iterable:\r\n if _verbosity_filter(index, self.verbose):\r\n return\r\n self._print('Done %3i jobs | elapsed: %s',\r\n (index + 1,\r\n short_format_time(elapsed_time),\r\n ))\r\n else:\r\n # We are finished dispatching\r\n queue_length = self.n_dispatched\r\n # We always display the first loop\r\n if not index == 0:\r\n # Display depending on the number of remaining items\r\n # A message as soon as we finish dispatching, cursor is 0\r\n cursor = (queue_length - index + 1\r\n - self._pre_dispatch_amount)\r\n frequency = (queue_length // self.verbose) + 1\r\n is_last_item = (index + 1 == queue_length)\r\n if (is_last_item or cursor % frequency):\r\n return\r\n remaining_time = (elapsed_time / (index + 1) *\r\n (self.n_dispatched - index - 1.))\r\n self._print('Done %3i out of %3i | elapsed: %s remaining: %s',\r\n (index + 1,\r\n queue_length,\r\n short_format_time(elapsed_time),\r\n short_format_time(remaining_time),\r\n ))", "def update_progress(self, value=None):\n if self.main_app is not None:\n if value is not None:\n self.main_app.update_progress(value)\n else:\n if self.total_files != 0:\n self.main_app.update_progress((self.current_file / self.total_files) * 100)", "def fetch_progress(self):\n threads = len(opts.thread)\n files = len(self.files)\n t_width = len(str(threads))\n f_width = len(str(files))\n\n t_progress = f\"[{self.pos: >{t_width}}/{threads}]\"\n f_progress = f\"[{self.count: >{f_width}}/{files}]\"\n\n if self.count:\n progress = f\"{t_progress} {f_progress}\"\n else:\n progress = t_progress\n\n return 
progress", "def update_progressbar(self, count, value):\n self.status(\"Progress %s/%s\" % (value, count))", "def print_file_stats(self):\n\n # current epoch time, file number, filename, filesize, trans secs, status\n print(f\"TRANS_STATS_FILE: {time.time()} {self.batchvals['numfiles']} {self.filevals['filename']} {self.filevals['numbytes']} {self.filevals['end_time'] - self.filevals['start_time']} {self.filevals['status']}\")", "def _count_progress(self, count, done=False):\n if self.blank:\n return\n self.current_count = count\n now = time.time()\n if now - self.last_time < 1 and not done:\n return\n self.f.write('\b'*len(str(self.last_count))+str(count))\n self.f.flush()\n self.last_count = count\n self.last_time = now\n if done:\n self.f.write('\\n')\n self.f.flush()", "def print_progress(self, i, current_params):\n for split in range(1,11):\n if i == (round(self.iterations/10*split)-1):\n post = -self.full_neg_posterior(current_params)\n approx = self.create_normal_logq(current_params)\n diff = post - approx\n if not self.quiet_progress:\n print(str(split) + \"0% done : ELBO is \" + str(diff) + \", p(y,z) is \" + str(post) + \", q(z) is \" + str(approx))", "def update_progress(self, done):\r\n if done % 100 == 0:\r\n print >>sys.stderr, \" %d processed, run time %d secs\" % (done, (datetime.now() - self.started_at).seconds)", "def _download_progress(count, block_size, total_size):\n #pylint: disable=unused-argument\n if count == 0:\n return\n duration = time.time() - start_time\n progress_size = int(count * block_size)\n speed = int(progress_size / (1024 * duration))\n percent = min(int(count * block_size * 100 / total_size), 100)\n sys.stdout.write(\"\\r...%d%%, %d MB, %d KB/s, %d seconds passed\" %\n (percent, progress_size / (1024 * 1024), speed, duration))\n sys.stdout.flush()", "def print_progress(self, i, current_params):\n for split in range(1,11):\n if i == (round(self.iterations/10*split)-1):\n post = -self.neg_posterior(current_params)\n approx = 
self.create_normal_logq(current_params)\n diff = post - approx\n if not self.quiet_progress:\n print(str(split) + \"0% done : ELBO is \" + str(diff) + \", p(y,z) is \" + str(post) + \", q(z) is \" + str(approx))", "async def progress(current, total, event, start, type_of_ps, file_name=None):\n now = time.time()\n diff = now - start\n if round(diff % 10.00) == 0 or current == total:\n percentage = current * 100 / total\n speed = current / diff\n elapsed_time = round(diff) * 1000\n time_to_completion = round((total - current) / speed) * 1000\n estimated_total_time = elapsed_time + time_to_completion\n progress_str = \"{0}{1} {2}%\\n\".format(\n \"\".join(\"■\" for i in range(math.floor(percentage / 10))),\n \"\".join(\"□\" for i in range(10 - math.floor(percentage / 10))),\n round(percentage, 2),\n )\n tmp = progress_str + \"{0} of {1}\\nETA: {2}\".format(\n humanbytes(current), humanbytes(total), time_formatter(estimated_total_time)\n )\n if file_name:\n await event.edit(\n \"{}\\nFile Name: `{}`\\n{}\".format(type_of_ps, file_name, tmp)\n )\n else:\n await event.edit(\"{}\\n{}\".format(type_of_ps, tmp))", "def _printProgress(self, progress):\n if not self._quiet:\n sys.stdout.write('\\rWriting store to CSV: [{0:50s}] {1:.2f}% '.format('#' * int(progress * 50.0), progress * 100.0))\n sys.stdout.flush()", "def _download_progress(count, block_size, total_size):\n #pylint: disable=unused-argument\n if count == 0:\n return\n duration = time.time() - start_time\n progress_size = int(count * block_size)\n speed = int(progress_size / (1024 * duration))\n percent = min(int(count * block_size * 100 / total_size), 100)\n sys.stdout.write(\"\\r...%d%%, %.2f MB, %d KB/s, %d seconds passed\" %\n (percent, progress_size / (1024.0 * 1024), speed, duration))\n sys.stdout.flush()", "def print_progress(self, info_dict):\n if self.n_print != 0:\n t = info_dict['t']\n if t == 1 or t % self.n_print == 0:\n string = 'Iteration {0}'.format(str(t).rjust(len(str(self.n_iter))))\n string += 
' [{0}%]'.format(str(int(t / self.n_iter * 100)).rjust(3))\n print(string)", "def download_add_progress(self, nfiles, nbytes):\n\n self.dl_cur_nbytes += nbytes\n self.dl_cur_nfiles += nfiles\n if self.dl_started:\n if self.dl_goal_nbytes != 0:\n self.dl_output()\n elif self.republish_started:\n if self.dl_goal_nbytes != 0:\n self.republish_output()", "def _print_progress(self):\n if self.current_training_size % 1000 == 0:\n print(self.current_training_size, end='')\n elif self.current_training_size % 100 == 0:\n print('.', end='')", "def _print_progress(counter):\n\tif(slogviz.config.interactive):\n\t\tprint('parse log file entry nr: {}'.format(counter),end='\\r')", "def progress_func(completed, total):\n if not self.log:\n return\n dots = (completed * dot_count) / total\n if dots > dot_count:\n dots = dot_count\n self.progress_lock.acquire()\n if self.dots_written < dot_count:\n dots_to_write = dots - self.dots_written\n self.dots_written = dots\n os.write(old_stdout, '.' * dots_to_write)\n self.progress_lock.release()", "def _dl_progress_bar(self):\n if not self.show_progress:\n return\n\n if self.file_size:\n ratio = float(self.bytes_read) / self.file_size\n else:\n ratio = 1\n percent = int(ratio * 100)\n\n bar_len = 60\n done = int(bar_len * ratio)\n bar = ('=' * done) + (' ' * (bar_len - done))\n\n progress = '{percent: >3}%: [{bar}]'.format(percent=percent, bar=bar)\n backspace = '\\b' * len(progress)\n print(backspace + '\\r', end='')\n print(progress, end='')", "def OnProgress(bytes_read, total_bytes, percent):\n sys.stdout.write(\"progress: %.2f%% \\r\" % (percent))\n sys.stdout.flush()", "def file_progress_sig_handler(self, bytes_read: int):\n # Increment the bytes read\n self.file_bytes_read += bytes_read\n\n # Update the progress bar\n self.fileAnalyzeProgressBar.setValue(self.file_bytes_read)\n\n logging.debug(\"Analyzing File Progress: \" + str(self.file_bytes_read))", "def progressbar(self, complete = 0.0):\n\n if self.is_subprocess:\n 
sys.stderr.write(\"%f\\n\" % complete)\n sys.stderr.flush()\n else:\n gdal.TermProgress_nocb(complete)", "def _printProgressBar(self, fractionComplete):\n import sys\n nInc = 50\n count = int(nInc * fractionComplete)\n proBar = \"|\"\n for i in range(nInc):\n if i < count:\n proBar += \"-\"\n else:\n proBar += \" \"\n proBar += \"|\"\n print((proBar, int(fractionComplete * 100), \"%\\r\",))\n sys.stdout.flush()\n\n return", "def print_progress(self):\n\n if not self.verbose:\n return\n\n elapsed_time = time.time() - self._start_time\n\n if self._is_completed():\n # Make sure that we get a last message telling us we are done\n self._print(\n f\"Done {self.n_completed_tasks:3d} out of \"\n f\"{self.n_completed_tasks:3d} | elapsed: \"\n f\"{short_format_time(elapsed_time)} finished\"\n )\n return\n\n # Original job iterator becomes None once it has been fully\n # consumed : at this point we know the total number of jobs and we are\n # able to display an estimation of the remaining time based on already\n # completed jobs. 
Otherwise, we simply display the number of completed\n # tasks.\n elif self._original_iterator is not None:\n if _verbosity_filter(self.n_dispatched_batches, self.verbose):\n return\n self._print(\n f\"Done {self.n_completed_tasks:3d} tasks | elapsed: \"\n f\"{short_format_time(elapsed_time)}\"\n )\n else:\n index = self.n_completed_tasks\n # We are finished dispatching\n total_tasks = self.n_dispatched_tasks\n # We always display the first loop\n if not index == 0:\n # Display depending on the number of remaining items\n # A message as soon as we finish dispatching, cursor is 0\n cursor = (total_tasks - index + 1 -\n self._pre_dispatch_amount)\n frequency = (total_tasks // self.verbose) + 1\n is_last_item = (index + 1 == total_tasks)\n if (is_last_item or cursor % frequency):\n return\n remaining_time = (elapsed_time / index) * \\\n (self.n_dispatched_tasks - index * 1.0)\n # only display status if remaining time is greater or equal to 0\n self._print(\n f\"Done {index:3d} out of {total_tasks:3d} | elapsed: \"\n f\"{short_format_time(elapsed_time)} remaining: \"\n f\"{short_format_time(remaining_time)}\"\n )", "def click(self, current_idx, max_idx, total_length=40):\n if self.start_time is None:\n self.start_time = time.time()\n else:\n self.time = time.time()-self.start_time\n self.iter_per_sec = 1/self.time\n perc = current_idx * total_length / max_idx\n # print progress bar\n print '\\r|'+'='*perc+'>'+' '*(total_length-1-perc)+'| %d/%d (%.2f iter/s)' % (current_idx+1,\n max_idx,\n self.iter_per_sec),\n self.start_time = time.time()", "def progress(count, total, status=''):\n bar_len = 60\n filled_len = int(round(bar_len * count / float(total)))\n\n percents = round(100.0 * count / float(total), 1)\n bar = '=' * filled_len + '-' * (bar_len - filled_len)\n \n if count >= total: \n sys.stdout.write('[%s] %s%s ...%s%s\\r' % (bar, percents, '%', status, '\\n'))\n sys.stdout.flush()\n else:\n sys.stdout.write('[%s] %s%s ...%s\\r' % (bar, percents, '%', status))\n 
sys.stdout.flush()" ]
[ "0.59621894", "0.58859885", "0.58060676", "0.57941943", "0.5504679", "0.54205585", "0.5397171", "0.5359194", "0.53416574", "0.53402764", "0.53393024", "0.53196186", "0.5318342", "0.5314393", "0.5307423", "0.530685", "0.5302948", "0.52970344", "0.52880377", "0.52654934", "0.5262406", "0.5241219", "0.5228907", "0.5210244", "0.51849896", "0.5177761", "0.51770574", "0.5133892", "0.51308036", "0.5104675" ]
0.84238493
0
read blacklist file and keep in the set. \param[in] _blacklist_fname blacklist filename
def read_blacklist(self, _blacklist_fname): try: blacklist_f = codecs.open(_blacklist_fname, mode='r', encoding='utf-8') line_idx = 0 for fline in blacklist_f: line_idx = line_idx + 1 line = fline.strip() if ((len(line) > 0) and (line[0] != '#')): # non null and not started # line ... add to the set if (line in self.__black_list_set): print u'duplication found [' + line + u'] at ' + str(line_idx) + \ u' ignored' else: self.__black_list_set.add(line) print u'read blacklist_file [' + _blacklist_fname + \ u'], number of entries: ' + str(len(self.__black_list_set)) except IOError as e: print "I/O error({0}): {1}".format(e.errno, e.strerror) print "Can not open a blacklist file {0}".format(_blacklist_fname) print "Please create blacklist file (an empty file is also fine.)" sys.exit(1) except: print "Unexpected error:", sys.exc_info()[0] raise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def blacklist_file(self, fkey):\n self.blacklist.update([fkey])", "def open_blacklist(filepath):\n with open(filepath, 'r') as f:\n blacklist = [tuple(line.strip().split('\\t')) for line in f.readlines()]\n return blacklist", "def ReadBlackListFile(BlackListFile):\n blacklist = []\n if os.path.isfile(BlackListFile):\n with open(BlackListFile, 'r') as filecontent:\n for line in filecontent:\n #(chrom1, start1, chrom2, start2) = line.rstrip().split(\"\\t\")\n blacklist.append(line)\n return(blacklist)", "def getBlackList(filename):\n #filename = \"filelist/blacklist_%s.txt\"%dataset.lstrip('/').replace('/','__')\n blacklist = [ ]\n if os.path.exists(filename):\n with open(filename,'r') as file:\n for line in file:\n line = line.rstrip('\\n')\n if line and '#' not in line:\n blacklist.append(line)\n return blacklist", "def validate_file_blacklist(blacklist):\n valid_values = [\n # 'checkpoint',\n \"description\",\n \"heartbeat\",\n \"predictions_holdout\",\n \"predictions_in_fold\",\n \"predictions_oof\",\n \"predictions_test\",\n \"script_backup\",\n \"tested_keys\",\n \"current_heartbeat\",\n ]\n if blacklist == \"ALL\":\n G.warn('WARNING: Received `blacklist`=\"ALL\". 
Nothing will be saved')\n return blacklist\n\n if not blacklist:\n return []\n elif not isinstance(blacklist, list):\n raise TypeError(\"Expected blacklist to be a list, not: {}\".format(blacklist))\n elif not all([isinstance(_, str) for _ in blacklist]):\n invalid_files = [(type(_).__name__, _) for _ in blacklist if not isinstance(_, str)]\n raise TypeError(\"Expected blacklist contents to be strings, not: {}\".format(invalid_files))\n\n for a_file in blacklist:\n if a_file not in valid_values:\n raise ValueError(f\"Invalid blacklist value: {a_file}.\\nExpected one of: {valid_values}\")\n if a_file in [\"description\", \"tested_keys\"]:\n G.warn(f\"Including {a_file!r} in blacklist will severely impede library functionality\")\n\n # Blacklist experiment-specific heartbeat if general (current) heartbeat is blacklisted\n if (\"current_heartbeat\" in blacklist) and (\"heartbeat\" not in blacklist):\n blacklist.append(\"heartbeat\")\n\n return blacklist", "def _parse_blacklist(path):\n if path is None:\n return []\n with open(path, 'rt') as f:\n return [line.strip() for line in f]", "def load_blacklist(experiment):\n blacklist = np.loadtxt('../Slip_Property_Data/%s_blacklist.txt'%experiment)\n return blacklist", "def load_blacklist(experiment):\n blacklist = np.loadtxt('../Slip_Property_Data/%s_blacklist.txt'%experiment)\n return blacklist", "def blacklisted_file_patterns():\n with open(\"patterns.txt\", 'r') as f:\n lines = [line.strip() for line in f if line.strip()]\n return set(lines)", "def read_whitelist(whitelist_file):\n\n # create an empty dictionary to store the white lists\n whitelistdict = {}\n with open(whitelist_file, 'r') as fp:\n for line in fp:\n whitelistdict[line.strip()] = True\n\n # return the list of whitelist\n return whitelistdict.keys()", "def load_blocked_groups(self):\n print(\" ->[*] Loading group blacklist...\")\n blacklist = set()\n if os.access(\"blocked_groups\", os.F_OK):\n with codecs.open(\"blocked_groups\", \"r\", 
encoding=\"utf-8\") as groups:\n blocked_groups = groups.readlines()\n for group in blocked_groups:\n blacklist.add(group)\n return blacklist", "def load_blacklists(self, blacklist_urls: List[str]) -> Set[str]:\n items: Set[str] = set()\n sets = []\n for url in blacklist_urls:\n fname = f'/tmp/{hashlib.md5(url.encode()).hexdigest()}-blacklist'\n # Download if necessary\n if not os.path.exists(fname):\n log.debug(f'Downloading {url} -> {fname}')\n urlretrieve(url, fname)\n\n fset = set(line.strip() for line in open(fname))\n log.debug(f'Got {len(fset)} records from \\t {url}')\n sets.append(fset)\n\n log.debug(f'Total: {sum(len(s) for s in sets)} records')\n items = items.union(*sets)\n log.debug(f'Aggregated into {len(items)} records')\n return items", "def remove_blacklisted(blacklist, ssc, output_dir):\n print('[INFO] Writing blacklisted corpus to {}...'.format(output_dir))\n # assuming there is only 1 SSC, so take index 0\n ssc_filepaths = list(get_filepaths(ssc))[0]\n # for faster lookup\n blacklist = set(blacklist)\n for filepath in ssc_filepaths:\n with open(filepath, 'r') as f:\n # remove blacklisted entities\n lines = f.readlines()\n for i in range(1, len(lines) - 1):\n previous_tag = 'O' if lines[i-1] == '\\n' else lines[i-1].strip().split('\\t')[1]\n next_tag = 'O' if lines[i+1] == '\\n' else lines[i+1].strip().split('\\t')[1]\n single_token_entity = (previous_tag != 'I-' and next_tag != 'I-')\n blacklisted = tuple(lines[i].strip().split('\\t')) in blacklist\n if single_token_entity and blacklisted:\n lines[i] = '{}\\tO\\n'.format(lines[i].split('\\t')[0])\n # write blacklisted copy to disk\n corpus_name = os.path.basename(ssc) + '_blacklisted'\n output_directory = os.path.join(output_dir, corpus_name)\n make_dir(output_directory)\n output_filepath = os.path.join(output_directory, os.path.basename(filepath))\n with open(output_filepath, 'w') as f:\n for line in lines:\n f.write(line)", "def saveFileListLocal(dataset,filelist,blacklist=[ ],tag=\"\"):\n if 
'/pnfs/' in dataset:\n tag += \"_pnfs\"\n dataset = '__'.join(dataset.split('/')[-3:])\n filename = \"filelist/filelist_%s%s.txt\"%(dataset.replace('/','__'),tag)\n with open(filename,'w+') as file:\n for line in filelist:\n if line not in blacklist:\n file.write(line+'\\n')\n return filename", "def whitelist_file(self, fkey):\n self.whitelist.update([fkey])", "def save_blacklist(blacklist, output_dir):\n output_filepath = os.path.join(output_dir, 'blacklist.txt')\n print('[INFO] Writing blacklist to {}...'.format(output_filepath))\n with open(output_filepath, 'w') as f:\n for ent in blacklist:\n f.write('{}\\t{}\\n'.format(ent[0], ent[1]))", "def handler(state, _):\n if state[0] == 'u':\n fname = player.playlist[player.playlist_pos]['filename']\n fkey = get_file_key(fname)\n col.blacklist_file(fkey)\n player.playlist_remove()\n os.remove(fname)\n print('Blacklisted: {}'.format(fname))", "def getFileListLocal(dataset,blacklist=[ ],tag=\"\"):\n if '/pnfs/' in dataset:\n tag += \"_pnfs\"\n dataset = '__'.join(dataset.split('/')[-3:])\n filename = \"filelist/filelist_%s%s.txt\"%(dataset.lstrip('/').replace('/','__'),tag)\n filelist = [ ]\n if os.path.exists(filename):\n with open(filename,'r') as file:\n for line in file:\n line = line.rstrip('\\n')\n if line and '#' not in line and line not in blacklist:\n filelist.append(line.rstrip('\\n'))\n return filelist", "def importBrainstormBrickFile(filename):\n #init the list with all bricks in the file\n allBricks = []\n \n #open the brainstorming words file and read the lines\n with open(filename, 'r') as fp:\n lines = fp.readlines()\n \n #cycle strip and clean the lines and add them to the set\n for curLine in lines:\n if curLine.startswith('Enter one user'):\n continue\n if curLine.strip():\n allBricks.append( curLine.strip().lower() )\n \n return allBricks", "def is_blacklisted(fname):\n return is_dot(fname) or is_excluded_filetype(fname)", "def blacklist(self) -> List[str]:\n return self.raw_config.get(\"blacklist\", 
[])", "def apply_tempest_blacklist(self, black_list):\n LOGGER.debug(\"Applying tempest blacklist...\")\n if os.path.exists(self.raw_list):\n os.remove(self.raw_list)\n os.rename(self.list, self.raw_list)\n cases_file = self.read_file(self.raw_list)\n with open(self.list, 'w', encoding='utf-8') as result_file:\n black_tests = []\n try:\n deploy_scenario = env.get('DEPLOY_SCENARIO')\n if bool(deploy_scenario):\n # if DEPLOY_SCENARIO is set we read the file\n with open(black_list, encoding='utf-8') as black_list_file:\n black_list_yaml = yaml.safe_load(black_list_file)\n black_list_file.close()\n for item in black_list_yaml:\n scenarios = item['scenarios']\n in_it = rally.RallyBase.in_iterable_re\n if in_it(deploy_scenario, scenarios):\n tests = item['tests']\n black_tests.extend(tests)\n except Exception: # pylint: disable=broad-except\n black_tests = []\n LOGGER.debug(\"Tempest blacklist file does not exist.\")\n\n for cases_line in cases_file:\n for black_tests_line in black_tests:\n if re.search(black_tests_line, cases_line):\n break\n else:\n result_file.write(str(cases_line) + '\\n')", "def is_blacklisted(self, fkey):\n return fkey in self.blacklist", "def blacklist_handler(col, player):\n def handler(state, _):\n \"\"\"\n Retains the current file in the player in the collection's blacklist\n and removes it from the airlock. 
The player is then advanced to the\n next file.\n \"\"\"\n if state[0] == 'u':\n fname = player.playlist[player.playlist_pos]['filename']\n fkey = get_file_key(fname)\n col.blacklist_file(fkey)\n player.playlist_remove()\n os.remove(fname)\n print('Blacklisted: {}'.format(fname))\n return handler", "def load_bnf_file(filepath, repository = None):\r\n linelist = []\r\n with open(filepath,'r') as mlfile:\r\n for line in mlfile:\r\n linelist.append(line)\r\n return strlist_to_production_set(linelist, repository)", "def namespace_resource_blacklist(self, namespace_resource_blacklist):\n\n self._namespace_resource_blacklist = namespace_resource_blacklist", "def read_from_file(fname,verb_list,w_list):\n\t#print str(fname)\n\tstemmer=SnowballStemmer('english')\n\t\n\tverb_list=map(str,map(stemmer.stem,verb_list))\n\ttmpword_list=remove_stopwords(map(str,map(stemmer.stem,w_list)))\n\tword_list=[]\n\tfor i in tmpword_list:\n\t\tword_list.extend(i.split('_'))\n\tprint verb_list,word_list\n\tfilter_svo_list=[]\n\tprint \"svo loading...\"\n\twith open(fname,'r') as f:\n\t\tsvo_lst=pickle.load(f)\n\tprint \"svo loading completed\"\n\t#print verb_list,word_list\n\t#with open(fname) as fp:\n\tfor verb in verb_list:\n\t\tfor tmp in svo_lst:\n\t\t\tif verb in tmp[1]:\n\t\t\t\t#chk the statement\n\t\t\t\tif lst1_in_lst2(tmp[0],word_list) and lst1_in_lst2(tmp[2],word_list):\n\t\t\t\t\tfilter_svo_list.append(tmp)\n\tprint \"searching completed\"\n\treturn filter_svo_list", "def _stopwords():\n global _stopword_set\n if _stopword_set:\n return _stopword_set\n f_name = \"stopword.list\"\n if os.path.isfile(f_name):\n res = set()\n with open(f_name) as f:\n for line in f:\n res.add(line.strip())\n _stopword_set = res\n return res\n else:\n error(\"stop words - not a file: %s\" % f_name)", "def blacklist_add():\n db = unitdata.kv()\n blacklist = db.get(BLACKLIST_KEY, [])\n for device in get_devices():\n if not os.path.exists(device):\n raise Error('{}: No such file or 
directory.'.format(device))\n if device not in blacklist:\n blacklist.append(device)\n db.set(BLACKLIST_KEY, blacklist)\n db.flush()", "def fromfile(cls, f):\n raise NotImplementedError(\"ScalableRedisLocalBloomFilter not support fromfile\")" ]
[ "0.74668455", "0.7435937", "0.7226039", "0.71409386", "0.69012386", "0.68086356", "0.65102315", "0.65102315", "0.6492251", "0.6445948", "0.6428848", "0.6298281", "0.62561387", "0.6169755", "0.6096331", "0.60444635", "0.59310824", "0.58466583", "0.58374816", "0.58168995", "0.57231987", "0.5686371", "0.5663551", "0.5646089", "0.5634171", "0.56154835", "0.5590461", "0.5568086", "0.55527157", "0.55283666" ]
0.85046655
0
check _fname has writer description
def has_writer_description(self, _fname): # print u'info: checking ['+ _fname + u']' # # It seems some filename is not considered unicode (even I # made a unicode string with u'', so make sure the encoding is # utf-8. For example, Lemprière's_Bibliotheca_Classica can not # be passed in urlopen. 2012-7-29(Sun) Hitoshi # data = urllib2.urlopen(_fname.encode('utf-8')).read() soup = BeautifulSoup(data) # check persondata table persondata_tab = soup.find_all('table', { "id" : "persondata" }) if (persondata_tab != None): for pd in persondata_tab: # pd.text mat = self.__author_regex.search(pd.text) if(mat != None): #print u'debug: found in persondata: ' + mat.string[mat.start():mat.end()] return True # check category link catlinks = soup.find_all('div', { "id" : "mw-normal-catlinks" }) if ((catlinks != None) and (len(catlinks) > 0)): for cat in catlinks[0].find_all('a'): # print cat.text mat = self.__author_regex.search(cat.text) if(mat != None): # print u'debug: found in category: ' + mat.string[mat.start():mat.end()] return True return False
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def has_filename(self):\n if self.filename == \"untitled\":\n return False\n else:\n return True", "def test_is_check_filename(self):\n self.assertTrue(check_filename('sample.csv'))", "def check_empty_desc_file(out):\n return MISSING_RESOURCE in out.lower()", "def test_is_check_filename_False(self):\n self.assertFalse(check_filename('sample.txt'))", "def checkExist(self,fname,status):\n\n if (self.status == \"r\"):\n # Checks to see if it exists for reading\n # Which means it must be present\n\n if (not (os.path.exists(self.fname))):\n print(f\"Couldn't open input file: {self.fname}\")\n return False\n else:\n # Check to see if exists for reading\n # (i.e. must not exist)\n if (os.path.exists(self.fname)):\n print(f\"File {self.fname} already exists.\")\n return False\n\n return True", "def checkExist(self,fname,status):\n\n if (self.status == \"r\"):\n # Checks to see if it exists for reading\n # Which means it must be present\n\n if (not (os.path.exists(self.fname))):\n print(f\"Couldn't open input file: {self.fname}\")\n return False\n else:\n # Check to see if exists for reading\n # (i.e. 
must not exist)\n if (os.path.exists(self.fname)):\n print(f\"File {self.fname} already exists.\")\n return False\n\n return True", "def is_writable_file(obj):\n try:\n obj.write(\"\")\n except(AttributeError, OSError, IOError):\n return False\n else:\n return True", "def isValidFeatureWriter(klass):\n if not isclass(klass):\n logger.error(\"%r is not a class\", klass)\n return False\n if not hasattr(klass, \"tableTag\"):\n logger.error(\"%r does not have required 'tableTag' attribute\", klass)\n return False\n if not hasattr(klass, \"write\"):\n logger.error(\"%r does not have a required 'write' method\", klass)\n return False\n if getfullargspec(klass.write).args != getfullargspec(BaseFeatureWriter.write).args:\n logger.error(\"%r 'write' method has incorrect signature\", klass)\n return False\n return True", "def is_new_file(self):\n return self.filename is None", "def testInitialize(self):\n file_writer = writers.FileWriter()\n self.assertIsNotNone(file_writer)", "def has_fileout(self):\n return self.fileout is not None", "def checkFilename(self):\r\n \r\n #all this should be in the view\r\n\r\n print(\"working directory \", self.path) \r\n print(\"If you'd like to use another directory/folder, please include the full path with the filename.\")\r\n #should i let users change working directory or just put it in the file path\r\n print(\"checking filename \", self.filename)\r\n\r\n if not os.path.isfile(self.filename):\r\n print(\"this is not an existing file\")\r\n createYN = (input(\"create it? 
y/n \")).upper()\r\n if createYN=='Y':\r\n self.createFile()\r\n self.getHeaderDict()\r\n\r\n else: # create file = NO\r\n headerDict = {} #create an empty dictionary\r\n self.loadDictRow(keystring = '') #this will create keys but not values\r\n\r\n else:\r\n \"\"\"\r\n Check to see if the first row is headers, and second row is Test Router\r\n \"\"\"\r\n print(\"this is an existing file\")\r\n self.getHeaderDict()", "def test_valid_file_name(self):\n Base.save_to_file([self.r0, self.s1])\n self.assertTrue(path.exists('Base.json'))", "def valid_file(fname):\r\n try:\r\n if os.stat(fname).st_size > 0: # if filename contains data\r\n return \"0\"\r\n else:\r\n return \"Selected file is empty....please reenter\"\r\n except OSError:\r\n return \"Can not find the file....please reenter\"", "def _verify_descriptors(self, msg):\n self.assertTrue(is_writable_file(msg.chlderr))\n self.assertTrue(is_writable_file(msg.chldout))\n self.assertTrue(is_writable_file(msg.chldnul))", "def has_file(self, doc):\n return len(doc.package.files) != 0", "def check_file_exist(self):\n return False", "def file_exist() -> bool:\n pass", "def has_doc() -> None:", "def test_incomplete_outfile(self):\n outfile = os.path.join(data_dir, 'incomplete', 'incomplete.log')\n self.assertRaises(grinder.NoTestNames, grinder.Report, 60, outfile)", "def is_file_exist(self):\n return os.path.isfile(os.path.join(self.output_path, 'amr_corpus_ext.pickle'))", "def _is_valid_unique_fname(self, fname):\n return (fname.startswith(self._lockfilename)\n and len(fname) > len(self._lockfilename))", "def object_exists(self, fname):\n return False", "def is_data_by_filename(fname):\n return \"Run2017\" in fname", "def test_fname():\n\n assert fname('data', 'json') == 'data.json'\n assert fname('data.json', 'json') == 'data.json'\n assert fname('pic', 'png') == 'pic.png'\n assert fname('pic.png', 'png') == 'pic.png'\n assert fname('report.pdf', 'pdf') == 'report.pdf'\n assert fname('report.png', 'pdf') == 
'report.png'", "def __is_file_eligible_to_scan(cls, path_to_test):\n return path_to_test.endswith(\".md\")", "def _validateFilename(self, filePath):\n # assert True\n raise NotImplementedError", "def is_declaring_file(self, address, file_path):", "def object_exists(self, fname):\n return True", "def is_file_exists(self):\n pass" ]
[ "0.6337745", "0.6027433", "0.60234314", "0.5945401", "0.5772521", "0.5772521", "0.5694627", "0.5692084", "0.56618637", "0.5653594", "0.56347966", "0.5584619", "0.5582658", "0.55805594", "0.5580391", "0.556575", "0.5557586", "0.5545394", "0.5540392", "0.55333614", "0.5510318", "0.55102223", "0.5502573", "0.5495531", "0.5493705", "0.5493363", "0.5492546", "0.5491416", "0.54835355", "0.5480879" ]
0.73286253
0
get vector from scanning directory. \param[in] _input_dir input files' directory \param[in] _output_file output file name
def get_vector(self, _input_dir, _output_file): if (not os.path.exists(_input_dir)): raise StandardError, ('No such input directory [' + _input_dir + ']') if (os.path.exists(_output_file)): raise StandardError, ('Output file exists [' + _output_file + ']') try: starttime = time.time() outfile = codecs.open(_output_file, mode='w', encoding='utf-8') outfile.write(u'#FileVectorExtractor 0\n'); outfile.write(u'# generated by FileVectorExtractor. (C) Hitoshi 2012-2013\n'); outfile.write(u'# input directory [' + _input_dir + ']\n'); os.chdir(_input_dir) flist = os.listdir(".") file_count = len(flist) cur_file_idx = 0 flist.sort() for fname in flist: if (not os.path.isfile(fname)): continue cur_file_idx = cur_file_idx + 1 self.__show_progress(cur_file_idx, file_count) ufname = unicode(fname, encoding='utf-8', errors='strict') # if (ufname != u"Lemprière's_Bibliotheca_Classica"): # DEBUG # continue if (ufname in self.__black_list_set): print ufname, 'is in the blacklist. continue.' continue # in the blacklist url = u'file:///' + os.path.join(_input_dir, ufname) if (self.has_writer_description(url)): self.__author_list.append(ufname) # print u'found author [' + ufname + ']' else: print u'info: seems not an author [' + ufname + ']' # output for auth in self.__author_list: outfile.write(auth + u'\n'); elapsedtime = time.time() - starttime print u'info: elapsed time = {0}'.format(elapsedtime) except IOError as e: print "I/O error({0}): {1}".format(e.errno, e.strerror) print "Can not open a output file {0}".format(_output_file) sys.exit(1) except: print "Unexpected error:", sys.exc_info()[0] raise
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def merge_vec_files(vec_directory, output_vec_file):\n\n\t# Check that the .vec directory does not end in '/' and if it does, remove it.\n\tif vec_directory.endswith('/'):\n\t\tvec_directory = vec_directory[:-1]\n\t# Get .vec files\n\tfiles = glob.glob('{0}/*.vec'.format(vec_directory))\n\n\t# Check to make sure there are .vec files in the directory\n\tif len(files) <= 0:\n\t\tprint('Vec files to be mereged could not be found from directory: {0}'.format(vec_directory))\n\t\tsys.exit(1)\n\t# Check to make sure there are more than one .vec files\n\tif len(files) == 1:\n\t\tprint('Only 1 vec file was found in directory: {0}. Cannot merge a single file.'.format(vec_directory))\n\t\tsys.exit(1)\n\n\n\t# Get the value for the first image size\n\tprev_image_size = 0\n\ttry:\n\t\twith open(files[0], 'rb') as vecfile:\n\t\t\tcontent = b''.join((line) for line in vecfile.readlines())\n\t\t\tval = struct.unpack('<iihh', content[:12])\n\t\t\tprev_image_size = val[1]\n\texcept IOError as e:\n\t\tprint('An IO error occured while processing the file: {0}'.format(f))\n\t\texception_response(e)\n\n\n\t# Get the total number of images\n\ttotal_num_images = 0\n\tfor f in files:\n\t\ttry:\n\t\t\twith open(f, 'rb') as vecfile:\n\t\t\t\tcontent = b''.join((line) for line in vecfile.readlines())\n\t\t\t\tval = struct.unpack('<iihh', content[:12])\n\t\t\t\tnum_images = val[0]\n\t\t\t\timage_size = val[1]\n\t\t\t\tif image_size != prev_image_size:\n\t\t\t\t\terr_msg = \"\"\"The image sizes in the .vec files differ. These values must be the same. 
\\n The image size of file {0}: {1}\\n\n\t\t\t\t\t\tThe image size of previous files: {0}\"\"\".format(f, image_size, prev_image_size)\n\t\t\t\t\tsys.exit(err_msg)\n\n\t\t\t\ttotal_num_images += num_images\n\t\texcept IOError as e:\n\t\t\tprint('An IO error occured while processing the file: {0}'.format(f))\n\t\t\texception_response(e)\n\n\n\t# Iterate through the .vec files, writing their data (not the header) to the output file\n\t# '<iihh' means 'little endian, int, int, short, short'\n\theader = struct.pack('<iihh', total_num_images, image_size, 0, 0)\n\ttry:\n\t\twith open(output_vec_file, 'wb') as outputfile:\n\t\t\toutputfile.write(header)\n\n\t\t\tfor f in files:\n\t\t\t\twith open(f, 'rb') as vecfile:\n\t\t\t\t\tcontent = b''.join((line) for line in vecfile.readlines())\n\t\t\t\t\toutputfile.write(bytearray(content[12:]))\n\texcept Exception as e:\n\t\texception_response(e)", "def treat(input, output):\n files = find(input)\n acc = []\n for file in files:\n fileInfo = extract(file)\n out = makeOutputPath(output, fileInfo[\"path\"], fileInfo[\"filename\"])\n if not out == None:\n fileInfo[\"outPath\"] = out\n acc += [fileInfo]\n return acc", "def main(input_dir, output_dir):\n\n process(input_dir, output_dir)", "def create_sample_vectors(cleaned_data_directory, out_vectors_path):\n vectors = []\n\n for filename in os.listdir(cleaned_data_directory):\n if not filename.endswith(\".txt\"):\n continue\n\n path = os.path.join(cleaned_data_directory, filename)\n f = open(path, mode='r', encoding='utf8')\n\n print(\"Processing\", path)\n\n lang = filename[:2]\n lang_number = language_codes.index(lang)\n\n print(f\"\\tLanguage: {lang} ({lang_number})\")\n print(\"\\tReading...\", end=' ')\n\n file_content = f.read()\n content_length = len(file_content)\n\n print(\"done.\")\n print(\"\\tExtracting vectors...\", end=' ')\n\n sample_start_index = 0\n count = 0\n\n while sample_start_index + text_sample_size < content_length:\n sample = get_sample(file_content, 
sample_start_index, text_sample_size)\n input_vector = build_input_vector(sample)\n vector = input_vector + [lang_number]\n vectors.append(vector)\n sample_start_index += text_sample_size\n count += 1\n\n print(\"done.\")\n print(f\"\\tExtracted {count} vectors.\")\n\n del file_content\n\n print(f\"Total {len(vectors)} vectors.\")\n\n np_vectors = np.array(vectors, dtype=np.uint16)\n np.random.shuffle(np_vectors)\n\n print(f\"Converted to NumPy array, shape: {np_vectors.shape}.\")\n\n np.savez_compressed(out_vectors_path, data=np_vectors)\n\n print(f\"Saved to {out_vectors_path}.\")", "def get_parsed_files(output_path, directory):\n parsed_files = set(os.listdir(os.path.join(output_path, directory)))\n \n return parsed_files", "def extract(src_dir,feat_file,ivectors_dir,num_gselect):\n os.system(\"./extract_ivectors.sh --num-gselect \"+str(num_gselect)+ \" \" + src_dir + \" \" + feat_file + \" \" + ivectors_dir)\n keys=[]\n ivectors=np.empty((0,0))\n for key,mat in kaldi_io.read_vec_flt_scp(ivectors_dir+'/ivector.scp'):\n if ivectors.shape[1] != mat.shape[0]:\n ivectors=ivectors.reshape((0,mat.shape[0]))\n ivectors=np.vstack((ivectors,mat))\n keys.append(key)\n\n ivectors=np.asarray(ivectors)\n keys=np.asarray(keys)\n return ivectors,keys", "def get_files(input_dir):\n file_rep = { \"tars\" : [] }\n \n files = os.listdir(input_dir)\n \n the_file, the_date = find_bootstrap(files)\n \n #add index file in file_rep\n file_rep['index'] = the_file\n file_rep['date'] = the_date\n \n pattern = \"ncep_forecast_%s_(?P<name>\\S+).tar\" % (the_date)\n \n the_re = re.compile(pattern)\n\n for the_file in files:\n matched = the_re.match(the_file)\n if matched:\n print(\"matched %s\" % (matched.group(\"name\")))\n file_rep['tars'].append(the_file)\n \n return file_rep", "def get_input_files(dir_path):\n return [os.path.join(dir_path,f) for f in os.listdir(dir_path)\n if os.path.isfile(os.path.join(dir_path,f))]", "def process_imgdir(self,imgdir):\n #Write images into resultdir\n 
resultdir = os.path.join(imgdir, 'results')\n #Read images from input dir\n inputdir = os.path.join(imgdir, 'inputs')\n shutil.rmtree(resultdir)\n os.mkdir(resultdir)\n #Read files from input images\n for fullname in os.listdir(inputdir):\n filepath = os.path.join(inputdir, fullname)\n if os.path.isfile(filepath):\n basename = os.path.basename(filepath)\n image = cv2.imread(filepath, cv2.IMREAD_COLOR)\n if len(image.shape) == 3 and image.shape[2] == 3:\n print('Processing %s ...' % basename)\n else:\n sys.stderr.write('Skipping %s, not RGB' % basename)\n continue\n #Extract haze from the scene and then save the image\n dehazed = self.get_scene_radiance(image)\n cv2.imwrite(os.path.join(resultdir, basename), dehazed)\n return os.path.join(resultdir, basename)", "def get_output(self, output_dir=\"tools_output\"):\n\n output_dir = self.project_dir / output_dir / self.name\n # create output directory if didn't exist\n if not output_dir.exists():\n os.makedirs(output_dir)\n logger.info(f\"Created {output_dir}\")\n\n for outfile in self.output:\n outfile = self.project_dir / outfile\n if outfile.exists():\n src = os.fspath(outfile)\n dst = os.fspath(output_dir / outfile.name)\n shutil.move(src, dst)\n logger.info(f\"Moved {outfile.name} to {output_dir}\")\n else:\n msg = f\"File not found: {outfile} - did you execute run() before?\"\n logger.error(msg)\n raise FileNotFoundError(msg)", "def out_featuredir(self):\n return self.outputfrominput(inputformat='tokfoliadir', stripextension='.tok.foliadir', addextension='.featuredir')", "def main(file):\n\n # Get the current working directory.\n here = os.getcwd()\n #Need the file_name to set globe, so that other functions can access to it.\n global file_name\n # Spite the Input into file_path and file_name.\n file_path = spilt_path(file)[0]\n file_name = spilt_path(file)[1]\n\n # Try to get into the file_path, if exist\n try:\n os.chdir(file_path)\n except IOError, e:\n print e\n\n # Now convert it\n convertFile(file_name)\n # 
going back to orgin folder\n os.chdir(here)\n return os.path.join(output_dir, file_name)", "def generate_vec_file(args, path_list, file_name):\n if args.vec != None:\n #If the user wants to create a vec file, so calls opencv to create it.\n command = \" \".join([\"opencv_createsamples -vec\", args.vec,\n \"-info\", os.path.join(path_list, file_name),\n \"-num\", str(args.num), \"-h\", str(args.height),\n \"-w\", str(args.width)])\n execute_commands([command])\n if args.out_img_folder == None:\n \"\"\"If the user don't want to save created samples,\n so deletes the entire folder\"\"\"\n rmtree(os.path.abspath(path_list))", "def make_files(dir_in, dir_out):\n try:\n listaFisiere = os.listdir(f\"{dir_in}\")\n except Exception as eroare:\n print(\"Path to input file is invalid, exiting...\")\n quit()\n if not os.path.exists(f\"{dir_out}\"):\n os.mkdir(f\"{dir_out}\")\n paths_out = []\n for numeFisier in listaFisiere:\n numeFisierOutput=\"output_\"+numeFisier\n f=open(f\"{dir_out}/\"+numeFisierOutput,\"w\")\n paths_out.append(f\"{dir_out}/\"+numeFisierOutput)\n f.close()\n for i in range(len(listaFisiere)):\n listaFisiere[i] = dir_in + \"/\" + listaFisiere[i]\n return listaFisiere, paths_out", "def fetch_direction(file_list, column_name):\r\n start_time_aggrigating_vector_components = time.time()\r\n res_dir_x_list, res_dir_y_list, mag, direction = draw_dirs2(file_list, column_name)\r\n all_in_x = np.zeros_like(mag[0])\r\n all_in_y = np.zeros_like(mag[0])\r\n all_mag = np.zeros_like(mag[0])\r\n for i in range(len(res_dir_x_list)):\r\n all_in_x = np.add(all_in_x, res_dir_x_list[i])\r\n all_in_y = np.add(all_in_y, res_dir_y_list[i])\r\n res_x = all_in_x.ravel()\r\n res_y = all_in_y.ravel()\r\n data = pd.read_csv(file_list[0])\r\n data['res_x'] = res_x\r\n data['res_y'] = res_y\r\n print(\"For aggregating vector components %s seconds\" % (time.time() - start_time_aggrigating_vector_components))\r\n return data", "def get_files(filedir):\n inputfiles = 
sorted(glob.glob(os.path.normcase(filedir+\"/\")+\"*.json\"))\n return inputfiles", "def matrix_and_vector_from_graphs(ndmg_participant_dir, atlas, return_files=False):\n # TODO: Figure out if there's a more computationally efficient way than building from a loop, that still absolutely guarentees that the target vector and corresponding matrix row are from the same subject\n # TODO: change the way I define out_matrix, currently I instantiate an empty matrix first then build on top of it\n # TODO: make out_matrix be dynamic to atlas\n out_matrix = np.empty((1, 70 * 70)) # TODO: make this dynamic to the atlas\n out_target_vector = []\n graphs = get_graph_files(ndmg_participant_dir, atlas)\n rgx = re.compile(\n r\"(sub-)([a-zA-Z0-9]*)\"\n ) # to be used for grabbing the subject name\n for (\n filename\n ) in (\n graphs\n ): # Builds the matrix and target vector element-by-element on a per-subject basis.\n if filename.endswith(\"ssv\"): # Account for ssv files, eric's code\n mat = numpy_from_output_graph(filename, sep=\" \")\n else:\n mat = numpy_from_output_graph(filename) # For csv files\n if mat.shape == (\n 70,\n 70,\n ): # TODO: make this dynamic to the atlas, currently it's only for desikan\n sub_and_session = \"\".join(rgx.search(filename).groups())\n out_target_vector.append(sub_and_session) # update out_target_vector\n out_matrix = np.append(\n out_matrix, mat.flatten()[np.newaxis, :], axis=0\n ) # TODO: implement this as a list, then convert to numpy after. 
I think that's more efficient.\n if return_files:\n np.savetxt(\n \"{}_X.csv\".format(ndmg_participant_dir),\n out_matrix[1:, :],\n fmt=\"%f\",\n delimiter=\",\",\n ) # save X\n for i in out_target_vector: # save y\n with open(\"{}_y.csv\".format(ndmg_participant_dir), \"a\") as f:\n f.write(i + \"\\n\")\n else:\n return (out_matrix[1:, :], out_target_vector)", "def main(base_dir: str, output_dir: str) -> None:\n base_path = pathlib.Path(base_dir)\n output_path = pathlib.Path(output_dir).expanduser()\n\n stage_copy_images(base_path, output_path)\n stage_extract_videos(base_path, output_path)", "def get_vector(filename):\n\n command = 'tokenizer -l Java ' + filename\n process = subprocess.Popen(command.split(), stdout=subprocess.PIPE)\n\n try:\n output, error = process.communicate()\n output = output.decode('utf8')\n output = output.replace('\\t', ' ').replace('\\n', '')\n return output\n except:\n print(error)\n return ''", "def get_image_path(raw_input_dir: str) -> list:\n result = []\n for root, dirs, files in os.walk(raw_input_dir):\n for file in files:\n result.append(os.path.join(root, file))\n return result", "def side_function(input_, dir_=None):\r\n input_ = abspath(input_)\r\n if not dir_:\r\n base = basename(input_)\r\n base = base.replace(\".pdb\", \"\")\r\n else:\r\n base = dir_\r\n if not os.path.exists(\"mutations_{}\".format(base)):\r\n os.mkdir(\"mutations_{}\".format(base))\r\n os.chdir(\"mutations_{}\".format(base))\r\n\r\n return input_", "def initialize_output(fn, output_dir, station_id, dataset):\n \n source_file =fn.split('/')[-1]\n output_file = output_dir + '/' + station_id + '_' + dataset + '_harvested_' + source_file + '.nc' # creating an output file name e.g. 
chera5.conv._10393.nc , try 01009 faster\n return output_file , source_file", "def get_files():\n\n img_dir = '../ADE20K_2016_07_26/full_data/images/validation/'\n sem_dir = '../ADE20K_2016_07_26/full_data/annotations/validation/'\n ins_dir = '../ADE20K_2016_07_26/full_data/annotations_instance/validation/'\n\n img_files = os.listdir(img_dir)\n sem_files = os.listdir(sem_dir)\n ins_files = os.listdir(ins_dir)\n \n img_files = [ os.path.join(img_dir,item) for item in img_files ]\n sem_files = [ os.path.join(sem_dir,item) for item in sem_files ]\n ins_files = [ os.path.join(ins_dir,item) for item in ins_files ]\n \n img_files.sort()\n sem_files.sort()\n ins_files.sort()\n \n return img_files, sem_files, ins_files", "def prepare_output_dir(out_dir, test_dir):\r\n\r\n if not out_dir.exists():\r\n out_dir.mkdir()\r\n\r\n # get the necessary file names\r\n file_names = get_file_names(test_dir, args.distance, print_file_names=False)\r\n\r\n # copy the images in the firstIms into the output folder\r\n for name in file_names[1][0]:\r\n file_path = Path(test_dir / name)\r\n copy_to = Path(out_dir / name)\r\n shutil.copy(file_path, copy_to)\r\n\r\n # the firstIms list does not contain the last image,\r\n # so we need to also copy the last image of the secIms into the output folder\r\n last_im = file_names[1][1][-1]\r\n shutil.copy(Path(test_dir/last_im), Path(out_dir/last_im))\r\n\r\n return file_names", "def get_files_from_directory(self, folder):\n return ['{}/{}'.format(folder, each) for each in os.listdir(folder) if each.endswith('.vm')]", "def openFile(dir):\n # If path is a directory\n if os.path.isdir(dir):\n # Check file sizes from first file\n Nt = len(glob.glob(dir + \"/1_*.txt\")) # Nt\n tmp = np.loadtxt(dir + \"/1_0.txt\") \n Ny, Nx = tmp.shape\n # Output array\n V = np.zeros((Nt, 2, Ny, Nx))\n for n in range(Nt):\n V1 = np.loadtxt(\"{0}/1_{1}.txt\".format(dir, n))\n V2 = np.loadtxt(\"{0}/2_{1}.txt\".format(dir, n))\n V[n] = V1, V2\n return V\n # If path is a 
file\n elif os.path.isfile(dir):\n if '.npy' in dir: # Numpy data file\n return np.load(dir)\n elif '.txt' in dir: # Plain text data file\n return np.loadtxt(dir)\n else:\n raise Exception(\"File extension not supported.\")\n else:\n raise Exception(\"Path is not supported.\")", "def get_file_list(input_dir):\n\tfile_paths = [input_dir +'/' + f for f in listdir(input_dir) if isfile(join(input_dir, f)) ]\n\treturn file_paths", "def index_files(self, input_dir, output_dir):\n self.lucene = Lucene(output_dir)\n self.lucene.open_writer()\n for path, dirs, _ in os.walk(input_dir):\n for dir in sorted(dirs):\n for _, _, files in os.walk(os.path.join(input_dir, dir)):\n for fn in sorted(files):\n print \"Indexing \", os.path.join(input_dir + dir, fn), \"...\"\n self.index_file(os.path.join(input_dir + dir, fn))\n # closes Lucene index\n self.lucene.close_writer()", "def gen_dtu_mvs_path(dtu_data_folder, mode='training'):\n sample_list = []\n \n # parse camera pairs\n cluster_file_path = dtu_data_folder + '/Cameras/pair.txt'\n cluster_list = open(cluster_file_path).read().split()\n\n # 3 sets\n training_set = [2, 6, 7, 8, 14, 16, 18, 19, 20, 22, 30, 31, 36, 39, 41, 42, 44,\n 45, 46, 47, 50, 51, 52, 53, 55, 57, 58, 60, 61, 63, 64, 65, 68, 69, 70, 71, 72,\n 74, 76, 83, 84, 85, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100,\n 101, 102, 103, 104, 105, 107, 108, 109, 111, 112, 113, 115, 116, 119, 120,\n 121, 122, 123, 124, 125, 126, 127, 128]\n validation_set = [3, 5, 17, 21, 28, 35, 37, 38, 40, 43, 56, 59, 66, 67, 82, 86, 106, 117]\n evaluation_set = [1, 4, 9, 10, 11, 12, 13, 15, 23, 24, 29, 32, 33, 34, 48, 49, 62, 75, 77, \n 110, 114, 118]\n\n # for each dataset\n data_set = []\n if mode == 'training':\n data_set = training_set\n elif mode == 'validation':\n data_set = validation_set\n elif mode == 'evaluation':\n data_set = evaluation_set\n\n # for each dataset\n for i in data_set:\n\n image_folder = os.path.join(dtu_data_folder, ('Rectified/scan%d' % i))\n 
cam_folder = os.path.join(dtu_data_folder, 'Cameras')\n depth_folder = os.path.join(dtu_data_folder, ('Depths/scan%d' % i))\n\n if mode == 'training':\n # for each lighting\n for j in range(0, 7):\n # for each reference image\n for p in range(0, int(cluster_list[0])):\n paths = []\n # ref image\n ref_index = int(cluster_list[22 * p + 1])\n ref_image_path = os.path.join(\n image_folder, ('rect_%03d_%d_r5000.png' % ((ref_index + 1), j)))\n ref_cam_path = os.path.join(cam_folder, ('%08d_cam.txt' % ref_index))\n paths.append(ref_image_path)\n paths.append(ref_cam_path)\n # view images\n for view in range(FLAGS.view_num - 1):\n view_index = int(cluster_list[22 * p + 2 * view + 3])\n view_image_path = os.path.join(\n image_folder, ('rect_%03d_%d_r5000.png' % ((view_index + 1), j)))\n view_cam_path = os.path.join(cam_folder, ('%08d_cam.txt' % view_index))\n paths.append(view_image_path)\n paths.append(view_cam_path)\n # depth path\n depth_image_path = os.path.join(depth_folder, ('depth_map_%04d.pfm' % ref_index))\n paths.append(depth_image_path)\n sample_list.append(paths)\n else:\n # for each reference image\n j = 5\n for p in range(0, int(cluster_list[0])):\n paths = []\n # ref image\n ref_index = int(cluster_list[22 * p + 1])\n ref_image_path = os.path.join(\n image_folder, ('rect_%03d_%d_r5000.png' % ((ref_index + 1), j)))\n ref_cam_path = os.path.join(cam_folder, ('%08d_cam.txt' % ref_index))\n paths.append(ref_image_path)\n paths.append(ref_cam_path)\n # view images\n for view in range(FLAGS.view_num - 1):\n view_index = int(cluster_list[22 * p + 2 * view + 3])\n view_image_path = os.path.join(\n image_folder, ('rect_%03d_%d_r5000.png' % ((view_index + 1), j)))\n view_cam_path = os.path.join(cam_folder, ('%08d_cam.txt' % view_index))\n paths.append(view_image_path)\n paths.append(view_cam_path)\n # depth path\n depth_image_path = os.path.join(depth_folder, ('depth_map_%04d.pfm' % ref_index))\n paths.append(depth_image_path)\n sample_list.append(paths)\n \n return 
sample_list", "def read_scan(self, dir, **args):\n files = []\n files_dir = {}\n for file in os.listdir(dir):\n if file.endswith('tif'):\n fnbase = file[:-4]\n elif file.endswith('tiff'):\n fnbase = file[:-4]\n else:\n continue\n last_digits = re.search(r'\\d+$', fnbase)\n if last_digits is not None:\n key = int(last_digits.group())\n files_dir[key] = file\n\n ordered_keys = sorted(list(files_dir.keys()))\n\n for key in ordered_keys:\n file = files_dir[key]\n files.append(os.path.join(dir, file))\n\n # look at slice0 to find out shape\n n = 0\n try:\n slice0 = self.detector.get_frame(files[n], self.roi, self.Imult)\n except Exception as e:\n print(e)\n return None\n shape = (slice0.shape[0], slice0.shape[1], len(files))\n arr = np.zeros(shape, dtype=slice0.dtype)\n arr[:, :, 0] = slice0\n\n for file in files[1:]:\n n = n + 1\n slice = self.detector.get_frame(file, self.roi, self.Imult)\n arr[:, :, n] = slice\n return arr" ]
[ "0.6080736", "0.5842313", "0.573423", "0.57060766", "0.56463504", "0.55878353", "0.54047006", "0.53952485", "0.53682417", "0.5323462", "0.5306015", "0.528734", "0.5246308", "0.52261233", "0.51942784", "0.5187178", "0.517565", "0.5173988", "0.5163878", "0.5163753", "0.51474893", "0.51318204", "0.5128367", "0.5098167", "0.5081925", "0.50777596", "0.5075219", "0.5075078", "0.5069027", "0.50660175" ]
0.7355258
0
Setup Gmail API instance
def get_gmail_api_instance(): if not os.path.exists('token.pickle'): print("err: no credentials .pickle file found") gmailtoken_generator() with open('token.pickle', 'rb') as token: creds = pickle.load(token) service = build('gmail', 'v1', credentials=creds) return service
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def build_gmail_api_v1():\n\n credentials = build_credentials()\n return googleapiclient.discovery.build('gmail', 'v1', credentials=credentials)", "def __init__(self, mailbox, api_key, base_url='https://mailosaur.com/api', smtp_host='mailosaur.io'):\n self.mailbox = mailbox\n self.api_key = api_key\n self.base_url = base_url\n self.smtp_host = smtp_host", "def main():\n credentials = get_credentials()\n http = credentials.authorize(httplib2.Http())\n service = discovery.build('gmail', 'v1', http=http)\n user_id = 'me'\n\n ## get_labels ##\n #print_all_labels(service,user_id)\n #fetch_and_store(service,user_id)\n #apply_rules()", "def __init__(self, address):\n self.address = address\n self.creds = None\n # if there's an access token from previous authentication, load it\n if os.path.exists(ACCESS_TOKEN_PATH):\n with open(ACCESS_TOKEN_PATH, 'rb') as tokenfile:\n self.creds = pickle.load(tokenfile)\n\n # if the credentials are invalid or non-existent, prompt to authenticate\n if not self.creds or not self.creds.valid:\n if self.creds and self.creds.expired and self.creds.refresh_token:\n self.creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n CLIENT_ID_PATH, SCOPES)\n self.creds = flow.run_local_server()\n # save the credentials for the next run\n with open(ACCESS_TOKEN_PATH, 'wb') as tokenfile:\n pickle.dump(self.creds, tokenfile)\n\n self.service = build('gmail', 'v1', credentials=self.creds)", "def __init__(self, domain, email, password, app):\n self.client = EmailSettingsClient(domain=domain)\n self.client.ClientLogin(email=email, password=password,\n source=app)", "def __init__(self, domain: str, api_key: str, sender_name: str) -> None:\n self.auth = (\"api\", api_key)\n self.api_url = f\"https://api.mailgun.net/v3/{domain}\"\n self.sender = f\"{sender_name} <noreply@{domain}>\"", "def main():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the 
authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server()\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n\n service = build('gmail', 'v1', credentials=creds)\n\n labels = ListLabels(service, 'me')\n\n messages = ListMessagesWithLabels(service, 'me', label_ids=[\"CATEGORY_FORUMS\"])", "def main():\n token = 'C:/Users/asif.rouf/PycharmProjects/pythonProject/AX_Admin_portal/Test/utils/google-api-token.json'\n credential = 'C:/Users/asif.rouf/PycharmProjects/pythonProject/AX_Admin_portal/Test/utils/google-api-credentials.json'\n creds = None\n # The file token.json stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists(token):\n creds = Credentials.from_authorized_user_file(token, SCOPES)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n credential, SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n # with open('token.json', 'w') as token:\n # token.write(creds.to_json())\n\n service = build('gmail', 'v1', credentials=creds)\n\n # # Call the Gmail API\n # results = service.users().labels().list(userId='me').execute()\n # labels = results.get('labels', [])\n #\n # if not labels:\n # print('No labels found.')\n # else:\n # print('Labels:')\n # for label 
in labels:\n # print(label['name'])\n\n # Call the Gmail API to fetch INBOX\n results = service.users().messages().list(userId='me', labelIds=['INBOX']).execute()\n messages = results.get('messages', [])\n # message1 = messages[0]\n # print(message1)\n message1 = {'id': '17a5ca5f5f4bd0aa', 'threadId': '17a5b1bb861b3bc2'}\n message1 = {'id': '17a5cbc54c546465', 'threadId': '17a5b1bb861b3bc2'}\n\n # message1 = {'id': '17a5b852afe04a52', 'threadId': '17a50c997c059e68'}\n print(messages)\n print(message1)\n\n if not messages:\n print(\"No messages found.\")\n else:\n print(\"Message snippets:\")\n # for message in messages:\n # msg = service.users().messages().get(userId='me', id=message['id']).execute()\n # print(messages)\n # print(msg['snippet'])\n\n # msg = service.users().messages().get(userId='me', id=message1['id']).execute()\n # print(msg['snippet'])\n ###############################\n msg = service.users().messages().get(userId='me', id=message1['id'], format='raw').execute()\n msg_str = base64.urlsafe_b64decode(msg['raw'].encode('ASCII'))\n mime_msg = email.message_from_bytes(msg_str)\n print(msg['snippet'])\n print(mime_msg)\n print(mime_msg['Date'])\n print(mime_msg['From'])\n print(mime_msg['To'])\n print(mime_msg['Subject'])\n #\n # print(datetime.utcnow())\n\n ######################################################\n # msg = service.users().messages().get(userId='me', id=message1['id'], format='full').execute()\n # # parts can be the message body, or attachments\n # payload = msg['payload']\n # headers = payload.get(\"headers\")\n # parts = payload.get(\"parts\")\n # # print(payload)\n # # print(parts)\n # # print(headers)\n # for header in headers:\n # print(header['name'])\n # print(header['value'])\n #\n ######################################################\n msg = service.users().messages().get(userId='me', id=message1['id']).execute()\n\n # Use try-except to avoid any Errors\n try:\n # Get value of 'payload' from dictionary 'txt'\n payload = 
msg['payload']\n headers = payload['headers']\n subject = ''\n sender = ''\n\n # Look for Subject and Sender Email in the headers\n for d in headers:\n if d['name'] == 'Subject':\n subject = d['value']\n if d['name'] == 'From':\n sender = d['value']\n # The Body of the message is in Encrypted format. So, we have to decode it.\n # Get the data and decode it with base 64 decoder.\n parts = payload.get('parts')[0]\n data = parts['body']['data']\n data = data.replace(\"-\", \"+\").replace(\"_\", \"/\")\n decoded_data = base64.b64decode(data)\n\n # Now, the data obtained is in lxml. So, we will parse\n # it with BeautifulSoup library\n soup = BeautifulSoup(decoded_data, \"lxml\")\n body = soup.body()\n\n # Printing the subject, sender's email and message\n print(\"Subject: \", subject)\n print(\"From: \", sender)\n print(\"Message: \", body)\n # for link in soup.find_all('a', href=True):\n # print(link['href'])\n link = soup.find('a', href=True)\n print(link['href'])\n except:\n pass", "def __init__(self, email, private_key, refresh_token=None,\n feed=None, client=None):\n self.adapters = {}\n self.email = email\n self.refresh_token = refresh_token\n self.private_key = private_key\n self.feed = feed\n self.client = client", "def __init__(self, sendgrid_email_env_name: str, sendgrid_api_key_env_name: str):\n try:\n self.sendgrid_email = os.environ[sendgrid_email_env_name]\n self.sendgrid_api_key = os.environ[sendgrid_api_key_env_name]\n except KeyError:\n self.sendgrid_email = None\n self.sendgrid_api_key = None\n self.logger.error(\"Failed to initialize email service\")\n return\n self.logger.info(\"Email service initialized\")", "def build_service():\r\n creds = None\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there are 
no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n f\"{EMAIL_ACCOUNT_FILE}\", SCOPES)\r\n creds = flow.run_local_server(port=0)\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n\r\n service = build('gmail', 'v1', credentials=creds)\r\n return service", "def email_startup():\n imap = imaplib.IMAP4_SSL('imap.gmail.com')\n # authenticate\n imap.login(email_credentials.email_user, email_credentials.email_pass)\n return imap", "def __init__ (self, email, domain, password):\n\n self.gd_client = gdata.apps.service.AppsService()\n self.gd_client.email = email\n self.gd_client.domain = domain\n self.gd_client.password = password\n self.gd_client.ProgrammaticLogin()", "def get_service():\r\n creds = None\r\n # The file token.json stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('/var/jail/home/team28/final_project/python/EmailApp/token.json'):\r\n creds = Credentials.from_authorized_user_file('/var/jail/home/team28/final_project/python/EmailApp/token.json', SCOPES)\r\n # If there are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file('/var/jail/home/team28/final_project/python/EmailApp/credentials.json', SCOPES)\r\n creds = flow.run_local_server(port=0)\r\n # Save the credentials for the next run\r\n with open('/var/jail/home/team28/final_project/python/EmailApp/token.json', 'w') as token:\r\n token.write(creds.to_json())\r\n\r\n service = build('gmail', 'v1', credentials=creds)\r\n return service", "def 
main():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('gmail', 'v1', credentials=creds)\n\n # Call the Gmail API\n results = service.users().labels().list(userId='me').execute()\n labels = results.get('labels', [])\n\n if not labels:\n print('No labels found.')\n else:\n print('Labels:')\n for label in labels:\n print(label['name'])\n path = \"./ham\"\n try:\n os.mkdir(path)\n except OSError:\n print (\"Creation of the directory %s failed\" % path)\n else:\n print (\"Successfully created the directory %s \" % path)\n\n messages = []\n messages = ListMessagesMatchingQuery(service, 'me', 'in:inbox')\n idx = 0\n for message in messages:\n GetMimeMessage(service, 'me', message['id'], idx)\n idx+=1", "def setup(bot):\n bot.add_cog(EmailAddressCRUD(bot))", "def gmailtoken_generator():\r\n # If modifying these scopes, delete the file token.pickle.\r\n # SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']\r\n SCOPES = ['https://www.googleapis.com/auth/gmail.send']\r\n\r\n creds = None\r\n # The file token.pickle stores the user's access and refresh tokens, and is\r\n # created automatically when the authorization flow completes for the first\r\n # time.\r\n if os.path.exists('token.pickle'):\r\n with open('token.pickle', 'rb') as token:\r\n creds = pickle.load(token)\r\n # If there 
are no (valid) credentials available, let the user log in.\r\n if not creds or not creds.valid:\r\n if creds and creds.expired and creds.refresh_token:\r\n creds.refresh(Request())\r\n else:\r\n flow = InstalledAppFlow.from_client_secrets_file(\r\n 'credentials.json', SCOPES)\r\n creds = flow.run_local_server(port=0)\r\n # Save the credentials for the next run\r\n with open('token.pickle', 'wb') as token:\r\n pickle.dump(creds, token)\r\n\r\n service = build('gmail', 'v1', credentials=creds)", "def setup_class(cls):\n initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST)", "def get_service():\n creds = None\n\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file('credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n\n service = build('gmail', 'v1', credentials=creds)\n\n return service", "def __init__(self, config):\n self._config = config\n\n self.twitter = Twitter(auth=OAuth(\n self._config['twitter']['oauth'][0],\n self._config['twitter']['oauth'][1],\n self._config['twitter']['oauth'][2],\n self._config['twitter']['oauth'][3]\n ))\n\n self.sender = Mailer('smtp.gmail.com', use_tls=True, port=587)\n self.sender.login(self._config['mail']['address'], self._config['mail']['pass'])", "def build_service():\n\n\tstore = file.Storage('credentials.json')\n\tcreds = store.get()\n\tif not creds or creds.invalid:\n\t flow = 
client.flow_from_clientsecrets('client_secret.json', SCOPES)\n\t creds = tools.run_flow(flow, store)\n\tservice = build('gmail', 'v1', http=creds.authorize(Http(disable_ssl_certificate_validation=True)))\n\treturn service", "def main():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('gmail', 'v1', credentials=creds)\n\n # Calls the Gmail API to get Emails\n threads = listMessages(service, 'me', 'Jay Patel,')\n\n if not threads:\n print('No TUalerts found.')\n else:\n getCrimeLocation(service, 'me', threads)\n\n # Prints the TUlalerts (Mostly for testing purposes)\n printAlerts()", "def init_api(self):\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists(self.gdrive_config.TOKEN_PICK_PATH):\n with open(self.gdrive_config.TOKEN_PICK_PATH, 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n self.gdrive_config.CREDENTIAL_PATH, self.gdrive_config.SCOPES)\n creds = flow.run_local_server(port=0)\n # Save 
the credentials for the next run\n with open(self.gdrive_config.TOKEN_PICK_PATH, 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('drive', 'v3', credentials=creds)\n return service", "def __init__(self, **kwargs):\n esp_name = self.esp_name\n self.api_key = get_anymail_setting(\n \"api_key\",\n esp_name=esp_name,\n kwargs=kwargs,\n allow_bare=True,\n )\n api_url = get_anymail_setting(\n \"api_url\",\n esp_name=esp_name,\n kwargs=kwargs,\n default=\"https://api.brevo.com/v3/\",\n )\n if not api_url.endswith(\"/\"):\n api_url += \"/\"\n super().__init__(api_url, **kwargs)", "def __init__(self, email, password, currentPlayer = None, gameThread = None, application = None):\n Client.__init__(self, email, password, max_tries=1)\n self.currentPlayer = currentPlayer\n self.gameThread = gameThread\n self.application = application", "def main():\n #Gmail2TelegramClient(\"1234\") -- a person\n #Gmail2TelegramClient(\"-1234\") -- group chat", "def __init__(self, *args, **kwargs):\n\n super(TestGoogleBooksTelescope, self).__init__(*args, **kwargs)\n self.host = \"localhost\"\n self.api_port = 5000\n self.sftp_port = 3373\n self.project_id = os.getenv(\"TEST_GCP_PROJECT_ID\")\n self.data_location = os.getenv(\"TEST_GCP_DATA_LOCATION\")\n self.organisation_name = \"anu-press\"\n self.organisation_folder = \"anu-press\"", "def initialize_client():\n logging.info('Initializing Sendgrid provider')\n sendgrid_authentication, sendgrid_username = get_provider_credentials('sendgrid') \n sendgrid_provider = SendGridProvider(sendgrid_authentication, sendgrid_username)\n\n logging.info('Initializing Mailgun provider')\n mailgun_authentication, mailgun_domain = get_provider_credentials('mailgun')\n mailgun_provider = MailGunProvider(mailgun_authentication, mailgun_domain)\n\n logging.info('Registering providers')\n client.register_provider(sendgrid_provider, 10)\n client.register_provider(mailgun_provider, 20)", "def __init__(self, api_key, app_id, email):\n 
api.APIRequest.__init__(self, api_key)\n self._app_id = app_id\n self._method = 'POST'\n self._email = email\n self._first_name = None\n self._last_name = None\n self._message = None\n self._role = None\n self._tags = None", "def service_authentication():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('gmail', 'v1', credentials=creds)\n\n return service" ]
[ "0.67993057", "0.6465097", "0.6379843", "0.62756974", "0.6221557", "0.61724395", "0.6157281", "0.61341727", "0.60823137", "0.6077386", "0.6068", "0.6049752", "0.6017722", "0.59777987", "0.59563124", "0.59244496", "0.5898986", "0.58902305", "0.5883836", "0.57801676", "0.57378507", "0.57056755", "0.5695731", "0.5656688", "0.56390494", "0.5629614", "0.5626255", "0.5597795", "0.55914205", "0.55895275" ]
0.72582275
0
Set up Gmail API instance, use it to send an email 'sender' is the Gmail address that is authenticated by the Gmail API 'receiver' is the receiver's email address 'subject' is the subject of our email 'message_text' is the content of the email
def Gmail_mailsender(receiver, sender, subject, message_text): # authenticate with Gmail API service = get_gmail_api_instance() # create message structure message = create_message(sender, receiver, subject, message_text) # send email result = send_email(service, sender, message) # wtf if not result == None: print(f"Message sent successfully! Message id: {result['id']}")
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def with_api(cls, receiver, email_template, subject):\n\n url = 'https://api.mailgun.net/v3/{}/messages'.format(cls.domain_name)\n auth = ('api', cls.api_key)\n data = {\n 'from': cls.sender.format(cls.domain_name),\n 'to': receiver,\n 'subject': subject,\n \"html\": email_template,\n }\n\n response = cls.api_send(url, auth, data)\n\n if response.status_code != 200:\n raises(EXCEPTIONS['EMAIL_ERROR'], 500)", "def main():\r\n print(\"-------This script was run on %s--------\" % DATE)\r\n try:\r\n creds = create_credentials() # creates a token with credentials for the bot\r\n\r\n service = build('gmail', 'v1', credentials=creds) # creates the service to interact with the Gmail API\r\n\r\n recipients = \", \".join(TO_EMAILS)\r\n subject = \"Gym Family Plan\"\r\n body_html = HTML_MESSAGE.format(PAYEE)\r\n body_plain_txt = PLAIN_MESSAGE.format(PAYEE)\r\n msg = create_message(FROM_EMAIL, recipients, subject, body_html, body_plain_txt)\r\n send_message(service, \"me\", msg)\r\n print(\"email sent successfully\")\r\n except:\r\n print(\"error, something went wrong.\")\r\n print(\"--------------------------------------------------------------\")", "def send_email(recipient,subject,message):\n msg = MIMEText(message)\n me = '[email protected]'\n \n msg['Subject'] = subject\n msg['From'] = me\n msg['To'] = recipient\n\n # Send the message via our own SMTP server, but don't include the\n # envelope header.\n username='cryolt2'\n password='Diamond=Geil!'\n\n server = smtplib.SMTP('smtp.gmail.com:587') \n server.starttls() \n server.login(username,password) \n server.sendmail(me, recipient, msg.as_string()) \n server.quit()", "def send_email(self):\n message = MIMEText(self.email_body, 'plain', 'utf-8')\n\n message['Subject'] = self.email_subject\n message['From'] = gmail_user\n message['To'] = ', '.join(self.recipients)\n\n try:\n server = smtplib.SMTP_SSL('smtp.gmail.com', 465)\n server.ehlo()\n\n server.login(gmail_user, gmail_password)\n\n server.sendmail(message['From'], 
self.recipients, message.as_string())\n\n server.close()\n\n print('Email sent!')\n except Exception as err:\n # TODO Write error to log file\n raise err", "def send():\n try:\n data = request.get_json()\n if data['authkey'] != os.environ.get('MAIL_AUTHKEY'): \n return \"Ooops. Wrong `authkey`.\"\n msg = Message(data['subject'],\n sender=os.environ.get('MAIL_USERNAME'),\n recipients=[data['recipient']])\n msg.body = data['body'] \n mail.send(msg)\n return 'Mail sent!'\n except Exception as e:\n print('We got an error at ' + httpdate(datetime.datetime.now()))\n print(str(e)) \n return 'There was an error with that request.'", "def send_email(recipient, subject, body) -> None:\n port = 465\n smtp_server = \"smtp.gmail.com\"\n sender_email = user['username']\n password = user['password']\n\n message = MIMEMultipart()\n message['From'] = sender_email\n message['To'] = recipient\n message['Subject'] = subject\n body = MIMEText(body) \n message.attach(body)\n\n server = smtplib.SMTP_SSL(smtp_server, port)\n server.login(sender_email, password)\n server.sendmail(sender_email, recipient, message.as_string())\n server.quit()", "def __init__(self, domain: str, api_key: str, sender_name: str) -> None:\n self.auth = (\"api\", api_key)\n self.api_url = f\"https://api.mailgun.net/v3/{domain}\"\n self.sender = f\"{sender_name} <noreply@{domain}>\"", "def send(\r\n self,\r\n to = '', #list of email addresses - Required\r\n subject='None', #message's subject - Required\r\n message_text='None', #message body in plain text - Required\r\n message_html=None, #message body in html - Optional\r\n attachments=None, #list of truples [(filename, file_contents)] - Optional\r\n cc = None, #list of email addresses to CC message to\r\n bcc = None, #list of email addresses to BCC message to\r\n reply_to = None, #single email address to have replies send to\r\n ): \r\n if not isinstance(to, list):\r\n to = [to]\r\n\r\n try:\r\n if self.settings.private.email_server == 'gae':\r\n from 
google.appengine.api import mail\r\n #untested on GAE, but in theory should work\r\n #http://code.google.com/appengine/docs/python/mail/emailmessagefields.html\r\n mail.send_mail(sender=self.settings.private.email_sender, to=to,\r\n subject=subject, body=message_text, html=message_html, attachments=attachments, cc = cc,\r\n bcc = bcc, reply_to = reply_to)\r\n else:\r\n\r\n msg = self.buildMIME(sender = self.settings.private.email_sender,\r\n recipients = to, subject = subject,\r\n message_text = message_text, message_html = message_html,\r\n attachments = attachments,\r\n cc = cc, bcc = bcc, reply_to = reply_to)\r\n #print 'message'+msg.as_string()\r\n #Build MIME body\r\n (host, port) = self.settings.mail.server.split(':')\r\n\r\n if self.settings.mail.ssl: \r\n try:\r\n server = smtplib.SMTP_SSL(host, port)\r\n except:\r\n # ERROR python <= 2.6\r\n pass\r\n else:\r\n server = smtplib.SMTP(host, port)\r\n\r\n if self.settings.mail.login:\r\n try:\r\n server.ehlo_or_helo_if_needed()\r\n except SMTPHeloError:\r\n logger.info(\"SMTP Helo Error in HELO\")\r\n\r\n if self.settings.mail.use_tls:\r\n try:\r\n server.starttls()\r\n except SMTPHeloError:\r\n logger.info(\"SMTP Helo Error in STARTTLS\")\r\n except SMTPException:\r\n logger.info(\"Server does not support TLS\")\r\n\r\n except RuntimeError:\r\n logger.info(\"Python version does not support TLS (<= 2.6?)\")\r\n\r\n try:\r\n server.ehlo_or_helo_if_needed()\r\n except SMTPHeloError:\r\n logger.info(\"SMTP Helo Error in HELO\")\r\n\r\n (username, password) = self.settings.mail.login.split(':')\r\n try:\r\n server.login(username, password)\r\n except SMTPHeloError:\r\n logger.info(\"SMTP Helo Error in LOGIN\")\r\n\r\n except SMTPAuthenticationError:\r\n logger.info(\"Invalid username/password combination\")\r\n\r\n except SMTPException:\r\n logger.info(\"SMTP error in login\")\r\n\r\n try:\r\n server.sendmail(self.settings.private.email_sender, to, msg.as_string())\r\n server.quit()\r\n\r\n except 
SMTPRecipientsRefused:\r\n logger.info(\"All recipients were refused. Nobody got the mail.\")\r\n\r\n except SMTPHeloError:\r\n logger.info(\"The server didn't reply properly to the HELO greeting.\")\r\n\r\n except SMTPSenderRefused:\r\n logger.info(\"The server didn't accept the from_addr.\")\r\n\r\n except SMTPDataError:\r\n logger.info(\"The server replied with an unexpected error code (other than a refusal of a recipient).\")\r\n \r\n except Exception, e:\r\n return False\r\n return True", "def gmail_send_message():\n creds, _ = google.auth.default()\n\n try:\n service = build('gmail', 'v1', credentials=creds)\n message = MIMEText('This is automated draft mail')\n message['to'] = '[email protected]'\n message['from'] = '[email protected]'\n message['subject'] = 'Automated draft'\n # encoded message\n encoded_message = base64.urlsafe_b64encode(message.as_bytes()) \\\n .decode()\n\n create_message = {\n 'message': {\n\n 'raw': encoded_message\n }\n }\n # pylint: disable=E1101\n send_message = (service.users().messages().send\n (userId=\"me\", body=create_message).execute())\n print(F'Message Id: {send_message[\"id\"]}')\n except HttpError as error:\n print(F'An error occurred: {error}')\n send_message = None\n return send_message", "def send_email(receiver_email: str, subject: str, message_text: str, username=\"christopher1duplessis\"):\n yag = yagmail.SMTP(username, keyring.get_password(\"email\", username))\n yag.send(\n to=receiver_email,\n subject=subject,\n contents=message_text)", "def send_email(subject, text):\n url = ('https://api.mailgun.net/v3/%s/messages' %\n cfg('mail:mailgun_domain'))\n auth = ('api', cfg('mail:mailgun_key'))\n data = {'from': 'Akari Bot <%s>' % cfg('mail:from'),\n 'to': [cfg('mail:to')],\n 'subject': subject, 'text': text}\n return requests.post(url, auth=auth, data=data)", "def send_api(recipient, subject, text):\n mailer = Mail(\n from_email=settings.MAIL_FROM_ADDRESS,\n to_emails=recipient,\n subject=subject,\n 
html_content=text)\n try:\n sg = SendGridAPIClient(os.environ.get('SENDGRID_API_KEY'))\n response = sg.send(mailer)\n return response.status_code\n except Exception as e:\n print(e)", "def send_mail(sender_domain, sender_name, sender_account, recipient, subject, text=None, html=None):\n msg_data = {\n 'from': '{} <{}@{}>'.format(sender_name, sender_account, sender_domain),\n 'to': [recipient],\n 'subject': subject\n }\n if text:\n msg_data['text'] = text\n elif html:\n msg_data['html'] = html\n else:\n raise ValueError('Expected html or text body')\n\n resp = requests.post(\n 'https://api.mailgun.net/v3/{}/messages'.format(sender_domain),\n auth=(\"api\", mailgun_key),\n data=msg_data\n )\n resp.raise_for_status()\n\n return 'mailgun: ' + resp.json()['id']", "def send_email(email_subject, recipient, message, config = None):\n try:\n config = current_app.config\n except:\n config = config\n\n sender = sendgrid.SendGridClient(config['SENDGRID_API_KEY'])\n\n email = sendgrid.Mail()\n\n email.set_subject(email_subject)\n email.add_to(recipient)\n email.set_from(config['FROM_EMAIL'])\n email.set_from_name(config['FROM_NAME'])\n email.set_replyto(config['FROM_NAME'])\n email.set_html(message)\n\n status, msg = sender.send(email)\n\n return status, msg", "def make_and_mail(receiver, subject, body):\n emailbody = MIMEText(body, 'html')\n msg = MIMEMultipart('alternative')\n msg['From'] = SENDER\n msg['To'] = receiver\n msg['Subject'] = subject\n msg.attach(emailbody)\n #pylint: disable=invalid-name\n try:\n s = smtplib.SMTP('smtp.gmail.com', 587)\n s.starttls()\n s.login(YOUPAPER_EMAIL, YOUPAPER_PASSWORD)\n s.sendmail(SENDER, receiver, msg.as_string())\n s.quit()\n return 1 #returns 1 if success\n except smtplib.SMTPException:\n return 0 #returns 0 if failure", "def custom_send_mail(subject=None, html_message=None, from_email=None, recipient_list=None, text_message=None, email_instance=None):\n if email_instance:\n subject = email_instance.get_subject()\n text_message = 
email_instance.get_text_message()\n html_message = email_instance.get_html_message()\n from_email = email_instance.get_from_email()\n recipient_list = email_instance.get_recipient_list()\n\n if settings.USE_SENDGRID:\n sg = SendGridAPIClient(settings.SENDGRID_API_KEY)\n from_email = Email(from_email)\n to_list = Personalization()\n for email in set(recipient_list):\n to_list.add_to(Email(email))\n mail = Mail(\n from_email=from_email,\n to_emails=None,\n subject=subject,\n html_content=html_message,\n plain_text_content=text_message\n )\n mail.add_personalization(to_list)\n try:\n sg.send(mail)\n except BadRequestsError:\n print(\"bad request. email not sent\")\n\n elif settings.USE_AWS_SES:\n my_config = Config(\n region_name='ca-central-1',\n signature_version='v4',\n retries={\n 'max_attempts': 10,\n 'mode': 'standard'\n }\n )\n # from https://docs.aws.amazon.com/ses/latest/DeveloperGuide/send-using-sdk-python.html\n # The character encoding for the email.\n CHARSET = \"UTF-8\"\n # Create a new SES resource and specify a region.\n client = boto3.client('ses', aws_access_key_id=settings.AWS_ACCESS_KEY_ID, aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, config=my_config)\n # Try to send the email.\n try:\n # Provide the contents of the email.\n response = client.send_email(\n Destination={\n 'ToAddresses': recipient_list\n },\n Message={\n 'Body': {\n 'Html': {\n 'Charset': CHARSET,\n 'Data': nz(html_message, ''),\n },\n 'Text': {\n 'Charset': CHARSET,\n 'Data': nz(text_message, ''),\n },\n },\n 'Subject': {\n 'Charset': CHARSET,\n 'Data': subject,\n },\n },\n Source=from_email,\n )\n # Display an error if something goes wrong.\n except ClientError as e:\n print(e.response['Error']['Message'])\n else:\n print(\"Email sent! 
Message ID:\"),\n print(response['MessageId'])\n\n elif settings.USE_SMTP_EMAIL:\n django_send_mail(\n subject=subject,\n message=nz(text_message, ''),\n html_message=html_message,\n from_email=from_email,\n recipient_list=recipient_list,\n fail_silently=False\n )\n\n else:\n print('No email configuration present in application...')\n if email_instance:\n print(email_instance)\n else:\n print(\"FROM: {}\\nTO: {}\\nSUBJECT: {}\\nMESSAGE:{}\".format(from_email, recipient_list, subject, html_message))", "def send_email(subject, sender, recipients, text_body, html_body):\n msg = Message(subject=subject, sender=sender, recipients=recipients)\n msg.body = text_body\n msg.html = html_body\n mail.send(msg)", "def _send_mailjet(message, subject, to, to_name, sender, sender_name):\n api_key = app.config.get('MJ_APIKEY_PUBLIC')\n api_secret = app.config.get('MJ_APIKEY_PRIVATE')\n if not api_key or not api_secret:\n app.logger.error('Missing MJ_APIKEY_PUBLIC/MJ_APIKEY_PRIVATE!')\n return\n # Note the data structures we use are api v3.1\n client = mailjet_rest.Client(\n auth=(api_key, api_secret),\n api_url='https://api.mailjet.com/',\n version='v3.1')\n from_obj = {\n \"Email\": sender,\n }\n if sender_name:\n from_obj[\"Name\"] = sender_name\n to_obj = [{\n \"Email\": to,\n }]\n if to_name:\n to_obj[0][\"Name\"] = to_name\n message = {\n \"From\": from_obj,\n \"To\": to_obj,\n \"Subject\": subject,\n \"TextPart\": message,\n }\n result = client.send.create(data={'Messages': [message]})\n if result.status_code != 200:\n app.logger.error(\n 'Error sending via mailjet: (%d) %r',\n result.status_code, result.text)\n raise MailFailure('Error sending via mailjet!')\n try:\n j = result.json()\n except Exception:\n app.logger.error('Error sending via mailjet: %r', result.text)\n raise MailFailure('Error sending via mailjet!')\n if j['Messages'][0]['Status'] != 'success':\n app.logger.error('Error sending via mailjet: %r', j)\n raise MailFailure('Error sending via mailjet!')", "def 
send_email(subject, sender, recipients, text_body, html_body):\n\t\tmsg = Message(subject, sender=sender, recipients=recipients)\n\t\tmsg.body = text_body\n\t\tmsg.html = html_body\n\t\tmail.send(msg)", "def main():\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token.pickle'):\n with open('token.pickle', 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials.json', SCOPES)\n creds = flow.run_local_server()\n # Save the credentials for the next run\n with open('token.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n\n service = build('gmail', 'v1', credentials=creds)\n\n labels = ListLabels(service, 'me')\n\n messages = ListMessagesWithLabels(service, 'me', label_ids=[\"CATEGORY_FORUMS\"])", "async def send_email_gmail(self, *, emails: List[EmailStr], username: str, generated_code: str):\n email_content = f\"\"\"\n <html>\n <body>\n <p>Hello {username}, Your email verification code is {generated_code}\n <br>Thanks for using our Todo Application.</p>\n </body>\n </html>\n \"\"\"\n message = email.message.Message()\n message[\"Subject\"] = 'Todo App Authentication'\n message[\"From\"] = EMAIL_ADDR\n\n message.add_header('Content-Type', 'text/html')\n message.set_payload(email_content)\n client = smtplib.SMTP('smtp.gmail.com: 587')\n client.starttls()\n\n # Login Credentials to send the mail.\n client.login(message[\"From\"], EMAIL_PWD)\n\n for user_email in emails:\n client.sendmail(message[\"From\"], user_email, message.as_string())\n print(f\"sending to {user_email}\")", "def with_smtp(cls, receiver, email_template, subject):\n\n msg = 
MIMEMultipart('alternative')\n sender = cls.sender.format(cls.domain_name)\n msg['Subject'] = subject\n msg['From'] = sender\n msg['To'] = receiver\n\n part1 = MIMEText(email_template, 'html')\n msg.attach(part1)\n\n cls.send(sender, receiver, msg)", "def __init__(self, user, password, _recipients, templatedir='templates'):\n\n self.user = user\n self.password = password\n self.recipient = _recipients if type (_recipients) is list else [_recipients]\n self.server = 'smtp.gmail.com'\n self.port = 587\n\n if os.path.isdir(templatedir):\n self.templatedir = templatedir\n else:\n self.templatedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), templatedir)\n\n self.env = Environment(loader=FileSystemLoader(self.templatedir))", "def send_error_email(receiver_email, subject, body):\n\n sender_email = \"[email protected]\"\n\n with open(CWD(\"mailcreds.txt\"), \"r\") as file:\n password = file.read()\n\n # Create a multipart message and set headers\n message = MIMEMultipart()\n message[\"From\"] = sender_email\n message[\"To\"] = receiver_email\n message[\"Subject\"] = subject\n\n # Add body to email\n message.attach(MIMEText(body, \"plain\"))\n\n text = message.as_string()\n\n # Log in to server using secure context and send email\n context = ssl.create_default_context()\n with smtplib.SMTP_SSL(\"smtp.gmail.com\", 465, context=context) as server:\n server.login(sender_email, password)\n server.sendmail(sender_email, receiver_email, text)", "def send_email(from_email, to_emails, subject, text, smtp_domain):\n if smtp_domain not in config[\"GUN_MAIL\"] or not config[\"GUN_MAIL\"].get(\n smtp_domain\n ).get(\"smtp_password\"):\n raise NotFound(\n \"SMTP Domain '{}' does not exist in configuration for GUN_MAIL or \"\n \"smtp_password was not provided. 
\"\n \"Cannot send email.\".format(smtp_domain)\n )\n\n api_key = config[\"GUN_MAIL\"][smtp_domain].get(\"api_key\", \"\")\n email_url = config[\"GUN_MAIL\"][smtp_domain].get(\"api_url\", \"\") + \"/messages\"\n\n return requests.post(\n email_url,\n auth=(\"api\", api_key),\n data={\"from\": from_email, \"to\": to_emails, \"subject\": subject, \"text\": text},\n )", "def send_email(sender, recipient, subject, html_content, txt_content, api_key):\n post_headers = SENDGRID_POST_HEADERS.copy()\n post_headers['Authorization'] = 'Bearer {0}'.format(api_key)\n data = {\n 'content': [\n {\n 'type': 'text/plain',\n 'value': txt_content,\n },\n {\n 'type': 'text/html',\n 'value': html_content,\n }\n ],\n 'from': {\n 'email': sender.email,\n 'name': sender.name,\n\n },\n 'personalizations': [\n {\n 'to': [\n {\n 'email': recipient.email,\n 'name': recipient.name,\n }\n ],\n }\n ],\n 'subject': subject,\n }\n response = requests.post(\n '{api_url}/mail/send'.format(api_url=SENDGRID_API_URL),\n headers=post_headers,\n data=json.dumps(data, ensure_ascii=False).encode('utf-8')\n )\n response.raise_for_status()", "def send_mail(to, sender, subject, message):\n\n msg = MIMEText(message)\n msg['From'] = sender\n msg['To'] = to\n msg['Subject'] = subject\n body = {'raw': base64.urlsafe_b64encode(msg.as_bytes()).decode()}\n MESSAGES.send(userId='me', body=body).execute()", "def __init__(self, sender, user):\r\n self.user = user\r\n self.sender = Email(sender)\r\n self.recipient = Email(self.user.getEmail())\r\n self.sg = sendgrid.SendGridAPIClient(apikey = \"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\")", "def __init__(self, mailbox, api_key, base_url='https://mailosaur.com/api', smtp_host='mailosaur.io'):\n self.mailbox = mailbox\n self.api_key = api_key\n self.base_url = base_url\n self.smtp_host = smtp_host", "def send_email(recipients, subject, body):\n # Store login info\n gmail_user = '[email protected]'\n gmail_password = 'carissahunterife'\n\n # 
Build message in format needed for gmail\n email_text = \"\\r\\n\".join([\n \"From: \" + gmail_user,\n \"To: \" + \",\".join(recipients),\n \"Subject: \" + subject,\n \"\",\n body\n ])\n\n\n try:\n # Open server\n server = smtplib.SMTP_SSL('smtp.gmail.com', 465)\n server.ehlo()\n server.login(gmail_user, gmail_password)\n server.sendmail(gmail_user, recipients, email_text)\n server.close()\n except:\n return None" ]
[ "0.7326535", "0.673758", "0.6671385", "0.6635266", "0.65357083", "0.6474429", "0.64681345", "0.6457392", "0.6438135", "0.6404577", "0.63888466", "0.6345327", "0.6312482", "0.6308214", "0.6279667", "0.6239861", "0.6233244", "0.62318534", "0.62204283", "0.62175447", "0.6185709", "0.6171517", "0.61580354", "0.6148945", "0.6135232", "0.6127026", "0.6109583", "0.6108891", "0.6087988", "0.6038631" ]
0.7533274
0
This is the main function to run the INCREMENT turing machine program
def main(argv): inputfile = '' try: opts, arg = getopt.getopt(argv, "hi:o:", ["ifile="]) except getopt.GetoptError: print('usage: main_increment.py -i <inputfile>') sys.exit(2) for opt, arg in opts: if opt == '-h': print('main_increment.py -i <inputfile>') sys.exit() elif opt in ("-i", "--ifile"): inputfile = arg program = open("tm_INCREMENT.txt").read() ifile = open(inputfile).read() turing_machine = TM() turing_machine.read(program) for line in ifile.splitlines(): tape = line print(line) tape_pos = 1 start = "INCREMENTSTART" end = "FINALINCREMENT" reject = "FINALREJECT" turing_machine.execute(tape, tape_pos, start, end, reject)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def run():\n\tif len(sys.argv) > 1 and sys.argv[1] in {'-V', '--version'}:\n\t\tprint(\"pokesim - Pokémon Battle Simulator - Version %s\" % __version__)\n\t\texit()\n\n\trandom.seed()\n\ttry:\n\t\tmain()\n\texcept (KeyboardInterrupt, EOFError):\n\t\texit(0)", "def main():\n\n GAME = \"Assignment1-Taxi-v2\"\n env = gym.make(GAME)\n n_state = env.observation_space.n\n n_action = env.action_space.n\n env = Monitor(env, \"taxi_simple\", force=True)\n\n s = env.reset()\n steps = 100\n for step in range(steps):\n env.render()\n action = int(input(\"Please type in the next action:\"))\n s, r, done, info = env.step(action)\n print(s)\n print(r)\n print(done)\n print(info)\n\n # close environment and monitor\n env.close()", "def main():\n tng.api.runner()", "def main():\n run_program()", "def main_int(args):\n\n print(\"Running for you in INT mode ... \")\n\n # Read in transcript ID list.\n tr_ids_dic = cliplib.read_ids_into_dic(args.in_tr_list)\n tr_ids_c = len(tr_ids_dic)\n assert tr_ids_c, \"no transcript IDs read in from \\\"%s\\\"\" %(args.in_tr_list)\n print(\"# transcript IDs read in: %i\" %(tr_ids_c))\n\n # Count input files.\n c_in = cliplib.count_file_rows(args.in_bed)\n assert c_in, \"--in BED file \\\"%s\\\" is empty\" %(args.in_bed)\n print(\"# of --in input sites: %i\" %(c_in))\n\n # Generate .tmp files.\n random_id = uuid.uuid1()\n tmp_bed1 = str(random_id) + \".filtered_input_sites.tmp.bed\"\n random_id = uuid.uuid1()\n tmp_bed2 = str(random_id) + \".exon_regions.tmp.bed\"\n random_id = uuid.uuid1()\n tmp_bed3 = str(random_id) + \".intron_regions.tmp.bed\"\n\n # Filter input sites.\n print(\"Extracting intron regions for given transcript IDs ... 
\")\n cliplib.bed_process_bed_file(args.in_bed, tmp_bed1,\n score_thr=args.score_thr,\n min_len=args.min_len,\n max_len=args.max_len,\n rev_filter=args.rev_filter)\n # Count filtered sites.\n c_in_filt = cliplib.count_file_rows(tmp_bed1)\n assert c_in_filt, \"no --in sites remaining after filtering\"\n print(\"# of --in sites after filtering: %i\" %(c_in_filt))\n\n # Extract exon+intron regions for given transcripts.\n cliplib.gtf_extract_exon_bed(args.in_gtf, tmp_bed2,\n out_intron_bed=tmp_bed3,\n tr_ids_dic=tr_ids_dic)\n\n # Overlap input sites with introns.\n params = \"-s -u -f %f\" %(args.min_intron_ovlp)\n cliplib.intersect_bed_files(tmp_bed1, tmp_bed3, params, args.out_bed)\n # Count overlapping sites.\n c_ovlp = cliplib.count_file_rows(args.out_bed)\n assert c_ovlp, \"no --in sites overlapping with given intron regions\"\n\n # Litter the street.\n clean_up = True\n if clean_up:\n # Remove tmp files.\n if os.path.exists(tmp_bed1):\n os.remove(tmp_bed1)\n if os.path.exists(tmp_bed2):\n os.remove(tmp_bed2)\n if os.path.exists(tmp_bed3):\n os.remove(tmp_bed3)\n\n # Report.\n print(\"# of --in sites overlappin with introns: %i\" %(c_ovlp))\n print(\"Overlapping sites written to:\\n%s\\n\" %(args.out_bed))", "def main() -> int:\n parser = argparse.ArgumentParser(\n prog=\"Deep Raga\",\n description=\"A program \"\n + \"that trains a neural network on MIDI \"\n + \"audio files, and uses that trained \"\n + \"network to optionally generate new \"\n + \"music that tries to emulate the style\"\n + \" of the training data.\",\n )\n args = parser.parse_args()\n return 0", "def main():\n\timport docopt\n\targs = docopt.docopt(main.__doc__)\n\tv = Ventilator(args[\"--port\"], int(args[\"--speed\"]))\n\tif args[\"--debug\"]:\n\t\tlogging.basicConfig(level=logging.DEBUG)\n\telse:\n\t\tlogging.basicConfig(level=logging.INFO)\n\n\tt = v.kernel(args[\"SOURCE\"], address=int(args[\"--address\"], 16),\n\t\truns=int(args[\"--runs\"]), 
repeats=int(args[\"--repeats\"]))\n\tasyncio.get_event_loop().run_until_complete(t)", "def main():\n\n BASIC.run(PROGRAM)", "def main():\n ex = Experiment(SEED)\n ex.main()", "def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"--identifier\", required=True,\n help=\"A short name/identifier for your experiment, e.g. 'ex42b'.\")\n args = parser.parse_args()\n\n train(args)", "def main():\r\n LEDStrip = createNeoPixelObject()\r\n setup(LEDStrip)\r\n clock(LEDStrip)", "def main():\n return 0", "def main():\n return 0", "def main():\n return", "def main():\n pass", "def run():\n main()", "def run():\r\n \r\n match = a4_acc.Game() # Instantiate a Game object \r\n setup(match)\r\n\r\n if constants.SHOW_GRAPHICS:\r\n axes= startGraphics(match.board) #step 0\r\n \r\n \r\n for k in range(constants.STEPS):\r\n update(match)\r\n updateGraphics(board, k, caxes)\r\n \r\n ########\r\n # TO DO: \r\n # Simulate game given the intial state for constants.STEPS iterations\r\n \r\n # Example code to call the updateGraphics function; the second argument\r\n # needs to be replaced:\r\n # if constants.SHOW_GRAPHICS:\r\n # updateGraphics(match.board, None, axes) \r\n \r\n # Do not change or add code below here for function run\r\n endNow= raw_input('Press ENTER to continue.')", "def main():\n even_game()", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():", "def main():" ]
[ "0.65374327", "0.6535227", "0.635439", "0.6309623", "0.6243766", "0.62209076", "0.6213963", "0.61493194", "0.61216235", "0.6104337", "0.60718757", "0.6022445", "0.6022445", "0.59985065", "0.5974082", "0.59726065", "0.5948179", "0.593142", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686", "0.59125686" ]
0.72980213
0
Generates human readable string for a number.
def MakeHumanReadable(num): i = 0 while i+1 < len(EXP_STRINGS) and num >= (2 ** EXP_STRINGS[i+1][0]): i += 1 rounded_val = round(float(num) / 2 ** EXP_STRINGS[i][0], 2) return '%s %s' % (rounded_val, EXP_STRINGS[i][1])
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _num2str(self, num):\n q, mod = divmod(num, 10)\n suffix = \"th\" if q == 1 else self.SUFFIX_DICT[mod]\n return f\"{num}{suffix}\"", "def __str__(self):\n if self.is_int():\n return str(self._num)\n else:\n return \"{0:d} / {1:d}\".format(self._num, self._den)", "def friendly_number(num):\n # Convert to a (shorter) string for human consumption\n string = \"\"\n # The length of the string can be determined by STRING_LENGTH or by how many\n # characters are necessary to present a base 30 representation of SIZE.\n while STRING_LENGTH and len(string) <= STRING_LENGTH \\\n or len(VALID_CHARS)**len(string) <= SIZE:\n # PREpend string (to remove all obvious signs of order)\n string = VALID_CHARS[num%len(VALID_CHARS)] + string\n num = num/len(VALID_CHARS)\n return string", "def base2str(self, int_number):\r\n return self.format_base % (float(int_number) / self.mult_base)", "def _int2str(num):\n if num<10:\n return '00%s'%str(num)\n elif 10<=num<100:\n return '0%s'%str(num)\n else:\n return '%s'%str(num)", "def spell_number(num):\n tens, units = num / 10, num % 10\n tens_str = NUMBERS_10[tens]\n units_str = NUMBERS_1[units]\n if tens == 1:\n return NUMBERS_TEEN[units]\n elif tens:\n if units:\n return \"{t} {u}\".format(t=tens_str, u=units_str)\n return \"{t}\".format(t=tens_str)\n else:\n return units_str", "def num_repr(num):\n if num <= 9999:\n return str(num)\n\n def digit_count(x):\n \"\"\" Return number of digits. 
\"\"\"\n return int(math.floor(math.log10(x)) + 1)\n\n digits = digit_count(num)\n sig = 3 if digits % 3 == 0 else 2\n rounded = int(round(num, int(sig - digits)))\n digits = digit_count(rounded)\n suffix = \"_kmBTqXYX\"[(digits - 1) // 3]\n front = 3 if digits % 3 == 0 else digits % 3\n\n if not front == 1:\n return str(rounded)[0:front] + suffix\n\n return str(rounded)[0] + \".\" + str(rounded)[1] + suffix", "def _get_random_number_code(self):\r\n return \"str(random.randint(0, 1e9))\"", "def friendly_number(number, base=1000, decimals=0, suffix='',\n\t\t\t\t\tpowers=['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']):\n\tfrom math import log, pow\n\textend = pow(10, decimals)\n\tpower_index = int(log(number * extend, base))\n\tpower = powers[power_index]\n\tif decimals:\n\t\tcut_off_length = base * power_index - decimals\n\t\tstr_num = str(number)[:-cut_off_length]\n\t\tif number[-cut_off_length] >= '5':\n\t\t\tstr_num = str(int(str_num)+1)\n\n\treal = number / power\n\treturn str(number)", "def to_str(n: float) -> str:\n return str(n)", "def numberFormat(self,num,isImag=False):\n string=str(num)\n if num!=0:\n if num>0:\n string=\"+\" if num==1 else \"+\"+string\n else:\n string=\"-\" if num==-1 else string\n return string+\"i\" if isImag else string\n return \"\"", "def num(number: int) -> str:\n numbers = {1: \"one\", 2: \"two\", 3: \"three\", 4: \"four\", 5: \"five\",\n 6: \"six\", 7: \"seven\", 8: \"eight\", 9: \"nine\", 10: \"ten\"}\n if number in numbers:\n return numbers[number]\n else:\n return f\"{number:,}\"", "def serialize_number(n):\n return str(n)", "def compact_number(value: int) -> str:\n value = float('{:.3g}'.format(value))\n magnitude = 0\n while abs(value) >= 1000:\n magnitude += 1\n value /= 1000.0\n return '{}{}'.format(\n '{:f}'.format(value).rstrip('0').rstrip('.'), ['', 'K', 'M', 'B', 'T'][magnitude]\n )", "def numToStrLabel(self, value):\n zero_count = 3 - len(str(value))\n return zero_count * \"0\" + str(value)", "def transforme(n):\n if 
n<10 :\n return '0'+str(n)\n else :\n return str(n)", "def num2str(num):\n require_type(is_number(num), 'parameter of number->string must be a number')\n return tostr(num)", "def _number(self, number: float, decimal: int = 0) -> str:\n\n auto_write = self._auto_write\n self._auto_write = False\n stnum = str(number)\n dot = stnum.find(\".\")\n\n if (len(stnum) > self._chars + 1) or ((len(stnum) > self._chars) and (dot < 0)):\n self._auto_write = auto_write\n raise ValueError(\n \"Input overflow - {0} is too large for the display!\".format(number)\n )\n\n if dot < 0:\n # No decimal point (Integer)\n places = len(stnum)\n else:\n places = len(stnum[:dot])\n\n if places <= 0 < decimal:\n self.fill(False)\n places = self._chars\n\n if \".\" in stnum:\n places += 1\n\n # Set decimal places, if number of decimal places is specified (decimal > 0)\n txt = stnum\n if places > 0 < decimal < len(stnum[places:]) and dot > 0:\n txt = stnum[: dot + decimal + 1]\n elif places > 0:\n txt = stnum[:places]\n\n if len(txt) > self._chars + 1:\n self._auto_write = auto_write\n raise ValueError(\"Output string ('{0}') is too long!\".format(txt))\n\n self._text(txt)\n self._auto_write = auto_write\n\n return txt", "def number_as_string(x):\n \n numnames = {1 : \"one\", 2 : \"two\", 3 : \"three\", 4 : \"four\", 5 : \"five\", 6 : \"six\", 7 : \"seven\", 8 : \"eight\", 9 : \"nine\",\n 10 : \"ten\", 11 : \"eleven\", 12 : \"twelve\", 13 : \"thirteen\", 14 : \"fourteen\", 15 : \"fifteen\", 16 : \"sixteen\",\n 17 : \"seventeen\", 18 : \"eighteen\", 19 : \"nineteen\", 20 : \"twenty\", 30 : \"thirty\", 40 : \"forty\", 50 : \"fifty\", \n 60 : \"sixty\", 70 : \"seventy\", 80 : \"eighty\", 90 : \"ninety\"}\n \n numparts = []\n needAnd = (x > 100) and (x % 100)\n if x >= 1000:\n numparts.append(numnames[x/1000])\n numparts.append(\"thousand\")\n x %= 1000\n \n if x >= 100:\n numparts.append(numnames[x/100])\n numparts.append(\"hundred\")\n x %= 100\n \n if needAnd:\n numparts.append(\"and\")\n \n if 
11 <= x <= 19:\n numparts.append(numnames[x])\n else:\n if x >= 10:\n numparts.append(numnames[(x/10)*10])\n x %= 10\n\n if x > 0:\n numparts.append(numnames[x])\n \n return \" \".join(numparts)", "def human_format(num, signed=False):\n if num <= 1005:\n return num\n\n num = float('{:.3g}'.format(num))\n magnitude = 0\n while abs(num) >= 1000:\n magnitude += 1\n num /= 1000.0\n my_num = '{} {}'.format('{:f}'.format(num).rstrip('0').rstrip('.').replace('.',','), ['', 'mila', 'Mln.', 'G', 'T'][magnitude])\n if signed and num > 0:\n my_num = '+' + my_num\n return my_num", "def number(i):\r\n return ''.join(str(_random.randrange(0, 10)) for x in xrange(i))", "def get_str_from_expnotation(num):\n return '{0:.15f}'.format(num)", "def intRender(self, number):\n\n data = unicode(number)\n bites = list()\n\n while data:\n bites.append(data[-3:])\n data = data[:-3]\n\n return \" \".join(reversed(bites))", "def ten(number: int) -> str:\n\n string_form = str(number)\n return string_form if number >= 0 else \"0\" + string_form", "def english(number):\r\n if number == 0:\r\n return 'zero'\r\n word = ''\r\n for step in itertools.count():\r\n number, rest = divmod(number, 1000)\r\n word = format_num(en3(rest), step) + word\r\n if number == 0:\r\n return word.strip()", "def number_route(n):\n return \"{:d} is a number\".format(n)", "def num_generator(num):\n\n num = str(num)\n if len(num) == 1:\n return '0'+num\n elif len(num) == 2:\n return num\n else:\n print('There was a problem with the number generator')", "def quote2str(self, int_number):\r\n return self.format_quote % (float(int_number) / self.mult_quote)", "def textualize(num):\n if isinstance(num, float):\n num = int(num)\n # special case\n if num == 0:\n return 'zero'\n\n # if the number is negative, we put the word\n # 'negative' in front of it.\n is_negative = False\n if num < 0:\n is_negative = True\n num = -1 * num\n\n num = str(num)\n # pad with zeroes\n while len(num) % 3 != 0:\n num = ''.join([ '0', num ])\n\n # as 
groups are textualized, their strings will be\n # appended to this list\n num_string = []\n group_counter = 0\n while len(num) > 0:\n group = num[-3:]\n num = num[:-3]\n text = _textualize_group(group)\n\n # thousand, million, etc.\n if group_counter > 0 and text:\n group_name = group_names[group_counter]\n text = ' '.join([ text, group_name ])\n\n if text:\n num_string.insert(0, text)\n\n group_counter += 1\n\n if is_negative:\n num_string.insert(0, 'negative')\n\n return ' '.join(num_string)", "def formatted_number(number):\n try:\n number = int(number)\n if number < 0:\n return '-' + formatted_number(-number)\n result = ''\n while number >= 1000:\n number, number2 = divmod(number, 1000)\n result = \",%03d%s\" % (number2, result)\n return \"%d%s\" % (number, result)\n except Exception:\n return \"\"" ]
[ "0.7651889", "0.7019703", "0.6973662", "0.696883", "0.69481635", "0.69195724", "0.69157094", "0.6817007", "0.66960204", "0.6691108", "0.668076", "0.66506594", "0.6627283", "0.6615014", "0.66030735", "0.6598126", "0.6549139", "0.65482795", "0.6547588", "0.65153563", "0.650068", "0.6478427", "0.64768195", "0.6473324", "0.64653814", "0.6464866", "0.6447226", "0.6430678", "0.63946146", "0.6379095" ]
0.7083336
1
Constructs a StorageUri string for the given iterated_uri and object.
def UriStrFor(iterated_uri, obj): return '%s://%s/%s' % (iterated_uri.scheme, obj.bucket.name, obj.name)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def StorageUri(self, uri_str, debug=0, validate=True):\n return boto.storage_uri(\n uri_str, 'file', debug=debug, validate=validate,\n bucket_storage_uri_class=self.bucket_storage_uri_class)", "def create_uri(uri):\n return URIRef(uri)", "def uri_string(self):\n if isinstance(self.entity, int):\n uri_string = \"{{{0}}}\".format(self.entity)\n elif isinstance(self.entity, NodePointer):\n uri_string = \"{{{0}}}\".format(self.entity.address)\n else:\n try:\n uri_string = self.entity.ref\n except AttributeError:\n uri_string = ustr(self.entity)\n if self.segments:\n if not uri_string.endswith(\"/\"):\n uri_string += \"/\"\n uri_string += \"/\".join(map(percent_encode, self.segments))\n return uri_string", "def _append_object(base_path, obj)->str:\n base_path = _trim_path(base_path)\n return f\"{base_path}/{obj}\"", "def construct_sas_url(blob, uri):\n newuri = copy.copy(uri)\n newuri.pathname = '{}/{}'.format(uri.path, quote(blob.name.encode('utf-8')))\n return newuri.geturl()", "def ConstructDstUri(self, src_uri, exp_src_uri, base_dst_uri):\n if base_dst_uri.names_container():\n # To match naming semantics of UNIX 'cp' command, copying files\n # to buckets/dirs should result in objects/files named by just the\n # final filename component; while copying directories should result\n # in objects/files mirroring the directory hierarchy. 
Example of the\n # first case:\n # gsutil cp dir1/file1 gs://bucket\n # should create object gs://bucket/file1\n # Example of the second case:\n # gsutil cp dir1/dir2 gs://bucket\n # should create object gs://bucket/dir2/file2 (assuming dir1/dir2\n # contains file2).\n if src_uri.names_container():\n dst_path_start = (src_uri.object_name.rstrip(os.sep)\n .rpartition(os.sep)[-1])\n start_pos = exp_src_uri.object_name.find(dst_path_start)\n dst_key_name = exp_src_uri.object_name[start_pos:]\n else:\n # src is a file or object, so use final component of src name.\n dst_key_name = os.path.basename(exp_src_uri.object_name)\n if base_dst_uri.is_file_uri():\n # dst names a directory, so append src obj name to dst obj name.\n dst_key_name = '%s%s%s' % (base_dst_uri.object_name, os.sep,\n dst_key_name)\n self.CheckForDirFileConflict(exp_src_uri, dst_key_name)\n else:\n # dest is an object or file: use dst obj name\n dst_key_name = base_dst_uri.object_name\n return base_dst_uri.clone_replace_name(dst_key_name)", "def uri_for_service(self, region, service_id, base_uri):\n return str(URLPath.fromString(base_uri)\n .child(\"service\").child(region).child(service_id).child(\"\"))", "def generate_uri(uri):\n return uri[:-5] + uuid.uuid4().hex", "def __str__(self):\n if self._str is None:\n # special cases\n if self == URI.INVALID():\n self._str = \"[invalid]\"\n elif self == URI.EMPTY():\n self._str = \"\"\n elif self == URI.INLINE():\n self._str = \"[inline]\"\n elif self == URI.EVAL():\n self._str = \"[eval]\"\n elif not self._isEmpty(self._scheme) and self._isEmpty(self._host) and self._isEmpty(self._port) and self._isEmpty(self._path) and self._isEmpty(self._query):\n self._str = self._scheme + \":\"\n else:\n self._str = \"\"\n if self._scheme in defaults.schemesWithNoDoubleSlash:\n self._str += self._scheme + \":\"\n elif self._scheme is not None:\n self._str += self._scheme + \"://\"\n \n self._str += self._host\n \n if self._port is not None:\n self._str += \":\" + 
str(self._port)\n \n if self._path is not None:\n self._str += urllib.quote(self._path.encode('utf8')).decode('ascii')\n \n if self._query is not None:\n self._str += \"?\" + self._query\n return self._str", "def prepare_resource_uri(self, object):\n return '/api/v1/actor/{0}/'.format(object.id)", "def to_uri(bucket: str, key: str) -> str:\n return f's3://{bucket}/{key}'", "def __init__(self, object_name, debug, is_stream=False):\r\n\r\n self.scheme = 'file'\r\n self.bucket_name = ''\r\n self.object_name = object_name\r\n self.uri = 'file://' + object_name\r\n self.debug = debug\r\n self.stream = is_stream", "def __init__(self, scheme, bucket_name=None, object_name=None,\r\n debug=0, connection_args=None):\r\n\r\n self.scheme = scheme\r\n self.bucket_name = bucket_name\r\n self.object_name = object_name\r\n if connection_args:\r\n self.connection_args = connection_args\r\n if self.bucket_name and self.object_name:\r\n self.uri = ('%s://%s/%s' % (self.scheme, self.bucket_name,\r\n self.object_name))\r\n elif self.bucket_name:\r\n self.uri = ('%s://%s/' % (self.scheme, self.bucket_name))\r\n else:\r\n self.uri = ('%s://' % self.scheme)\r\n self.debug = debug", "def _uri_realm_creator(self, endpoint=\"json\", realm=None, uri=None, arguments=None):\n if realm is not None:\n uri = endpoint + '/' + realm + '/' + uri\n else:\n uri = endpoint + '/' + uri\n\n if arguments is not None:\n uri += arguments\n\n return uri", "def __init__(self):\r\n raise BotoClientError('Attempt to instantiate abstract StorageUri '\r\n 'class')", "def build_uri(secret, name, initial_count=None, issuer_name=None,\n algorithm=None, digits=None, period=None):\n # initial_count may be 0 as a valid param\n is_initial_count_present = (initial_count is not None)\n\n # Handling values different from defaults\n is_algorithm_set = (algorithm is not None and algorithm != 'sha1')\n is_digits_set = (digits is not None and digits != 6)\n is_period_set = (period is not None and period != 30)\n\n otp_type = 
'hotp' if is_initial_count_present else 'totp'\n base_uri = 'otpauth://{0}/{1}?{2}'\n\n url_args = {'secret': secret}\n\n label = quote(name)\n if issuer_name is not None:\n label = quote(issuer_name) + ':' + label\n url_args['issuer'] = issuer_name\n\n if is_initial_count_present:\n url_args['counter'] = initial_count\n if is_algorithm_set:\n url_args['algorithm'] = algorithm.upper()\n if is_digits_set:\n url_args['digits'] = digits\n if is_period_set:\n url_args['period'] = period\n\n uri = base_uri.format(otp_type, label, urlencode(url_args).replace(\"+\", \"%20\"))\n return uri", "def _get_blob_path(self, prefix: str, oid: str) -> str:\n if not self.path_prefix:\n storage_prefix = ''\n elif self.path_prefix[0] == '/':\n storage_prefix = self.path_prefix[1:]\n else:\n storage_prefix = self.path_prefix\n return posixpath.join(storage_prefix, prefix, oid)", "def object_url(self, object_t, object_id=None, relation=None):\n if object_t not in self.objects_types:\n raise TypeError(f\"{object_t} is not a valid type\")\n request_items = (\n str(item) for item in [object_t, object_id, relation] if item is not None\n )\n request_path = \"/\".join(request_items)\n return self.url(request_path)", "def uri(self):\n parts = []\n # if I have a scheme\n if self.scheme: parts.append('{}:'.format(self.scheme))\n # if I have an authority\n if self.authority: parts.append('//{}'.format(self.authority))\n # if I have an address\n if self.address: parts.append('{}'.format(self.address))\n # if I have a query\n if self.query: parts.append('?{}'.format(self.query))\n # if I have a fragment\n if self.fragment: parts.append('#{}'.format(self.fragment))\n # assemble and return\n return ''.join(parts)", "def create_uri(self, chunk, document = False):\n # first we create the the base uri\n if self.options.get(\"prefix\"):\n prefix = self.options.get(\"prefix\")\n else:\n prefix = gethostname() + \"#\"\n\n word = chunk.split(\"/\")[0] if not document else chunk\n\n ## calculate the index of 
the current chunk\n # find all indices in the text\n indices = [m.start() for m in re.finditer(re.escape(word), self.text)]\n\n # indices could be None because of - I think - a bug in MontyLingua\n # which tags me/you as me/PRP :/: you/PRP and so the colon can't be\n # found in the orginal text. Because the slash is the only known\n # case of this bug, we simply replace the colon\n if not indices:\n indices = [m.start() for m in re.finditer(\"/\", self.text)]\n\n if len(indices) > 1:\n try:\n # get current position\n index = indices[self.positions[word]]\n except KeyError:\n # the word is not saved yet\n index = indices[0]\n self.positions[word] = 0\n # increase current position\n self.positions[word] += 1\n else:\n index = indices[0]\n \n # now create the unique identifier\n if self.options.get(\"urirecipe\") == \"offset\":\n uri = \"offset_\"\n uri += str(index) + \"_\"\n uri += str(index + len(word)) + \"_\"\n elif self.options.get(\"urirecipe\") == \"context-hash\":\n con_len = self.options.get(\"context-length\")\n uri = \"hash_\"\n uri += str(con_len) + \"_\"\n uri += str(len(word)) + \"_\"\n context = self.text[max(0,index - con_len):index]\n context += \"(\" + word + \")\"\n context += self.text[index+len(word):min(len(self.text),index+len(word) + con_len)]\n uri += hashlib.md5(context).hexdigest() + \"_\"\n uri += word[:20]\n\n return prefix + urllib.quote(uri)", "def _build_uri(self, **kwargs):\n target_uri, version = str(), None\n\n if kwargs.get('category') not in ['performance', 'common']:\n version = self._build_uri_get_version(kwargs.get('version'),\n kwargs.get('no_version'))\n if version:\n target_uri += '/{version}'.format(version=version)\n\n target_uri += '/{category}'.format(\n category=kwargs.get('category'))\n\n if kwargs.get('resource_level'):\n target_uri += '/{resource_level}'.format(\n resource_level=kwargs.get('resource_level'))\n\n if kwargs.get('resource_level_id'):\n target_uri += '/{resource_level_id}'.format(\n 
resource_level_id=kwargs.get('resource_level_id'))\n\n if kwargs.get('resource_type'):\n target_uri += '/{resource_type}'.format(\n resource_type=kwargs.get('resource_type'))\n if kwargs.get('resource_type_id'):\n target_uri += '/{resource_type_id}'.format(\n resource_type_id=kwargs.get('resource_type_id'))\n\n if kwargs.get('resource'):\n target_uri += '/{resource}'.format(\n resource=kwargs.get('resource'))\n if kwargs.get('resource_id'):\n target_uri += '/{resource_id}'.format(\n resource_id=kwargs.get('resource_id'))\n\n if kwargs.get('object_type'):\n target_uri += '/{object_type}'.format(\n object_type=kwargs.get('object_type'))\n if kwargs.get('object_type_id'):\n target_uri += '/{object_type_id}'.format(\n object_type_id=kwargs.get('object_type_id'))\n\n return target_uri", "def generate_presigned_GET_url(\n self,\n bucket: str,\n object_name: str,\n **kwargs) -> str:\n # TODO: things like http ranges need to be explicit parameters.\n # users of this API should not need to know the argument names presented\n # to the cloud API.\n raise NotImplementedError()", "def path_for(objectid):", "def _object_path(self, name: str) -> str:\n prefix = name[:2]\n suffix = name[2:]\n return posixpath.join(self._path, \"objects\", prefix, suffix)", "def build(uri):\n return uri if type(uri) is TaxonomyURI else TaxonomyURI.from_str(uri)", "def _make_url(self, path):\n if not self.base_location:\n raise ValueError(\"No base_location set. Cannot construct url.\")\n\n if path:\n path = self._normalise_last_slashes(path)\n path = self._normalise_head_slashes(path)\n\n return \"\".join((self.base_location, self.endpoint, path))", "def generate_storage_url(filename, request=None, *args):\n\n path = generate_object_storage_name(os.path.splitext(filename)[0], filename)\n\n # There are three scenarios where Studio might be run as:\n #\n # 1. In normal kubernetes, nginx will proxy for us. We'll know we're in kubernetes when the\n # environment variable RUN_MODE=k8s\n #\n # 2. 
In Docker Compose and bare metal runserver, we'll be running in runserver, and minio\n # will be exposed in port 9000 in the host's localhost network.\n\n # Note (aron): returning the true storage URL (e.g. https://storage.googleapis.com/storage/a.mp4)\n # isn't too important, because we have CDN in front of our servers, so it should be cached.\n # But change the logic here in case there is a potential for bandwidth and latency improvement.\n\n # Detect our current state first\n run_mode = os.getenv(\"RUN_MODE\")\n\n # if we're running inside k8s, then just serve the normal /content/{storage,databases} URL,\n # and let nginx handle proper proxying.\n if run_mode == \"k8s\":\n url = \"/content/{path}\".format(\n path=path,\n )\n\n # if we're in docker-compose or in baremetal, just return the object storage URL as localhost:9000\n elif run_mode == \"docker-compose\" or run_mode is None:\n # generate the minio storage URL, so we can get the GET parameters that give everyone\n # access even if they don't need to log in\n params = urllib.parse.urlparse(default_storage.url(path)).query\n host = \"localhost\"\n port = 9000 # hardcoded to the default minio IP address\n url = \"http://{host}:{port}/{bucket}/{path}?{params}\".format(\n host=host,\n port=port,\n bucket=settings.AWS_S3_BUCKET_NAME,\n path=path,\n params=params,\n )\n\n return url", "def get_uri(self):\n if self._uri is None:\n self._uri = \"{0}{1}/{2}\".format(\n self.session.resource_prefix,\n self.base_uri,\n self.ip_or_ifname_or_group_name,\n )\n\n return self._uri", "def __repr__(self) -> str:\n return (f'dicomweb_client.URI(base_url={self.base_url!r}, '\n f'study_instance_uid={self.study_instance_uid!r}, '\n f'series_instance_uid={self.series_instance_uid!r}, '\n f'sop_instance_uid={self.sop_instance_uid!r}, '\n f'frames={self.frames!r}, suffix={self.suffix!r})')", "def href(obj):\n if isinstance(obj, Filing):\n return reverse('filing', args=(obj.region, obj.name, obj.period_name))\n else:\n raise 
ValueError('cannot build a URL for {}.{} objects'.format(\n type(obj).__module__, type(obj).__name__))" ]
[ "0.60313696", "0.58099467", "0.57456857", "0.55637884", "0.55138993", "0.54192", "0.53465", "0.53278655", "0.53194463", "0.5285201", "0.52395695", "0.5174997", "0.51727", "0.51406395", "0.51248956", "0.5074739", "0.50008357", "0.49282068", "0.48954654", "0.48884666", "0.4888155", "0.48549575", "0.4814644", "0.4813384", "0.48094732", "0.48076263", "0.47912684", "0.4746778", "0.47445935", "0.47135764" ]
0.80973625
0
Helper to instantiate gslib.WildcardIterator, passing self.bucket_storage_uri_class to support mocking/testing. Args are same as gslib.WildcardIterator interface, but without the bucket_storage_uri_class param (which is instead filled in from Command class state).
def CmdWildcardIterator(self, uri_or_str, result_type=ResultType.URIS, headers=None, debug=0): return wildcard_iterator.wildcard_iterator( uri_or_str, result_type=result_type, headers=headers, debug=debug, bucket_storage_uri_class=self.bucket_storage_uri_class)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __init__(self,args):\n storage_client = storage.Client()\n self.parsed = urlparse(args.input_dir)\n \n #parse gcp path\n self.bucket = storage_client.get_bucket(self.parsed.hostname) \n images=self.bucket.list_blobs(prefix=self.parsed.path[1:])\n \n #image list\n self.image_list=[]\n for image in images:\n self.image_list.append(\"gs://\" + self.bucket.name +\"/\"+ str(image.name))\n \n #if no ceiling, process all arguments\n if not args.limit:\n limit=images.num_results\n else:\n limit=args.limit", "def __init__(self, bucket):\n self.bucket = bucket", "def set_bucket_class(self, bucket_class):\r\n self.bucket_class = bucket_class", "def _get_container_iterator(\n self, cloud_url, recursion_level):\n # End URL with '/*', so WildcardIterator won't filter out its contents.\n new_url_string = cloud_url.versionless_url_string\n if cloud_url.versionless_url_string[-1] != cloud_url.delimiter:\n new_url_string += cloud_url.delimiter\n new_cloud_url = storage_url.storage_url_from_string(new_url_string + '*')\n\n fields_scope = _translate_display_detail_to_fields_scope(\n self._display_detail, is_bucket_listing=False)\n iterator = wildcard_iterator.CloudWildcardIterator(\n new_cloud_url,\n all_versions=self._all_versions,\n error_on_missing_key=False,\n fields_scope=fields_scope)\n return self._recursion_helper(iterator, recursion_level)", "def iterator(self, **kwargs: Any) -> \"AbstractQueueIterator\":\n\n return QueueIterator(self, **kwargs)", "def __init__(self):\n self.bucket = 1000\n self.bucketItem = 1000\n \n self.hashset = [None] * self.bucket", "def __init__(self):\n self.bucket_length = 997\n self.bucket_array = [Bucket() for i in range(self.bucket_length)]", "def blob_generator(bucket_name, pattern):\n cloud_bucket = get_gcsbucket(bucket_name)\n for blob in cloud_bucket.objects():\n if blob.key.endswith(pattern):\n yield blob.uri", "def __init__(self, iterator):\n self.iterator = []\n while iterator.hasNext():\n self.iterator.append(iterator.next())", "def 
ExpandWildcardsAndContainers(self, uri_strs, sub_opts=None, headers=None,\n debug=0):\n # The algorithm we use is:\n # 1. Build a first level expanded list from uri_strs consisting of all\n # URIs that aren't file wildcards, plus expansions of the file wildcards.\n # 2. Build dict from above expanded list.\n # We do so that we can properly handle the following example:\n # gsutil cp file0 dir0 gs://bucket\n # where dir0 contains file1 and dir1/file2.\n # If we didn't do the first expansion, this cp command would end up\n # with this expansion:\n # {file://file0:[file://file0],file://dir0:[file://dir0/file1,\n # file://dir0/dir1/file2]}\n # instead of the (correct) expansion:\n # {file://file0:[file://file0],file://dir0/file1:[file://dir0/file1],\n # file://dir0/dir1:[file://dir0/dir1/file2]}\n # The latter expansion is needed so that in the \"Copying...\" loop of\n # CopyObjsCommand we know that dir0 was being copied, so we create an\n # object called gs://bucket/dir0/dir1/file2. (Otherwise it would look\n # like a single file was being copied, so we'd create an object called\n # gs://bucket/file2.)\n\n should_recurse = False\n if sub_opts:\n for o, unused_a in sub_opts:\n if o == '-r' or o == '-R':\n should_recurse = True\n\n # Step 1.\n uris_to_expand = []\n for uri_str in uri_strs:\n uri = self.StorageUri(uri_str, debug=debug, validate=False)\n if uri.is_file_uri() and ContainsWildcard(uri_str):\n uris_to_expand.extend(list(\n self.CmdWildcardIterator(uri, headers=headers, debug=debug)))\n else:\n uris_to_expand.append(uri)\n\n # Step 2.\n result = {}\n for uri in uris_to_expand:\n if uri.names_container():\n if not should_recurse:\n if uri.is_file_uri():\n desc = 'directory'\n else:\n desc = 'bucket'\n print 'Omitting %s \"%s\".' 
% (desc, uri.uri)\n result[uri] = []\n continue\n if uri.is_file_uri():\n # dir -> convert to implicit recursive wildcard.\n uri_to_iter = '%s/**' % uri.uri\n else:\n # bucket -> convert to implicit wildcard.\n uri_to_iter = uri.clone_replace_name('*')\n else:\n uri_to_iter = uri\n result[uri] = list(self.CmdWildcardIterator(\n uri_to_iter, headers=headers, debug=debug))\n return result", "def __init__(self, iterator: BucketIterator, pad_index: int, sos_index: int, eos_index: int) -> None:\n self.iterator = iterator\n self.pad_index = pad_index\n self.sos_index = sos_index\n self.eos_index = eos_index", "def __init__(self):\r\n raise BotoClientError('Attempt to instantiate abstract StorageUri '\r\n 'class')", "def __new__(cls, *rate_limit_groups):\n self = object.__new__(cls)\n self.rate_limit_groups = rate_limit_groups\n return self", "def __init__(self, project_id, bucket_name):\n self.project_id = project_id\n self.bucket_name = bucket_name\n self.client = storage.Client(project=project_id)\n self.bucket = self.client.get_bucket(bucket_name)", "def list(\n self,\n bucket: str,\n prefix: str=None,\n delimiter: str=None,\n ) -> typing.Iterator[str]:\n raise NotImplementedError()", "def __init__(self, quota_driver_class=None):\n\n self._resources = {}\n self._driver = DbQuotaDriver()", "def __iter__(self, *item_types):\n return self.storage().__iter__(*item_types)", "def iter_cls(*classes, blacklist=tuple()):\n for bases in permutations(classes):\n if bases not in blacklist:\n yield type('_'.join(c.__name__ for c in bases), bases, {})", "def __init__(self, storage, datasets, batch_size, max_batches,\n with_weights=False):\n super(RandomStorageIterator, self).__init__(storage, datasets,\n batch_size, with_weights)\n self._max_batches = max_batches\n self._sample = self.__random_sample()", "def __init__(self):\n self.size = 1000\n self.bucket = [None] * self.size", "def __init__(self, iterator):\n self._iter = iterator", "def __init__(\n self, *,\n requests: int,\n 
time_unit: int,\n cooldown: int,\n count_failed_requests: bool = True,\n bypass: _BYPASS_TYPE = lambda: False,\n ):\n # Instance management\n self.request_id = itertools.count(0)\n self.state: __BucketBase._STATE = {}\n\n # Bucket Params\n self.ROUTE_NAME: typing.Optional[str] = None\n self.ROUTES: list[str] = []\n self.BYPASS = bypass\n\n _limits_type = namedtuple(\"LIMITS\", \"requests, time_unit, cooldown\")\n self.LIMITS: _limits_type = _limits_type(requests, time_unit, cooldown)\n\n self.COUNT_FAILED = count_failed_requests\n\n self._post_init()", "def n1qlQueryEx(self, cls, *args, **kwargs):\n kwargs['itercls'] = cls\n o = super(AsyncBucket, self).n1ql_query(*args, **kwargs)\n if not self.connected:\n self.connect().addCallback(lambda x: o.start())\n else:\n o.start()\n return o", "def match_class_glob(_class, saltclass_path):\n straight, sub_init, sub_straight = get_class_paths(_class, saltclass_path)\n classes = []\n matches = []\n matches.extend(glob.glob(straight))\n matches.extend(glob.glob(sub_straight))\n matches.extend(glob.glob(sub_init))\n if not matches:\n log.warning(\"%s: Class globbing did not yield any results\", _class)\n for match in matches:\n classes.append(get_class_from_file(match, saltclass_path))\n return classes", "def ListCommand(self, args, sub_opts=None, headers=None, debug=0):\n listing_style = ListingStyle.SHORT\n get_bucket_info = False\n if sub_opts:\n for o, unused_a in sub_opts:\n if o == '-b':\n get_bucket_info = True\n if o == '-l':\n listing_style = ListingStyle.LONG\n if o == '-L':\n listing_style = ListingStyle.LONG_LONG\n if not args:\n # default to listing all gs buckets\n args = ['gs://']\n\n total_objs = 0\n total_bytes = 0\n for uri_str in args:\n uri = self.StorageUri(uri_str, debug=debug, validate=False)\n\n if not uri.bucket_name:\n # Provider URI: add bucket wildcard to list buckets.\n for uri in self.CmdWildcardIterator('%s://*' % uri.scheme,\n headers=headers, debug=debug):\n (bucket_objs, bucket_bytes) = 
self.PrintBucketInfo(uri, listing_style,\n headers=headers,\n debug=debug)\n total_bytes += bucket_bytes\n total_objs += bucket_objs\n\n elif not uri.object_name:\n if get_bucket_info:\n # ls -b request on provider+bucket URI: List info about bucket(s).\n for uri in self.CmdWildcardIterator(uri, headers=headers,\n debug=debug):\n (bucket_objs, bucket_bytes) = self.PrintBucketInfo(uri,\n listing_style,\n headers=headers,\n debug=debug)\n total_bytes += bucket_bytes\n total_objs += bucket_objs\n else:\n # ls request on provider+bucket URI: List objects in the bucket(s).\n for obj in self.CmdWildcardIterator(uri.clone_replace_name('*'),\n ResultType.KEYS,\n headers=headers, debug=debug):\n total_bytes += self.PrintObjectInfo(uri, obj, listing_style,\n headers=headers, debug=debug)\n total_objs += 1\n\n else:\n # Provider+bucket+object URI -> list the object(s).\n for obj in self.CmdWildcardIterator(uri, ResultType.KEYS,\n headers=headers, debug=debug):\n total_bytes += self.PrintObjectInfo(uri, obj, listing_style,\n headers=headers, debug=debug)\n total_objs += 1\n if listing_style != ListingStyle.SHORT:\n print ('TOTAL: %d objects, %d bytes (%s)' %\n (total_objs, total_bytes, MakeHumanReadable(float(total_bytes))))", "def from_class(cls, cls_to_use, iteratable, **kwargs) -> 'ThreadCollection':\n return ThreadCollection([cls_to_use(it, **kwargs) for it in iteratable])", "def search_instance(search):\n while search.bucket_names:\n bucket_name = search.bucket_names.pop(0) #Pops from start of array, use no param for end\n check_s3_bucket(bucket_name=bucket_name, access_key=search.access_key, secret_key=search.secret_key, output_file=search.output_file)\n \n time.sleep(sleep_sec_between_attempts)\n search.progress()\n if search.print_bucket_names:\n print bucket_name", "def __init__(self, *args, **kwargs):\n _gdi_.RegionIterator_swiginit(self,_gdi_.new_RegionIterator(*args, **kwargs))", "def __init__(self, *args):\n _snap.TRowIterator_swiginit(self, 
_snap.new_TRowIterator(*args))", "def GetScaffolderClasses(cls) -> Iterator[Type[interface.Scaffolder]]:\n for scaffolder_class in cls._scaffolder_classes.values():\n yield scaffolder_class" ]
[ "0.54212415", "0.52524996", "0.5115315", "0.50989044", "0.49958292", "0.49308628", "0.49241865", "0.4795049", "0.47617844", "0.47472653", "0.4741023", "0.47379297", "0.47305182", "0.4728124", "0.47225723", "0.46678272", "0.46606597", "0.4623661", "0.46185184", "0.46099395", "0.45900026", "0.45887643", "0.45875618", "0.4566583", "0.45498466", "0.45440003", "0.45433354", "0.45377046", "0.45276207", "0.45074123" ]
0.64149827
0
Implementation of setacl command.
def SetAclCommand(self, args, unused_sub_opts=None, headers=None, debug=0): acl_arg = args[0] uri_args = args[1:] provider = None first_uri = None # Do a first pass over all matched objects to disallow multi-provider # setacl requests, because there are differences in the ACL models. for uri_str in uri_args: for uri in self.CmdWildcardIterator(uri_str, headers=headers, debug=debug): if not provider: provider = uri.scheme elif uri.scheme != provider: raise CommandException('"setacl" command spanning providers not ' 'allowed.') if not first_uri: first_uri = uri # Get ACL object from connection for the first URI, for interpreting the # ACL. This won't fail because the main startup code insists on 1 arg # for this command. storage_uri = first_uri acl_class = storage_uri.acl_class() canned_acls = storage_uri.canned_acls() # Determine whether acl_arg names a file containing XML ACL text vs. the # string name of a canned ACL. if os.path.isfile(acl_arg): acl_file = open(acl_arg, 'r') acl_txt = acl_file.read() acl_file.close() acl_obj = acl_class() h = handler.XmlHandler(acl_obj, storage_uri.get_bucket()) try: xml.sax.parseString(acl_txt, h) except xml.sax._exceptions.SAXParseException, e: raise CommandException('Requested ACL is invalid: %s at line %s, ' 'column %s' % (e.getMessage(), e.getLineNumber(), e.getColumnNumber())) acl_arg = acl_obj else: # No file exists, so expect a canned ACL string. if acl_arg not in canned_acls: raise CommandException('Invalid canned ACL "%s".' % acl_arg) # Now iterate over URIs and set the ACL on each. for uri_str in uri_args: for uri in self.CmdWildcardIterator(uri_str, headers=headers, debug=debug): print 'Setting ACL on %s...' % uri uri.set_acl(acl_arg, uri.object_name, False, headers)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __acl__():", "def test_set_get_bad_acl(self):\n path_one = \"%s/one\" % (self.tests_path)\n auth_id = \"username_password:user:user\"\n self.shell.onecmd(\"create %s 'hello'\" % (path_one))\n self.shell.onecmd(\"set_acls %s 'world:anyone:r %s'\" % (\n path_one, auth_id))\n expected_output = \"Failed to set ACLs: \"\n expected_output += \"Bad ACL: username_password:user:user. \"\n expected_output += \"Format is scheme:id:perms.\\n\"\n self.assertEqual(expected_output, self.output.getvalue())", "def change_acl(self, acl):\n try:\n bucket_name = app.config['S3_BUCKET_NAME']\n s3_client = app.config['S3']\n\n keys = []\n list_objects = s3_client.list_objects(Bucket=bucket_name,\n Prefix=self.build_s3_base_prefix())\n if list_objects is not None and 'Contents' in list_objects:\n for ob in s3_client \\\n .list_objects(Bucket=bucket_name,\n Prefix=self.build_s3_base_prefix())['Contents']:\n keys.append(ob['Key'])\n\n for key in keys:\n s3_client.put_object_acl(Bucket=bucket_name, Key=key,\n ACL=acl)\n except Exception as e:\n app.logger.error(e)\n return False\n return True", "def config_mgmt_acl(zdcli, **kwargs):\n option = {}\n if kwargs: option.update(kwargs)\n \n logging.info(\"Create acl %s\" % option)\n cmd_block = _define_mgmt_ip_acl_cmd_block(option)\n zdcli.do_cfg(cmd_block)", "def acl_config_set(host_id, acl_field, acl_param, acl_config_fields, acl_config_param, user_name):\n global sqlalche_obj\n global html\n sqlalche_obj.sql_alchemy_db_connection_open()\n result = ''\n dictarr = []\n form_name = []\n err1 = []\n param = []\n count = 0\n resultarray = {}\n check_result = ''\n err_acl = 0\n oidname = oid_name['RU.RA.1.RAACLConfig.#.macAddress']\n oidtype = oid_type['RU.RA.1.RAACLConfig.#.macAddress']\n device_param_list = sqlalche_obj.session.query(Hosts.snmp_version_id, Hosts.snmp_write_community, Hosts.ip_address, Hosts.snmp_port, Hosts.config_profile_id, Hosts.snmp_read_community).\\\n filter(Hosts.host_id == host_id).all()\n acl_table = 
sqlalche_obj.session.query(SetOdu16RAConfTable).filter(\n SetOdu16RAConfTable.config_profile_id == device_param_list[0][4]).all()\n ra_acl_config = sqlalche_obj.session.query(SetOdu16RAAclConfigTable).filter(\n SetOdu16RAAclConfigTable.config_profile_id == device_param_list[0][4]).order_by(SetOdu16RAAclConfigTable.index).all()\n acl_oid = oid_name[acl_field]\n acl_oid_type = oid_type[acl_field]\n result += snmp_set(\n device_param_list[0][0], device_param_list[0][\n 1], device_param_list[0][2],\n device_param_list[0][3], acl_oid, acl_oid_type, acl_param)\n if 'aclMode.1' in result:\n err_acl = 1\n if err_acl == 1:\n acl_table[0].acl_mode = acl_param\n sqlalche_obj.session.commit()\n dic_acl = {}\n dic_acl['name'] = 'ACL Mode'\n dic_acl['value'] = acl_param\n dic_acl['textbox'] = 'RU.RA.1.RAConfTable.aclMode'\n dic_acl['status'] = err_acl\n if len(ra_acl_config) >= len(acl_config_fields):\n count = len(ra_acl_config)\n else:\n count = len(acl_config_fields)\n name_get = oidname.replace('#', '1')\n result += snmp_get(device_param_list[0][0], device_param_list[0][5],\n device_param_list[0][2], device_param_list[0][3], name_get)\n\n check_result = result.find('No Such Instance currently exists at this OID')\n\n if int(check_result) == -1:\n\n for i in range(0, count):\n if i < 10:\n\n err1.append(0)\n form_name.append('Mac Address %s' % (i + 1))\n param.append('macaddress.1.%s' % (i + 1))\n name = oidname.replace('#', str(i + 1))\n type = oidtype.replace('#', 's')\n if acl_config_param[i] == \"\":\n oidvalue = \" \"\n else:\n oidvalue = acl_config_param[i]\n result += snmp_set(\n device_param_list[0][\n 0], device_param_list[0][1], device_param_list[0][2],\n device_param_list[0][3], name, type, \"%s \" % (oidvalue))\n elif i >= 10:\n\n if len(ra_acl_config) > len(acl_config_fields):\n for j in range(len(ra_acl_config), len(acl_config_fields), -1):\n name = oidname.replace('#', str(j))\n type = oidtype.replace('#', 's')\n result += snmp_set(\n device_param_list[0][\n 
0], device_param_list[\n 0][1], device_param_list[0][2],\n device_param_list[0][3], '.1.3.6.1.4.1.26149.2.2.13.5.1.3.1.%s' % (j), 'i', '6')\n for k in range(10, len(acl_config_fields)):\n err1.append(0)\n form_name.append('Mac Address %s' % (k + 1))\n param.append('macaddress.1.%s' % (k + 1))\n name = oidname.replace('#', str(k + 1))\n type = oidtype.replace('#', 's')\n if acl_config_param[k] == \"\":\n oidvalue = \" \"\n else:\n oidvalue = acl_config_param[k]\n result += snmp_set(device_param_list[0][0], device_param_list[0][1], device_param_list[0][\n 2], device_param_list[0][3], name, type, \"%s \" % (oidvalue))\n break\n elif int(len(ra_acl_config)) == int(len(acl_config_fields)):\n for i in range(10, len(acl_config_fields)):\n err1.append(0)\n form_name.append('Mac Address %s' % (i + 1))\n param.append('macaddress.1.%s' % (i + 1))\n name = oidname.replace('#', str(i + 1))\n type = oidtype.replace('#', 's')\n if acl_config_param[i] == \"\":\n oidvalue = \" \"\n else:\n oidvalue = acl_config_param[i]\n result += snmp_set(device_param_list[0][0], device_param_list[0][1], device_param_list[0][\n 2], device_param_list[0][3], name, type, \"%s \" % (oidvalue))\n else:\n for k in range(10, len(ra_acl_config)):\n err1.append(0)\n form_name.append('Mac Address %s' % (k + 1))\n param.append('macaddress.1.%s' % (k + 1))\n name = oidname.replace('#', str(k + 1))\n type = oidtype.replace('#', 's')\n if acl_config_param[k] == \"\":\n oidvalue = \" \"\n else:\n oidvalue = acl_config_param[k]\n result += snmp_set(device_param_list[0][0], device_param_list[0][1], device_param_list[0][\n 2], device_param_list[0][3], name, type, \"%s \" % (oidvalue))\n for k in range(len(ra_acl_config), count):\n err1.append(0)\n form_name.append('Mac Address %s' % (k + 1))\n param.append('macaddress.1.%s' % (k + 1))\n name = oidname.replace('#', str(k + 1))\n type = oidtype.replace('#', 's')\n if acl_config_param[k] == \"\":\n oidvalue = \" \"\n else:\n oidvalue = acl_config_param[k]\n result += 
snmp_setmultiple(\n device_param_list[0][0], device_param_list[0][\n 1], device_param_list[\n 0][2], device_param_list[0][3],\n '.1.3.6.1.4.1.26149.2.2.13.5.1.3.1.%s' % (k + 1), 'i', '4', name, type, \"%s \" % (oidvalue))\n\n break\n else:\n for k in range(0, len(acl_config_fields)):\n err1.append(0)\n form_name.append('Mac Address %s' % (k + 1))\n param.append('macaddress.1.%s' % (k + 1))\n name = oidname.replace('#', str(k + 1))\n type = oidtype.replace('#', 's')\n if acl_config_param[k] == \"\":\n oidvalue = \" \"\n else:\n oidvalue = acl_config_param[k]\n result += snmp_setmultiple(\n device_param_list[0][0], device_param_list[\n 0][1], device_param_list[0][2], device_param_list[0][3],\n '.1.3.6.1.4.1.26149.2.2.13.5.1.3.1.%s' % (k + 1), 'i', '4', name, type, \"%s \" % (oidvalue))\n\n err = error_odu16(result, param, err1)\n dictarr.append(dic_acl)\n try:\n el = EventLog()\n if 1 in err1:\n el.log_event(\"Values Updated in UBR ACL Form\", \"%s\" % (user_name))\n for j in range(0, len(acl_config_fields)):\n dict = {}\n dict[\"name\"] = form_name[j]\n dict[\"value\"] = acl_config_param[j]\n dict[\"textbox\"] = acl_config_fields[j]\n dict[\"status\"] = err1[j]\n dictarr.append(dict)\n del_acl_config = sqlalche_obj.session.query(SetOdu16RAAclConfigTable).filter(\n between(SetOdu16RAAclConfigTable.index, 11, int(len(ra_acl_config)))).all()\n if del_acl_config == []:\n val = 1\n else:\n for i in range(0, len(del_acl_config)):\n sqlalche_obj.session.delete(del_acl_config[i])\n sqlalche_obj.session.commit()\n for i in range(0, len(acl_config_fields)):\n if i < 10:\n if err1[i] == 1:\n ra_acl_config[i].mac_address = acl_config_param[i]\n ra_acl_config[i].index = i + 1\n if i >= 10:\n if err1[i] == 1:\n sqlalche_obj.session.add(SetOdu16RAAclConfigTable('%s' % (\n device_param_list[0][4]), '%s' % (acl_config_param[i]), '%s' % (i + 1)))\n sqlalche_obj.session.commit()\n if err != '':\n raise Set_exception\n except Set_exception as e:\n resultarray[\"result\"] = dictarr\n 
resultarray[\"tableName\"] = 'SetOdu16RAAclConfigTable'\n resultarray['formAction'] = 'Acl_Cancel_Configuration.py'\n sqlalche_obj.sql_alchemy_db_connection_close()\n return str(resultarray)", "def _UpdateAclRule(self, entry):\n\n print 'Update Acl rule: %s' % (entry.GetEditLink().href)\n roleValue = \"http://schemas.google.com/gCal/2005#%s\" % (\"read\")\n entry.role = gdata.acl.data.AclRole(value=roleValue)\n returned_rule = self.cal_client.Update(entry)", "def set_permission(StackId=None, IamUserArn=None, AllowSsh=None, AllowSudo=None, Level=None):\n pass", "def test_control_acl_update(self):\n with factories.single_commit():\n control = factories.ControlFactory()\n person = factories.PersonFactory()\n control.add_person_with_role_name(person, \"Admin\")\n access_control_list = {\n \"Admin\": [\n {\n \"email\": \"[email protected]\",\n \"name\": \"user1\",\n },\n {\n \"email\": \"[email protected]\",\n \"name\": \"user2\",\n },\n ]\n }\n self.setup_people(access_control_list)\n\n response = self.api.put(control, control.id, {\n \"access_control_list\": access_control_list,\n })\n\n self.assert200(response)\n control = all_models.Control.query.get(control.id)\n self.assert_obj_acl(control, access_control_list)", "def test_set_get_acls(self):\n self.shell.onecmd(\"create %s/one 'hello'\" % (self.tests_path))\n self.shell.onecmd(\"set_acls %s/one 'world:anyone:r digest:%s:cdrwa'\" % (\n self.tests_path, self.auth_digest))\n self.shell.onecmd(\"get_acls %s/one\" % (self.tests_path))\n\n if PYTHON3:\n user_id = \"Id(scheme='digest', id='%s')\" % (self.auth_digest)\n else:\n user_id = \"Id(scheme=u'digest', id=u'%s')\" % (self.auth_digest)\n\n user_acl = \"ACL(perms=31, acl_list=['ALL'], id=%s)\" % (user_id)\n expected_output = \"/tests/one: ['WORLD_READ', %s]\\n\" % (user_acl)\n self.assertEqual(expected_output, self.output.getvalue())", "def put_object_acl(ACL=None, AccessControlPolicy=None, Bucket=None, GrantFullControl=None, GrantRead=None, GrantReadACP=None, 
GrantWrite=None, GrantWriteACP=None, Key=None, RequestPayer=None, VersionId=None):\n pass", "def __acl__(self):\n # type: () -> AccessControlListType\n acl = []\n if self.owner_user_id:\n acl.append((Allow, self.owner_user_id, ALL_PERMISSIONS))\n if self.owner_group_id:\n acl.append((Allow, \"group:%s\" % self.owner_group_id, ALL_PERMISSIONS))\n return acl", "def create_acl(self, context, sg):\n self.security_group_driver.create_acl(context, sg)", "def run_setperms(self, expanded, unexpanded) : \n\t\toptions, args = self.getopt([\"noacquire\"], unexpanded)\n\t\tif (options is None) and (args is None) :\n\t\t\treturn -1\t# message was already displayed in self.getopt()\n\t\tif len(args) == 2 :\n\t\t\t# no permission name, we want all of them\n\t\t\targs.append(\"*\")\n\t\tif len(args) < 3 :\n\t\t\treturn self.errormessage(\"Needs an object id, a comma separated list of roles and a list of permissions\")\n\t\tobject = self.toObject(self.__context, args[0])\n\t\tif object is None :\n\t\t\treturn self.errormessage(\"Object %s doesn't exist\" % args[0])\n\t\tif not self.HasPerms(object, \"Change permissions\") :\n\t\t\treturn -1\n\t\troles = filter(None, map(string.strip, string.split(args[1], ',')))\n\t\tacquire = not options.has_key(\"noacquire\")\n\t\tfor perm in object.ac_inherited_permissions(all=1) :\n\t\t\tpname = perm[0]\n\t\t\tfor pattern in unexpanded[1:] :\n\t\t\t\t# I'd prefer to have an fnmatch.fnmatchUNcase()\n\t\t\t\t# to be less strict...\n\t\t\t\tif fnmatch.fnmatchcase(pname, pattern) :\n\t\t\t\t\tobject.manage_permission(permission_to_manage=pname, roles=roles, acquire=acquire)\n\t\t\t\t\tself.htmlmessage(\"Permission '%s' on object %s was given to roles %s and %s acquired otherwise\" % (pname, self.ObjectPath(object), roles, (((not acquire) and 'not') or '')))\n\t\t\t\t\tbreak", "def test_make_role_acl(self, make_acl_mock):\n zk = zksasl.SASLZkClient()\n zk.make_role_acl('servers', 'ra')\n\n make_acl_mock.assert_called_once_with(\n scheme='sasl', 
credential='file:///treadmill/roles/servers',\n read=True, write=False, delete=False, create=False, admin=True\n )", "def test_acl_configuration(self, env):\n # Create ACL Expression\n self.suite_logger.debug(\"Create and Verify ACL Expression\")\n expressions = [(1, 'DstMac', 'FF:FF:FF:FF:FF:FF', '00:00:00:01:01:01'), ]\n env.switch[1].ui.create_acl(expressions=expressions)\n # Verify ACL Expression\n expression = env.switch[1].ui.get_table_acl(\"ACLExpressions\")[0]\n assert expression['data'] == '00:00:00:01:01:01'\n assert expression['mask'] == 'FF:FF:FF:FF:FF:FF'\n assert expression['expressionId'] == 1\n assert expression['field'] == 'DstMac'\n\n # Create ACL Actions\n self.suite_logger.debug(\"Create and Verify ACL Action\")\n actions = [(1, 'Drop', ''), ]\n env.switch[1].ui.create_acl(actions=actions)\n # Verify ACL Action\n action = env.switch[1].ui.get_table_acl(\"ACLActions\")[0]\n assert action['action'] == 'Drop'\n assert action['param'] == ''\n assert action['actionId'] == 1\n\n # Create ACL Rule\n self.suite_logger.debug(\"Create and Verify ACL Rule\")\n rules = [(1, 1, 1, 'Ingress', 'Enabled', 0), ]\n # Note: ACL Rule should be assigned to ports\n env.switch[1].ui.create_acl(ports=[1, ], rules=rules)\n # Verify ACL Rule\n rule = env.switch[1].ui.get_table_acl(\"ACLRules\")[0]\n assert rule['ruleId'] == 1\n assert rule['expressionId'] == 1\n assert rule['actionId'] == 1\n assert rule['stage'] == 'Ingress'\n assert rule['enabled'] == 'Enabled'\n assert rule['priority'] == 0", "def ApplyAclChanges(self, uri_or_expansion_result):\n if isinstance(uri_or_expansion_result, name_expansion.NameExpansionResult):\n uri = self.suri_builder.StorageUri(\n uri_or_expansion_result.expanded_uri_str)\n else:\n uri = uri_or_expansion_result\n\n try:\n current_acl = uri.get_acl()\n except GSResponseError as e:\n if (e.code == 'AccessDenied' and e.reason == 'Forbidden'\n and e.status == 403):\n self._WarnServiceAccounts()\n self.logger.warning('Failed to set acl for 
{0}: {1}'\n .format(uri, e.reason))\n return\n\n modification_count = 0\n for change in self.changes:\n modification_count += change.Execute(uri, current_acl)\n if modification_count == 0:\n self.logger.info('No changes to {0}'.format(uri))\n return\n\n # TODO: Remove the concept of forcing when boto provides access to\n # bucket generation and metageneration.\n headers = dict(self.headers)\n force = uri.names_bucket()\n if not force:\n key = uri.get_key()\n headers['x-goog-if-generation-match'] = key.generation\n headers['x-goog-if-metageneration-match'] = key.metageneration\n\n # If this fails because of a precondition, it will raise a\n # GSResponseError for @Retry to handle.\n try:\n uri.set_acl(current_acl, uri.object_name, False, headers)\n except GSResponseError as e:\n # Don't retry on bad requests, e.g. invalid email address.\n if getattr(e, 'status', None) == 400:\n raise CommandException('Received bad request from server: %s' % str(e))\n raise\n self.logger.info('Updated ACL on {0}'.format(uri))", "def test_set_get_acls_recursive(self):\n path_one = \"%s/one\" % (self.tests_path)\n path_two = \"%s/one/two\" % (self.tests_path)\n self.shell.onecmd(\"create %s 'hello'\" % (path_one))\n self.shell.onecmd(\"create %s 'goodbye'\" % (path_two))\n self.shell.onecmd(\"set_acls %s 'world:anyone:r digest:%s:cdrwa' true\" % (\n path_one, self.auth_digest))\n self.shell.onecmd(\"get_acls %s 0\" % (path_one))\n\n if PYTHON3:\n user_id = \"Id(scheme='digest', id='%s')\" % (self.auth_digest)\n else:\n user_id = \"Id(scheme=u'digest', id=u'%s')\" % (self.auth_digest)\n\n user_acl = \"ACL(perms=31, acl_list=['ALL'], id=%s)\" % (user_id)\n expected_output = \"\"\"/tests/one: ['WORLD_READ', %s]\n/tests/one/two: ['WORLD_READ', %s]\n\"\"\" % (user_acl, user_acl)\n\n self.assertEqual(expected_output, self.output.getvalue())", "def create_acl_rule(self, context, sgr):\n self.security_group_driver.create_acl_rule(context, sgr)", "def _add_acl_sequence_numbers(self):\n\n 
ipv4_acl_sw = 'ip access-list'\n # ipv6_acl_sw = ('ipv6 access-list')\n if self.host.os in ['ios']:\n acl_line_sw = ('permit', 'deny')\n else:\n acl_line_sw = ('permit', 'deny', 'remark')\n for child in self.children:\n if child.text.startswith(ipv4_acl_sw):\n sn = 10\n for sub_child in child.children:\n if sub_child.text.startswith(acl_line_sw):\n sub_child.text = \"{} {}\".format(sn, sub_child.text)\n sn += 10\n\n return self", "def get_acl(registry=None):", "def ApplyAclChanges(self, uri):\n try:\n current_acl = uri.get_def_acl()\n except GSResponseError as e:\n if (e.code == 'AccessDenied' and e.reason == 'Forbidden'\n and e.status == 403):\n self._WarnServiceAccounts()\n self.logger.warning('Failed to set default acl for {0}: {1}'\n .format(uri, e.reason))\n return\n\n modification_count = 0\n for change in self.changes:\n modification_count += change.Execute(uri, current_acl)\n if modification_count == 0:\n self.logger.info('No changes to {0}'.format(uri))\n return\n\n # TODO: Add if-metageneration-match when boto provides access to bucket\n # metageneration.\n\n # If this fails because of a precondition, it will raise a\n # GSResponseError for @Retry to handle.\n try:\n uri.set_def_acl(current_acl, validate=False)\n except GSResponseError as e:\n # Don't retry on bad requests, e.g. 
invalid email address.\n if getattr(e, 'status', None) == 400:\n raise CommandException('Received bad request from server: %s' % str(e))\n raise\n self.logger.info('Updated default ACL on {0}'.format(uri))", "def __base_acl__(self) -> list:\n _acls = [\n (Allow, 'g:briefy_qa', ['add', 'delete', 'edit', 'list', 'view'])\n ]\n return _acls", "def _access_control(self, instance, host, mask=32, port=None,\n protocol='tcp', access_type='allow'):\n\n if access_type == 'allow':\n access_type = 'ACCEPT'\n elif access_type == 'deny':\n access_type = 'REJECT'\n else:\n LOG.error('Invalid access_type: %s' % access_type)\n raise exception.Error('Invalid access_type: %s' % access_type)\n\n if port == None:\n port = ''\n else:\n port = '--dport %s' % (port,)\n\n # Create our table instance\n tables = [\n linux_net.iptables_manager.ipv4['filter'],\n linux_net.iptables_manager.ipv6['filter']\n ]\n\n rule = '-s %s/%s -p %s %s -j %s' % \\\n (host, mask, protocol, port, access_type)\n\n for table in tables:\n table.add_rule(instance['name'], rule)\n\n # Apply the rules\n linux_net.iptables_manager.apply()", "def add_vlan_acl(self, vlan, acl):\n raise NotImplementedError # pragma: no cover", "def add_acl_rule_to_acl(self, acl_name=None, rule_id='', action=None, conditions=None):\n pass", "def add_port_acl(self, port, acl):\n raise NotImplementedError # pragma: no cover", "def set_ownership(self):\n\n os.chmod(os.path.join(\"%s\" % NetworkManager_conf_dir, self.connection._id), 0600)", "def set(self, layer='', name='', uid='', params={}):\n return self.__common_client._post_with_layer('set-access-rule', layer, name, uid, params)", "def svn_fs_set_access(*args):\r\n return _fs.svn_fs_set_access(*args)", "def _set_rw_permissions_for_all(self, nms, path):\n nms.appliance.execute('chmod ugo+rw %s' % path)" ]
[ "0.6792879", "0.65921074", "0.61917096", "0.6073873", "0.59864783", "0.59574765", "0.5841796", "0.58087355", "0.5784863", "0.5783401", "0.57787913", "0.5765398", "0.5761703", "0.5728288", "0.5710621", "0.570529", "0.5671081", "0.56375176", "0.5577266", "0.5576396", "0.55727315", "0.5572205", "0.55486345", "0.5546548", "0.5537407", "0.54973197", "0.5482423", "0.5471506", "0.5457016", "0.54487604" ]
0.73653114
0
Explains what to do if sudo needed to update gsutil software. Happens if gsutil was previously installed by a different user (typically if someone originally installed in a shared file system location, using sudo).
def ExplainIfSudoNeeded(self, tf, dirs_to_remove): system = platform.system() # If running under Windows we don't need (or have) sudo. if system.lower().startswith('windows'): return user_id = os.getuid() if (os.stat(self.gsutil_bin_dir).st_uid == user_id and os.stat(self.boto_lib_dir).st_uid == user_id): return # Won't fail - this command runs after main startup code that insists on # having a config file. config_file = self.config_file_list self.CleanUpUpdateCommand(tf, dirs_to_remove) raise CommandException( ('Since it was installed by a different user previously, you will need ' 'to update using the following commands.\nYou will be prompted for ' 'your password, and the install will run as "root". If you\'re unsure ' 'what this means please ask your system administrator for help:' '\n\tchmod 644 %s\n\tsudo env BOTO_CONFIG=%s gsutil update' '\n\tchmod 600 %s') % (config_file, config_file, config_file), informational=True)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _ExplainIfSudoNeeded(self, tf, dirs_to_remove, old_cwd):\n # If running under Windows or Cygwin we don't need (or have) sudo.\n if system_util.IS_CYGWIN or system_util.IS_WINDOWS:\n return\n\n user_id = os.getuid()\n if os.stat(gslib.GSUTIL_DIR).st_uid == user_id:\n return\n\n # Won't fail - this command runs after main startup code that insists on\n # having a config file.\n config_file_list = GetConfigFilePaths()\n config_files = ' '.join(config_file_list)\n self._CleanUpUpdateCommand(tf, dirs_to_remove, old_cwd)\n\n # Pick current protection of each boto config file for command that restores\n # protection (rather than fixing at 600) to support use cases like how GCE\n # installs a service account with an /etc/boto.cfg file protected to 644.\n chmod_cmds = []\n for config_file in config_file_list:\n mode = oct(stat.S_IMODE((os.stat(config_file)[stat.ST_MODE])))\n chmod_cmds.append('\\n\\tsudo chmod %s %s' % (mode, config_file))\n\n raise CommandException('\\n'.join(\n textwrap.wrap(\n 'Since it was installed by a different user previously, you will need '\n 'to update using the following commands. You will be prompted for your '\n 'password, and the install will run as \"root\". 
If you\\'re unsure what '\n 'this means please ask your system administrator for help:')) + (\n '\\n\\tsudo chmod 0644 %s\\n\\tsudo env BOTO_CONFIG=\"%s\" %s update'\n '%s') % (config_files, config_files, self.gsutil_path,\n ' '.join(chmod_cmds)),\n informational=True)", "def update_os_packages(self):\n self.summarize_operation(\"Updating OS Packages\")\n print subprocess.call(shlex.split(\"sudo apt-get update -y\"))", "def upgrade_os_packages(self):\n self.summarize_operation(\"Upgrading OS Packages\")\n print subprocess.call(shlex.split(\"sudo apt-get upgrade -y\"))", "def install_sudo():\n import vars\n vars = vars.Vars()\n with settings(warn_only=True):\n if run(\"which sudo\").failed:\n run(vars.os.package_install_cmd % \"sudo\")", "def sudo():\n try:\n run('sudo whoami')\n return 'sudo'\n except:\n return ''", "def UpdateCommand(self, unused_args, sub_opts=None, headers=None, debug=0):\n installed_version_string = self.LoadVersionString()\n\n dirs_to_remove = []\n # Retrieve gsutil tarball and check if it's newer than installed code.\n # TODO: Store this version info as metadata on the tarball object and\n # change this command's implementation to check that metadata instead of\n # downloading the tarball to check the version info.\n tmp_dir = tempfile.mkdtemp()\n dirs_to_remove.append(tmp_dir)\n os.chdir(tmp_dir)\n print 'Checking for software update...'\n self.CopyObjsCommand(['gs://pub/gsutil.tar.gz', 'file://gsutil.tar.gz'], [],\n headers, debug)\n tf = tarfile.open('gsutil.tar.gz')\n tf.errorlevel = 1 # So fatal tarball unpack errors raise exceptions.\n tf.extract('./gsutil/VERSION')\n ver_file = open('gsutil/VERSION', 'r')\n latest_version_string = ver_file.read().rstrip('\\n')\n ver_file.close()\n\n # The force_update option works around a problem with the way the\n # first gsutil \"update\" command exploded the gsutil and boto directories,\n # which didn't correctly install boto. 
People running that older code can\n # run \"gsutil update\" (to update to the newer gsutil update code) followed\n # by \"gsutil update -f\" (which will then update the boto code, even though\n # the VERSION is already the latest version).\n force_update = False\n if sub_opts:\n for o, unused_a in sub_opts:\n if o == '-f':\n force_update = True\n if not force_update and installed_version_string == latest_version_string:\n self.CleanUpUpdateCommand(tf, dirs_to_remove)\n raise CommandException('You have the latest version of gsutil installed.',\n informational=True)\n\n print(('This command will update to the \"%s\" version of\\ngsutil at %s') %\n (latest_version_string, self.gsutil_bin_dir))\n self.ExplainIfSudoNeeded(tf, dirs_to_remove)\n\n answer = raw_input('Proceed (Note: experimental command)? [y/N] ')\n if not answer or answer.lower()[0] != 'y':\n self.CleanUpUpdateCommand(tf, dirs_to_remove)\n raise CommandException('Not running update.', informational=True)\n\n # Ignore keyboard interrupts during the update to reduce the chance someone\n # hitting ^C leaves gsutil in a broken state.\n signal.signal(signal.SIGINT, signal.SIG_IGN)\n\n # gsutil_bin_dir lists the path where the code should end up (like\n # /usr/local/gsutil), which is one level down from the relative path in the\n # tarball (since the latter creates files in ./gsutil). So, we need to\n # extract at the parent directory level.\n gsutil_bin_parent_dir = os.path.dirname(self.gsutil_bin_dir)\n\n # Extract tarball to a temporary directory in a sibling to gsutil_bin_dir.\n old_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)\n new_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)\n dirs_to_remove.append(old_dir)\n dirs_to_remove.append(new_dir)\n self.EnsureDirsSafeForUpdate(dirs_to_remove)\n try:\n tf.extractall(path=new_dir)\n except Exception, e:\n self.CleanUpUpdateCommand(tf, dirs_to_remove)\n raise CommandException('Update failed: %s.' 
% e)\n\n # Move old installation aside and new into place.\n os.rename(self.gsutil_bin_dir, old_dir + os.sep + 'old')\n os.rename(new_dir + os.sep + 'gsutil', self.gsutil_bin_dir)\n self.CleanUpUpdateCommand(tf, dirs_to_remove)\n signal.signal(signal.SIGINT, signal.SIG_DFL)\n print 'Update complete.'", "def apt_update():\n print('>> apt update')\n with hide('output'):\n r = sudo('apt update')\n if r.find('packages can be upgraded') == -1:\n raise FabricCommandError(f'Result = {r}')\n print('>>> Success apt update')", "def check_req_utils():\n utils = (['dmenu', 'gpg', 'pass', 'xclip', 'exo-open', 'pkill'])\n for util in utils:\n if find_executable(util) is None:\n print(\"ERROR: Util '{}' is missing, install it before proceeding! Exiting!\".format(util))\n sys.exit(1)", "def in_sudo_mode():\n if not 'SUDO_UID' in os.environ.keys():\n print(\"Try running this program with sudo.\")\n exit()", "def addSudoers(User_String):\n SudoerString = \"\"\"User_Alias DEVOPS = %s\nDEVOPS ALL=NOPASSWD: ALL\"\"\" % User_String\n _hazSudoers = sudo('[ -f /etc/sudoers.d/webtelemetry-devops ] && echo \"yes\" || echo \"no\"')\n # We wont overwrite anything until we can verify & compare the contents\n if _hazSudoers == \"no\":\n sudo('echo \"%s\" >> /etc/sudoers.d/webtelemetry-devops' % SudoerString)\n sudo('chmod 440 /etc/sudoers.d/webtelemetry-devops')\n else:\n print \"[Info] webtelemetry-devops Sudoers file already exists.\"", "def RunCommand(self):\n\n if gslib.IS_PACKAGE_INSTALL:\n raise CommandException(\n 'The update command is only available for gsutil installed from a '\n 'tarball. If you installed gsutil via another method, use the same '\n 'method to update it.')\n\n if system_util.InvokedViaCloudSdk():\n raise CommandException(\n 'The update command is disabled for Cloud SDK installs. Please run '\n '\"gcloud components update\" to update it. 
Note: the Cloud SDK '\n 'incorporates updates to the underlying tools approximately every 2 '\n 'weeks, so if you are attempting to update to a recently created '\n 'release / pre-release of gsutil it may not yet be available via '\n 'the Cloud SDK.')\n\n https_validate_certificates = CERTIFICATE_VALIDATION_ENABLED\n if not https_validate_certificates:\n raise CommandException(\n 'Your boto configuration has https_validate_certificates = False.\\n'\n 'The update command cannot be run this way, for security reasons.')\n\n DisallowUpdateIfDataInGsutilDir()\n\n force_update = False\n no_prompt = False\n if self.sub_opts:\n for o, unused_a in self.sub_opts:\n if o == '-f':\n force_update = True\n if o == '-n':\n no_prompt = True\n\n dirs_to_remove = []\n tmp_dir = tempfile.mkdtemp()\n dirs_to_remove.append(tmp_dir)\n old_cwd = os.getcwd()\n os.chdir(tmp_dir)\n\n if not no_prompt:\n self.logger.info('Checking for software update...')\n if self.args:\n update_from_url_str = self.args[0]\n if not update_from_url_str.endswith('.tar.gz'):\n raise CommandException(\n 'The update command only works with tar.gz files.')\n for i, result in enumerate(self.WildcardIterator(update_from_url_str)):\n if i > 0:\n raise CommandException(\n 'Invalid update URL. Must name a single .tar.gz file.')\n storage_url = result.storage_url\n if storage_url.IsFileUrl() and not storage_url.IsDirectory():\n if not force_update:\n raise CommandException(\n ('\"update\" command does not support \"file://\" URLs without the '\n '-f option.'))\n elif not (storage_url.IsCloudUrl() and storage_url.IsObject()):\n raise CommandException(\n 'Invalid update object URL. Must name a single .tar.gz file.')\n else:\n update_from_url_str = GSUTIL_PUB_TARBALL\n\n # Try to retrieve version info from tarball metadata; failing that; download\n # the tarball and extract the VERSION file. 
The version lookup will fail\n # when running the update system test, because it retrieves the tarball from\n # a temp file rather than a cloud URL (files lack the version metadata).\n tarball_version = LookUpGsutilVersion(self.gsutil_api, update_from_url_str)\n if tarball_version:\n tf = None\n else:\n tf = self._FetchAndOpenGsutilTarball(update_from_url_str)\n tf.extractall()\n with open(os.path.join('gsutil', 'VERSION'), 'r') as ver_file:\n tarball_version = ver_file.read().strip()\n\n if not force_update and gslib.VERSION == tarball_version:\n self._CleanUpUpdateCommand(tf, dirs_to_remove, old_cwd)\n if self.args:\n raise CommandException('You already have %s installed.' %\n update_from_url_str,\n informational=True)\n else:\n raise CommandException(\n 'You already have the latest gsutil release '\n 'installed.',\n informational=True)\n\n if not no_prompt:\n CheckAndMaybePromptForAnalyticsEnabling()\n (_, major) = CompareVersions(tarball_version, gslib.VERSION)\n if major:\n print(('\\n'.join(\n textwrap.wrap(\n 'This command will update to the \"%s\" version of gsutil at %s. '\n 'NOTE: This a major new version, so it is strongly recommended '\n 'that you review the release note details at %s before updating to '\n 'this version, especially if you use gsutil in scripts.' %\n (tarball_version, gslib.GSUTIL_DIR, RELEASE_NOTES_URL)))))\n else:\n print(('This command will update to the \"%s\" version of\\ngsutil at %s' %\n (tarball_version, gslib.GSUTIL_DIR)))\n self._ExplainIfSudoNeeded(tf, dirs_to_remove, old_cwd)\n\n if no_prompt:\n answer = 'y'\n else:\n answer = input('Proceed? 
[y/N] ')\n if not answer or answer.lower()[0] != 'y':\n self._CleanUpUpdateCommand(tf, dirs_to_remove, old_cwd)\n raise CommandException('Not running update.', informational=True)\n\n if not tf:\n tf = self._FetchAndOpenGsutilTarball(update_from_url_str)\n\n # Ignore keyboard interrupts during the update to reduce the chance someone\n # hitting ^C leaves gsutil in a broken state.\n RegisterSignalHandler(signal.SIGINT, signal.SIG_IGN)\n\n # gslib.GSUTIL_DIR lists the path where the code should end up (like\n # /usr/local/gsutil), which is one level down from the relative path in the\n # tarball (since the latter creates files in ./gsutil). So, we need to\n # extract at the parent directory level.\n gsutil_bin_parent_dir = os.path.normpath(\n os.path.join(gslib.GSUTIL_DIR, '..'))\n\n # Extract tarball to a temporary directory in a sibling to GSUTIL_DIR.\n old_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)\n new_dir = tempfile.mkdtemp(dir=gsutil_bin_parent_dir)\n dirs_to_remove.append(old_dir)\n dirs_to_remove.append(new_dir)\n self._EnsureDirsSafeForUpdate(dirs_to_remove)\n try:\n tf.extractall(path=new_dir)\n except Exception as e:\n self._CleanUpUpdateCommand(tf, dirs_to_remove, old_cwd)\n raise CommandException('Update failed: %s.' % e)\n\n # For enterprise mode (shared/central) installation, users with\n # different user/group than the installation user/group must be\n # able to run gsutil so we need to do some permissions adjustments\n # here. 
Since enterprise mode is not not supported for Windows\n # users, we can skip this step when running on Windows, which\n # avoids the problem that Windows has no find or xargs command.\n if not system_util.IS_WINDOWS:\n # Make all files and dirs in updated area owner-RW and world-R, and make\n # all directories owner-RWX and world-RX.\n for dirname, subdirs, filenames in os.walk(new_dir):\n for filename in filenames:\n fd = os.open(os.path.join(dirname, filename), os.O_RDONLY)\n os.fchmod(fd,\n stat.S_IWRITE | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH)\n os.close(fd)\n for subdir in subdirs:\n fd = os.open(os.path.join(dirname, subdir), os.O_RDONLY)\n os.fchmod(\n fd, stat.S_IRWXU | stat.S_IXGRP | stat.S_IXOTH | stat.S_IRGRP |\n stat.S_IROTH)\n os.close(fd)\n\n # Make main gsutil script owner-RWX and world-RX.\n fd = os.open(os.path.join(new_dir, 'gsutil', 'gsutil'), os.O_RDONLY)\n os.fchmod(\n fd, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH |\n stat.S_IXOTH)\n os.close(fd)\n\n # Move old installation aside and new into place.\n os.rename(gslib.GSUTIL_DIR, os.path.join(old_dir, 'old'))\n os.rename(os.path.join(new_dir, 'gsutil'), gslib.GSUTIL_DIR)\n self._CleanUpUpdateCommand(tf, dirs_to_remove, old_cwd)\n RegisterSignalHandler(signal.SIGINT, signal.SIG_DFL)\n self.logger.info('Update complete.')\n return 0", "def sudoers():\n return \"\"\"ALL=NOPASSWD: /sbin/multipath, /sbin/multipathd, /etc/init.d/multipathd, /usr/bin/sg_persist, /bin/mount, /bin/umount, /bin/kill, /usr/bin/lsof, /usr/bin/systemctl, /usr/sbin/lsof, /usr/sbin/xfs_repair, /usr/bin/mkdir, /sbin/vgscan, /sbin/pvscan, /sbin/lvscan, /sbin/vgchange, /sbin/lvdisplay\"\"\"", "def ubuntu_add():\n gmsh_installed = shutil.which('gmsh')\n if not gmsh_installed:\n print('Installing gmsh')\n command_line = \"sudo apt-get install gmsh\"\n subprocess.check_call(command_line, shell=True)\n else:\n print('gmsh present')\n ccx_installed = shutil.which('ccx')\n if not ccx_installed:\n print('Installing 
calculix (ccx)')\n command_line = \"sudo apt-get install calculix-ccx\"\n subprocess.check_call(command_line, shell=True)\n else:\n print('calculix (ccx) present')", "def update_server():\n log('Atualizando pacotes', yellow)\n sudo('apt-get -y update')", "def ubuntu_remove():\n ccx_installed = shutil.which('ccx')\n if not ccx_installed:\n print('calculix (ccx) is not on your system')\n else:\n print('Removing calculix (ccx)')\n command_line = \"sudo apt-get remove calculix-ccx\"\n subprocess.check_call(command_line, shell=True)\n gmsh_installed = shutil.which('gmsh')\n if not gmsh_installed:\n print('gmsh is not on your system')\n else:\n print('Removing gmsh')\n command_line = \"sudo apt-get remove gmsh\"\n subprocess.check_call(command_line, shell=True)", "def GetGsutilPath():\n sh_stdout, sh_stderr, ret_code = cmd_utils.ExecuteOneShellCommand(\n \"which gsutil\")\n if ret_code == 0:\n return sh_stdout.strip()\n else:\n logging.error(\"`gsutil` doesn't exist on the host; \"\n \"please install Google Cloud SDK before retrying.\")\n return None", "def upgrade_server():\n log('Atualizando programas', yellow)\n sudo('apt-get -y upgrade')", "def sub_install_packages():\n sudo('apt-get update') # Update repository links\n sudo('apt-get -y upgrade') # Upgrade the system\n package_str = ' '.join(INSTALL_PACKAGES)\n sudo('apt-get -y install ' + package_str) # Install the packages", "def export_for_nfs(rootpassword, path, ip):\n def __darwin_check_option(existing_opt):\n flag = False\n entry = existing_opt.clients\n for opt in entry:\n if opt.name == ip:\n flag = True\n break\n\n if not flag:\n return flag\n\n # now we check other options including userid, gid\n flag = False\n for opt in entry:\n if opt.name == \"-alldirs\":\n flag = True\n break\n\n if not flag:\n return flag\n\n cstr = \"-mapall={}:{}\".format(os.getuid(), os.getgid())\n flag = False\n for opt in entry:\n if opt.name == cstr:\n flag = True\n break\n return flag\n\n def __darwin_update():\n cmd = \"echo 
{} | sudo -S chmod o+w /etc/exports\".format(rootpassword)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n return 1\n\n line = '\\\\\"{}\\\\\" {} -alldirs -mapall={}:{}'.format(path,\n ip,\n os.getuid(),\n os.getgid())\n cmd = 'echo \\\"{}\\\" >> /etc/exports'.format(line)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n slab_logger.error(ret_info)\n return 1\n\n cmd = \"echo {} | sudo -S chmod o-w /etc/exports\".format(rootpassword)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n slab_logger.error(ret_info)\n return 1\n\n cmd = \"echo {} | sudo -S nfsd update\".format(rootpassword)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n slab_logger.error(ret_info)\n return 1\n return 0\n\n def __linux_check_option(existing_opt):\n flag = False\n entry = existing_opt.clients\n for opt in entry:\n if opt.name == ip:\n flag = True\n break\n if not flag:\n return flag\n\n def __linux_update():\n cmd = \"echo {} | sudo -S chmod o+w /etc/exports\".format(rootpassword)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n return 1\n\n line = '(rw,no_subtree_check,all_squash,anonuid={},anongid={},fsid=1777472711)'\n line = '\\\\\"{}\\\\\" {}'+line\n line = line.format(path, ip, os.getuid(), os.getgid())\n cmd = 'echo \\\"{}\\\" >> /etc/exports'.format(line)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n slab_logger.error(ret_info)\n return 1\n\n cmd = \"echo {} | sudo -S chmod o-w /etc/exports\".format(rootpassword)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n slab_logger.error(ret_info)\n return 1\n\n cmd = \"echo {} | sudo exportfs -ra\".format(rootpassword)\n ret_code, ret_info = run_this(cmd)\n if ret_code != 0:\n slab_logger.error(ret_info)\n return 1\n return 0\n\n vagrant_responsibility = True\n if not vagrant_responsibility:\n if platform.system() == 'Darwin':\n __check = __darwin_check_option\n __update = __darwin_update\n elif platform.system() == 'Linux':\n __check = __linux_check_option\n 
__update = __linux_update\n else:\n ret_info = \"servicelab support nfs mount for mac os or redhat/linux only\"\n slab_logger.error(ret_info)\n return 1\n\n # check if the ip exist with the options\n exp_list = ExportsConfig(path=\"/etc/exports\")\n exp_list.load()\n for opt in exp_list.tree.exports:\n if opt.name == path and __check(opt) is True:\n return 0\n\n # add the mount\n return __update()", "def _common_setup(self):\n with settings(hide('running', 'stdout', 'stderr', 'warnings')):\n sudo('''\n export DEBIAN_FRONTEND=noninteractive;\n apt-get update -qq -o Acquire::http::No-Cache=True;\n apt-get upgrade %s\n ''' % self.apt_opts)\n sudo('''\n export DEBIAN_FRONTEND=noninteractive;\n apt-get install %s %s\n ''' % (self.apt_opts,\n ' '.join(self.keyrings + self.general_tools)))\n sudo('mv /etc/localtime /etc/localtime.old')\n if sudo('test -e /usr/share/zoneinfo/UTC').succeeded:\n sudo('cp /usr/share/zoneinfo/UTC /etc/localtime')", "def user_should_be_able_to_use_sudo(driver):\n assert \"lectured\" in sudo_results, str(sudo_results)", "def sudo_restart ( self, ):\r\n pass\r\n \"sudo reboot\"", "def check_sudo(self, uid: str) -> None:\n stdout, stderr = self.syscall(os.popen(\"which sudo\").read().strip(), \"-nu\", uid, \"-S\", \"true\", \"/bin/bash\")\n if stdout or stderr:\n raise GateException(\"Access denied to UID '{}' via sudo.\".format(uid))", "def tweak_new_filesystem(root_dir):\n\n # create a symlink for insserv\n force_symlink('../usr/lib/insserv/insserv',\n os.path.join(root_dir, 'sbin/insserv'))\n\n # create a symlink for awk\n force_symlink('mawk', os.path.join(root_dir, 'usr/bin/awk'))\n\n # Nvidia keeps packaging up a broken post-install script for their cudnn\n # deb. 
Freaking nvidia\n cudnn_postinst_path = 'var/lib/dpkg/info/libcudnn6-dev.postinst'\n cudnn_postinst_path = os.path.join(root_dir, cudnn_postinst_path)\n\n if os.path.exists(cudnn_postinst_path):\n with open(cudnn_postinst_path, 'r') as infile:\n content = infile.read()\n if not content.startswith(\"#!\"):\n with open(cudnn_postinst_path, 'w') as outfile:\n outfile.write('#! /bin/sh\\n')\n outfile.write(content)\n\n # NOTE(josh): patch the base-packages post-install hook so it doesn't\n # complain about files in /var/run\n basefiles_path = os.path.join(root_dir,\n 'var/lib/dpkg/info/base-files.postinst')\n if os.path.exists(basefiles_path):\n apply_patch_text(BASE_FILES_PATCH, root_dir)\n\n # NOTE(josh): ifupdown should depend on initscripts, but it doesn't\n status_path = os.path.join(root_dir, 'var/lib/dpkg/status')\n tempfile_path = status_path + '.tmp'\n with open(tempfile_path, 'wb') as outfile:\n with open(status_path, 'rb') as infile:\n for line in infile:\n outfile.write(line)\n if line.strip() == 'Package: ifupdown':\n break\n\n for line in infile:\n if line.startswith('Depends: '):\n line = ', '.join(line.strip().split(', ') + ['initscripts']) + '\\n'\n outfile.write(line)\n break\n else:\n outfile.write(line)\n\n for line in infile:\n outfile.write(line)\n os.rename(tempfile_path, status_path)\n\n # NOTE(josh): resolvconf tries to a write a file in this directory\n try:\n target_path = os.path.join(root_dir, 'run/resolvconf/interface')\n os.makedirs(target_path)\n except OSError:\n if not os.path.isdir(target_path):\n raise\n\n # NOTE(josh): Can't postinst makedev without CAP_MKNOD\n if os.getuid() != 0:\n makedev_postinst = os.path.join(root_dir,\n 'var/lib/dpkg/info/makedev.postinst')\n if os.path.exists(makedev_postinst):\n os.rename(makedev_postinst, makedev_postinst + '.bak')\n\n # remove temporary/boostrap files\n files_to_remove = ['etc/apt/sources.list.d/bootstrap.list']\n\n for filename in files_to_remove:\n file_path = os.path.join(root_dir, 
filename)\n if os.path.exists(file_path):\n os.remove(file_path)", "def fix_user_agent_for_gsutil_shim():\n if properties.VALUES.storage.run_by_gsutil_shim.GetBool():\n command_path_string = properties.VALUES.metrics.command_name.Get().replace(\n 'gcloud.storage.', 'gcloud.gslibshim.')\n properties.VALUES.SetInvocationValue(\n properties.VALUES.metrics.command_name, command_path_string, None)", "def provision():\n sudo('chef-client')", "def test_smart_update(self):\n if os.getuid() != 0:\n return self.skipTest(\"root privileges required to opt in\")\n updater = AptMirrorUpdater()\n # Remove all existing package lists.\n updater.clear_package_lists()\n # Verify that package lists aren't available.\n assert not have_package_lists()\n # Run `apt-get update' to download the package lists.\n updater.smart_update()\n # Verify that package lists are again available.\n assert have_package_lists()", "def git_user_updates(self):\n\n suffixes = ['.pacnew', '.pacsave', '.pacorig']\n etc_files = {n: EtcPath(self.root_dir, n) for n in\n list_rpaths(self.root_dir, ROOT_SUBDIR,\n suffixes=suffixes)}\n etc_tracked = self.repo.tracked_files('etc-tmp')\n\n # Build the list of etc-tmp files that are different from their\n # counterpart in /etc.\n self.repo.checkout('etc-tmp')\n to_check_in_master = []\n for rpath in etc_files:\n if rpath in etc_tracked:\n # Issue #16. 
Do not add an /etc file that has been made not\n # readable after a pacman upgrade.\n if (etc_files[rpath].digest != b'' and\n etc_files[rpath] != etc_tracked[rpath]):\n to_check_in_master.append(rpath)\n\n master_tracked = self.repo.tracked_files('master-tmp')\n\n # Build the list of master-tmp files:\n # * To add when the file does not exist in master-tmp and its\n # counterpart in etc-tmp is different from the /etc file.\n # * To update when the file exists in master-tmp and is different\n # from the /etc file.\n for rpath in to_check_in_master:\n if rpath not in master_tracked:\n self.master_commits.user_updated.rpaths.append(rpath)\n self.repo.checkout('master-tmp')\n for rpath in etc_files:\n if (rpath in master_tracked and rpath not in\n self.master_commits.added.rpaths):\n if etc_files[rpath].digest == b'':\n warn('cannot read %s' % etc_files[rpath].path)\n elif etc_files[rpath] != master_tracked[rpath]:\n self.master_commits.user_updated.rpaths.append(rpath)\n\n for rpath in self.master_commits.user_updated.rpaths:\n copy_file(rpath, self.root_dir, self.repodir)\n self.master_commits.user_updated.commit()", "def maint_brew():\n os.system('brew update')", "def ensure_correct_user(self):\n username = getpass.getuser()\n # xxx thow a suitable exception.\n assert username == 'debrepo', ('this command must be run as user `debrepo`, not', username)" ]
[ "0.6868656", "0.622022", "0.586455", "0.5762399", "0.57385385", "0.5685918", "0.5684922", "0.56211627", "0.55667186", "0.55461246", "0.55407524", "0.5524709", "0.5474953", "0.54246354", "0.539118", "0.53812087", "0.5301683", "0.52991027", "0.52959615", "0.52026653", "0.519367", "0.5179692", "0.51474994", "0.5129886", "0.509196", "0.50857794", "0.5032022", "0.50299364", "0.5019144", "0.50161797" ]
0.71584785
0
Throws Exception if any of dirs is known to be unsafe for gsutil update. This provides a failsafe check to ensure we don't try to overwrite or delete any important directories. (That shouldn't happen given the way we construct tmp dirs, etc., but since the gsutil update cleanup use shutil.rmtree() it's prudent to add extra checks.)
def EnsureDirsSafeForUpdate(self, dirs): for d in dirs: if not d: d = 'null' if d.lstrip(os.sep).lower() in self.unsafe_update_dirs: raise CommandException('EnsureDirsSafeForUpdate: encountered unsafe ' 'directory (%s); aborting update' % d)
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def _EnsureDirsSafeForUpdate(self, dirs):\n for d in dirs:\n if not d:\n d = 'null'\n if d.lstrip(os.sep).lower() in self.unsafe_update_dirs:\n raise CommandException('EnsureDirsSafeForUpdate: encountered unsafe '\n 'directory (%s); aborting update' % d)", "def update_dirs(dirs):\n index = len(dirs) - 1\n for i, d in enumerate(reversed(dirs)):\n if d in dir_ignore:\n del dirs[index - i]", "def purge_deleted_directories(self):\n registered = {safe_filename(obj.name) for obj in self}\n bad_directories = [\n self._base_data_dir / dirname\n for dirname in os.listdir(self._base_data_dir)\n if (self._base_data_dir / dirname).is_dir() and dirname not in registered\n ]\n\n for fp in bad_directories:\n shutil.rmtree(fp)\n\n return len(bad_directories)", "def _check_dirs(self):\r\n for dir in [self.papers_dir,\r\n self.buffer_dir]:\r\n if not os.path.exists(dir):\r\n message = f'Dir not exists: {dir}. Making it.'\r\n logging.warning(message)\r\n os.mkdir(dir)", "def try_clean(self):\n for f in self.FILES_TO_CLEAN:\n if not os.path.exists(f):\n continue\n\n if os.path.isdir(f):\n # don't care on error\n shutil.rmtree(f, onerror=lambda *x, **y: None)\n else:\n self.safe_delete(f)", "def ExplainIfSudoNeeded(self, tf, dirs_to_remove):\n system = platform.system()\n # If running under Windows we don't need (or have) sudo.\n if system.lower().startswith('windows'):\n return\n\n user_id = os.getuid()\n if (os.stat(self.gsutil_bin_dir).st_uid == user_id and\n os.stat(self.boto_lib_dir).st_uid == user_id):\n return\n\n # Won't fail - this command runs after main startup code that insists on\n # having a config file.\n config_file = self.config_file_list\n self.CleanUpUpdateCommand(tf, dirs_to_remove)\n raise CommandException(\n ('Since it was installed by a different user previously, you will need '\n 'to update using the following commands.\\nYou will be prompted for '\n 'your password, and the install will run as \"root\". 
If you\\'re unsure '\n 'what this means please ask your system administrator for help:'\n '\\n\\tchmod 644 %s\\n\\tsudo env BOTO_CONFIG=%s gsutil update'\n '\\n\\tchmod 600 %s') % (config_file, config_file, config_file),\n informational=True)", "def _check_required_directories(self) -> None:\n\n if self._all_stages:\n for stage in self._all_stages:\n stage_cfg = self._app_cfg['stages'][stage]\n processor_cfg = stage_cfg['configuration']\n\n # Populate all the directories requested in the configuration.\n for dir_key, dir_id in processor_cfg['dirs'].items():\n dir_path_value = os.path.join(self._data_dir_path, self._app_cfg['dir-paths'][dir_id])\n # Rebuild the key by replacing 'id' with 'path'\n dir_path_key = dir_key.replace('id', 'path')\n processor_cfg[dir_path_key] = dir_path_value\n\n # Create the directory if it doesn't exist.\n self._validate_path(dir_path_value)\n\n # Add the temporary directory.\n processor_cfg['tmp-dir-path'] = self._tmp_dir_path\n\n del processor_cfg['dirs']", "def verify_directories_empty(self):\n if self.install_type == \"rpm\":\n return # For RPM install we don't want to clean anything\n\n instance_dir = \"frontend_%(service)s-%(instance)s\" % \\\n { \"service\": self.service_name(), \n \"instance\": self.glidein.instance_name(), }\n #-- directories to check ---\n dirs = {}\n dirs[\"logs........\"] = os.path.join(self.logs_dir(), instance_dir)\n dirs[\"install.....\"] = os.path.join(self.install_location(), instance_dir) \n dirs[\"config......\"] = self.config_dir()\n for subdir in [\"monitor\", \"stage\"]:\n dirs[\"web %s \" % subdir] = os.path.join(self.glidein.web_location(), subdir, instance_dir)\n #--- check them --\n for type in dirs.keys():\n if os.path.isdir(dirs[type]): \n if len(os.listdir(dirs[type])) == 0:\n os.rmdir(dirs[type])\n del dirs[type] # remove from dict\n else:\n del dirs[type] # it does not exist, remove from dict\n\n #--- if all are empty, return \n if len(dirs) == 0:\n time.sleep(3)\n return # all directories 
are empty\n\n #--- See if we can remove them --- \n common.logit(\"\"\"The following directories must be empty for the install to succeed: \"\"\")\n types = sorted(dirs.keys())\n for type in types:\n common.logit(\"\"\" %(type)s: %(dir)s\"\"\" % \\\n { \"type\" : type, \"dir\" : dirs[type] })\n common.ask_continue(\"... can we remove their contents\")\n for type in dirs.keys():\n common.remove_dir_contents(dirs[type])\n os.rmdir(dirs[type])\n time.sleep(3)\n return", "def _check_directories(self, dist, component):\n path = join(self.repository, 'dists', dist, component, 'source')\n\n if not isdir(path):\n makedirs(path)", "def _ExplainIfSudoNeeded(self, tf, dirs_to_remove, old_cwd):\n # If running under Windows or Cygwin we don't need (or have) sudo.\n if system_util.IS_CYGWIN or system_util.IS_WINDOWS:\n return\n\n user_id = os.getuid()\n if os.stat(gslib.GSUTIL_DIR).st_uid == user_id:\n return\n\n # Won't fail - this command runs after main startup code that insists on\n # having a config file.\n config_file_list = GetConfigFilePaths()\n config_files = ' '.join(config_file_list)\n self._CleanUpUpdateCommand(tf, dirs_to_remove, old_cwd)\n\n # Pick current protection of each boto config file for command that restores\n # protection (rather than fixing at 600) to support use cases like how GCE\n # installs a service account with an /etc/boto.cfg file protected to 644.\n chmod_cmds = []\n for config_file in config_file_list:\n mode = oct(stat.S_IMODE((os.stat(config_file)[stat.ST_MODE])))\n chmod_cmds.append('\\n\\tsudo chmod %s %s' % (mode, config_file))\n\n raise CommandException('\\n'.join(\n textwrap.wrap(\n 'Since it was installed by a different user previously, you will need '\n 'to update using the following commands. You will be prompted for your '\n 'password, and the install will run as \"root\". 
If you\\'re unsure what '\n 'this means please ask your system administrator for help:')) + (\n '\\n\\tsudo chmod 0644 %s\\n\\tsudo env BOTO_CONFIG=\"%s\" %s update'\n '%s') % (config_files, config_files, self.gsutil_path,\n ' '.join(chmod_cmds)),\n informational=True)", "def check_appropriate_dirs(self, dirs):\n\n add_up = []\n\n for d in dirs:\n path = join(self.base_dir, d)\n files = [f for f in listdir(path) if isfile(join(path, f))]\n rcts = [f for f in files if f.startswith(self.reactant_pre) and f.endswith(\".mol\")]\n pros = [f for f in files if f.startswith(self.product_pre) and f.endswith(\".mol\")]\n\n rct_mols = [get_molecule(join(self.base_dir, d, r)) for r in rcts]\n pro_mols = [get_molecule(join(self.base_dir, d, p)) for p in pros]\n\n total_pro_length = sum([len(p) for p in pro_mols])\n total_rct_length = sum([len(r) for r in rct_mols])\n\n if total_pro_length == total_rct_length:\n add_up.append(d)\n\n return add_up", "def _CleanUpUpdateCommand(self, tf, dirs_to_remove, old_cwd):\n if tf:\n tf.close()\n self._EnsureDirsSafeForUpdate(dirs_to_remove)\n for directory in dirs_to_remove:\n try:\n shutil.rmtree(directory)\n except OSError:\n # Ignore errors while attempting to remove old dirs under Windows. They\n # happen because of Windows exclusive file locking, and the update\n # actually succeeds but just leaves the old versions around in the\n # user's temp dir.\n if not system_util.IS_WINDOWS:\n raise\n if old_cwd:\n try:\n os.chdir(old_cwd)\n except OSError:\n pass", "def dir_exception_handler(dpath: str,\n dryrun: bool,\n dirs_created: list = [],\n overwrite: bool = False) -> bool:\n # If this dir was created during this session, do not create it again\n if dpath in dirs_created:\n return False\n elif os.path.exists(dpath):\n if dryrun == False:\n # Get user input\n while overwrite not in ['Y', 'y', 'N', 'n', True]:\n overwrite = input(f\"\\n*** WARNING: Your directory {dpath} already exists. Overwrite? 
Y/N: \")\n if overwrite == True or overwrite.lower() == 'y':\n print(f\"Your directory {dpath} will be overwritten\")\n shutil.rmtree(dpath)\n return True\n else:\n return False\n else: # If dry run:\n print(f\"\\n*** WARNING: This is a dry run but if you run cp_packager in normal mode,\")\n print(f\"*** your directory {dpath} may be overwritten\")\n else:\n return True", "def test_skipped_update(self):\n dir0, dir1, dir2 = self.make_temp_dirs(3)\n self.write_file(dir0, \"foo\", \"bar\")\n self.sync_all()\n\n # Update dir0 and sync dir0/dir1 but not dir2\n self.write_file(dir0, \"foo\", \"baz\")\n self.sync_dirs(dir0, dir1)\n self.assertFile(dir0, \"foo\", \"baz\")\n self.assertFile(dir1, \"foo\", \"baz\")\n self.assertFile(dir2, \"foo\", \"bar\")\n\n # dir2 should pick up the change when all are sync'd\n self.sync_all()\n self.assertFile(dir0, \"foo\", \"baz\")\n self.assertFile(dir1, \"foo\", \"baz\")\n self.assertFile(dir2, \"foo\", \"baz\")", "def test_bad_paths(self):\n self.do_test_bad_path('frog', '/frog') # no permission to write", "def check_axe_dirs():\n safe_mkdir( AXE_IMAGE_PATH )\n safe_mkdir( AXE_OUTPUT_PATH )\n safe_mkdir( AXE_CONFIG_PATH )\n safe_mkdir( AXE_DRIZZLE_PATH )", "def check_dir(p, fix_guards=False):\n\n def prune(d):\n if d[0] == '.' 
or d == 'third_party':\n return True\n return False\n\n for root, dirs, paths in os.walk(p):\n # Prune dot directories like .git\n [dirs.remove(d) for d in list(dirs) if prune(d)]\n for path in paths:\n check_file(os.path.join(root, path), fix_guards=fix_guards)", "def update_errdir(self):\n self.errdir = []\n if len(self.rundir) == 0:\n pass\n else:\n err_index = len(self.errdir)\n while os.path.isdir(self.rundir[-1] + \"_err.\" + str(err_index)):\n self.errdir.append(self.rundir[-1] + \"_err.\" + str(err_index))\n err_index += 1", "def _ensure_dirs(dirpath):\n if not os.path.isdir(dirpath):\n if os.path.exists(dirpath):\n err = \"log path ({}) exists but is not a directory\"\n raise ConfigError(err.format(dirpath))\n os.makedirs(dirpath, 0o777)", "def _remove_files_dirs(self):\n if self.remove_remote_files_dirs:\n self._remove_remote_files_dirs()", "def clean():\n folders = ['utils_dfn/temp', 'utils_dfn/img', 'utils_dfn/mask', 'utils_dfn/output']\n for folder in folders:\n for item in os.listdir(folder):\n item_path = os.path.join(folder, item)\n if os.path.isdir(item_path):\n shutil.rmtree(item_path)\n elif os.path.isfile(item_path):\n os.remove(item_path)", "def CleanUpUpdateCommand(self, tf, dirs_to_remove):\n tf.close()\n self.EnsureDirsSafeForUpdate(dirs_to_remove)\n for directory in dirs_to_remove:\n shutil.rmtree(directory)", "def check_paths(self):\n self.data[\"app_path\"] = list(map(\n self.replace_vars_path, self.data[\"app_path\"]))\n self.data[\"icons_path\"] = list(map(\n self.replace_vars_path, self.data[\"icons_path\"]))\n new_app_path = []\n for app_path in self.data[\"app_path\"]:\n if path.isdir(app_path) or path.isfile(app_path):\n new_app_path.append(app_path)\n self.data[\"app_path\"] = new_app_path\n if not len(self.data[\"app_path\"]) == 0:\n new_icons_path = []\n for icon_path in self.data[\"icons_path\"]:\n if (self.data[\"force_create_folder\"] and\n not path.exists(icon_path)):\n log(\"Creating application folder for 
{0}\".format(self.data[\"name\"]))\n create_dir(icon_path)\n if path.isdir(icon_path):\n if (\"binary\" in self.data.keys()\n and path.isfile(icon_path + self.data[\"binary\"])):\n new_icons_path.append(icon_path)\n elif \"binary\" not in self.data.keys():\n new_icons_path.append(icon_path)\n self.data[\"icons_path\"] = new_icons_path", "def verify_cache_dirs_exists():\n verify_dir_helper(config.cache_dir)\n for dir in config._directories:\n verify_dir_helper(dir)", "def clean_all_folder():\n LOGGER.warning('removal of old files has been temporarily disabled')\n # paths_to_clean = CFG.remove_files\n # if paths_to_clean: # pylint: disable=using-constant-test\n # for remove_config in paths_to_clean: # pylint: disable=not-an-iterable\n # name = tuple(remove_config.keys())[0]\n # LOGGER.info(f'processing: {name}')\n # remove_config = remove_config[name]\n # if 'folder' not in remove_config.keys():\n # LOGGER.error(f'missing \"folder\" in {name}')\n # return\n # if 'age' not in remove_config.keys():\n # LOGGER.error(f'missing \"age\" in {name}')\n # return\n # if not os.path.exists(remove_config['folder']):\n # LOGGER.error(f'path does not exist: {remove_config[\"folder\"]}')\n # return\n # _remove_old_files_from_folder(**remove_config)\n # else:\n # LOGGER.debug('no folder to clean')", "def cleanup_adwcleaner():\n source_path = r'{SYSTEMDRIVE}\\AdwCleaner'.format(**global_vars['Env'])\n source_quarantine = r'{}\\Quarantine'.format(source_path)\n\n # Quarantine\n if os.path.exists(source_quarantine):\n os.makedirs(global_vars['QuarantineDir'], exist_ok=True)\n dest_name = r'{QuarantineDir}\\AdwCleaner_{Date-Time}'.format(\n **global_vars)\n dest_name = non_clobber_rename(dest_name)\n shutil.move(source_quarantine, dest_name)\n\n # Delete source folder if empty\n delete_empty_folders(source_path)\n\n # Main folder\n if os.path.exists(source_path):\n os.makedirs(global_vars['LogDir'], exist_ok=True)\n dest_name = r'{LogDir}\\Tools\\AdwCleaner'.format(\n **global_vars)\n 
dest_name = non_clobber_rename(dest_name)\n shutil.move(source_path, dest_name)", "def cleanup(self):\n\tprint \"clean up on \" + self.dest\n for root, folders, files in os.walk(self.dest):\n for ignore_dir in self.ignore_dirs:\n if ignore_dir in folders:\n folders.remove(ignore_dir)\n\t\t \n for folder in folders:\n backupdir = os.path.join(root,folders)\n sourcedir = bakupdir.replace(destination,source) \n if not os.path.exists(sourcedir):\n trash = backupdir.replace(destination,trash_dir)\n # shutil.move(backupdir, trash)\n print(\"move\",backupdir,\"to\",trash)\n # os.utime(trash, None)\n \n for filename in files:\n checkfile = root + \"/\" + filename\n checkfile = checkfile.replace(self.dest, self.source)\n print(\"checking if \", checkfile, \"exists\")\n if not os.path.exists(checkfile): \n print os.path.join(root,filename)\n\t\t backupfile = checkfile.replace(self.source,self.dest)\n trash = self.trash + checkfile.replace(self.source, \"\")\n # shutil.move(backupfile, trash)\n print(\"move\",backupfile,\"to\",trash)\n # os.utime(trash, None)", "def _cleanup_files(self):\n\n for root, dirs, files in os.walk(self.build_directory):\n dirs_to_delete = [\n Path(root).joinpath(x) for x in dirs if x == '__pycache__'\n ]\n files_to_delete = [\n Path(root).joinpath(x) for x in files if Path(x).suffix == '.pyc'\n ]\n for d in dirs_to_delete:\n logger.info('Deleting: %s', d)\n shutil.rmtree(d)\n for f in files_to_delete:\n logger.info('Deleting: %s', f)\n f.unlink()", "def ensuredirs(dpath, *dpaths):\n try:\n makedirs(path.join(dpath, *dpaths))\n except OSError as e:\n if e.errno != EEXIST:\n raise # Re-raise the exception.", "def ensure_dirs(cls, folder_path):\n try:\n cls.mkdirs(folder_path)\n except exceptions.PlotlyRequestError as e:\n if \"already exists\" in e.message:\n pass\n else:\n raise e" ]
[ "0.8285823", "0.62391114", "0.6160257", "0.60179305", "0.5956509", "0.5822186", "0.58019644", "0.57795304", "0.5769533", "0.55969536", "0.5537894", "0.5536626", "0.5512073", "0.5500965", "0.54969025", "0.549504", "0.54783744", "0.5458695", "0.5420346", "0.5400437", "0.5382054", "0.53780574", "0.53391004", "0.5302818", "0.5294924", "0.52900887", "0.5271824", "0.5265157", "0.5259", "0.5248575" ]
0.8231699
1
Loads version string for currently installed gsutil command.
def LoadVersionString(self): ver_file_path = self.gsutil_bin_dir + os.sep + 'VERSION' if not os.path.isfile(ver_file_path): raise CommandException( '%s not found. Did you install the\ncomplete gsutil software after ' 'the gsutil "update" command was implemented?' % ver_file_path) ver_file = open(ver_file_path, 'r') installed_version_string = ver_file.read().rstrip('\n') ver_file.close() return installed_version_string
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def getversion():\r\n\r\n global VERSION\r\n\r\n if len(VERSION) == 3:\r\n return '{}.{}.{}'.format(VERSION[0], VERSION[1], VERSION[2])\r\n else:\r\n return '{}.{}.{}-{}'.format(VERSION[0], VERSION[1], VERSION[2], VERSION[3])", "def get_version() -> str:\n version = read(\"pdf_utils/__version__.py\")\n return re.search(r\"__version__ = \\\"(.*?)\\\"\", version).group(1)", "def get_version():\n init = read(\"src\", \"{{cookiecutter.module_name}}\", \"__init__.py\")\n return VERSION_RE.search(init).group(1)", "def get_version():\n init_py = open(os.path.join(PACKAGE_NAME, '__init__.py')).read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)", "def GetGsutilPath():\n sh_stdout, sh_stderr, ret_code = cmd_utils.ExecuteOneShellCommand(\n \"which gsutil\")\n if ret_code == 0:\n return sh_stdout.strip()\n else:\n logging.error(\"`gsutil` doesn't exist on the host; \"\n \"please install Google Cloud SDK before retrying.\")\n return None", "def get_version_string():\n\n version_string = get_version()\n if not version_string:\n version_string = \"unknown\"\n\n return \"ImageSplit version \" + version_string", "def VerCommand(self, unused_args, unused_sub_opts=None, unused_headers=None,\n unused_debug=None):\n config_ver = ''\n for path in BotoConfigLocations:\n try:\n f = open(path, 'r')\n while True:\n line = f.readline()\n if not line:\n break\n if line.find('was created by gsutil version') != -1:\n config_ver = ', config file version %s' % line.split('\"')[-2]\n break\n # Only look at first first config file found in BotoConfigLocations.\n break\n except IOError:\n pass\n\n print 'gsutil version %s%s' % (self.LoadVersionString(), config_ver)", "def _get_package_version():\n file = join(get_root(), 'VERSION')\n\n if exists(file):\n with open(file) as file:\n return file.read()\n\n return ''", "def _getversion(self):\n\n import_module(self.packagename)\n version = pkg_resources.get_distribution(self.packagename).version\n return 
\"Version {} of {} is installed.\".format(version, self.packagename)", "def get_version(program):\n\n return \"%s from mrtools %s\" % (program, mrtools_version)", "def get_version():\n click.echo(get_current_version_number())", "def get_version() -> str:\n return __version__", "def _get_version(self):\n version = self.job_config.get(\"os_version\")\n if not version:\n version = DEFAULT_OS_VERSION.get(self.os_type)\n\n return str(version)", "def get_cloud_sdk_version():\n gcloud_info = subprocess.check_output(['gcloud', 'version'])\n for line in gcloud_info.split('\\n'):\n m = re.match(r'Google Cloud SDK (.+)', line)\n if m:\n return m.group(1)\n print('ERROR: Unable to parse \"gcloud version\" output: %s' % gcloud_info,\n file=sys.stderr)\n exit(1)", "def version():\n cmd = \"{} -v\".format(_detect_os())\n out = __salt__[\"cmd.run\"](cmd).splitlines()\n ret = out[0].split(\": \")\n return ret[1]", "def get_version():\n version_file = Path(__file__).resolve().parent / \"clinker\" / \"__init__.py\"\n version_match = re.search(\n r\"^__version__ = ['\\\"]([^'\\\"]*)['\\\"]\", version_file.read_text(), re.M\n )\n if version_match:\n return version_match.group(1)\n raise RuntimeError(\"Failed to find version string\")", "def get_version_from_package() -> str:\n path = join(dirname(__file__), \"__init__.py\")\n path = normpath(abspath(path))\n with open(path) as f:\n for line in f:\n if line.startswith(\"__version__\"):\n token, version = line.split(\" = \", 1)\n version = version.replace(\"'\", \"\").strip()\n return version", "def get_version():\n path = CWD / \"pettingzoo\" / \"__init__.py\"\n content = path.read_text()\n\n for line in content.splitlines():\n if line.startswith(\"__version__\"):\n return line.strip().split()[-1].strip().strip('\"')\n raise RuntimeError(\"bad version data in __init__.py\")", "def _parse_version_string(cmd_result: str) -> str:\n lines = cmd_result.splitlines()\n split_lines = [line.split(\" \") for line in lines]\n version_line = [\n 
line for line in split_lines if len(line) > 0 and line[1] == \"version\"\n ][0]\n version_string = version_line[2].replace('\"', \"\")\n return version_string", "def _get_version():\n return re.search(r'^__version__\\s*=\\s*[\\'\"]([^\\'\"]*)[\\'\"]',\n _read(\"cfunits/__init__.py\"),\n re.MULTILINE).group(1)", "def detect_version_str(self):\n c3d_bin_path = op.dirname(self.locate_command())\n if platform.system() == 'Linux':\n libname = os.listdir(op.join(c3d_bin_path, '..', 'lib'))[0]\n version_str = libname.split('-')[-1]\n elif platform.system() == 'Darwin':\n info_list_path = op.join(c3d_bin_path, '..', 'Info.plist')\n info_etree = xml.etree.ElementTree.parse(info_list_path)\n elem_bodies = [e.text for e in info_etree.iter()]\n version_str = elem_bodies[\n elem_bodies.index('CFBundleShortVersionString') + 1]\n else:\n raise ArcanaVersionNotDetectableError(\n \"Can't detect c3d version on Windows\")\n return version_str", "def version() -> str:\n with open(join(dirname(__file__), 'resources', 'VERSION')) as f:\n return f.read().strip()", "def read_version():\n # code parts were taken from here https://stackoverflow.com/a/67692\n\n path2setup = os.path.dirname(__file__)\n version_file = os.path.abspath(\n os.path.join(path2setup, \"diffusion_maps\", \"version.py\"))\n\n spec = importlib.util.spec_from_file_location(\"version\", version_file)\n version = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(version)\n return version.version.v_short", "def get_version():\n return \"0.0.1 (prerelease prototype)\"", "def get_string_version(name,\n default=DEFAULT_STRING_NOT_FOUND,\n allow_ambiguous=True):\n # get filename of callar\n callar = inspect.getouterframes(inspect.currentframe())[1][1]\n if callar.startswith('<doctest'):\n # called from doctest, find written script file\n callar = inspect.getouterframes(inspect.currentframe())[-1][1]\n # get version info from distribution\n try:\n di = get_distribution(name)\n installed_directory = 
os.path.join(di.location, name)\n if not callar.startswith(installed_directory) and not allow_ambiguous:\n # not installed, but there is another version that *is*\n raise DistributionNotFound\n except DistributionNotFound:\n return default\n else:\n return di.version", "def get_setup_version():\n if os.path.isdir(\".git\"):\n process = subprocess.Popen(COMMAND_DESCRIBE_VERSION, **SUBPROCESS_KWARGS)\n process.wait()\n version = process.communicate()[0].decode(\"utf-8\").strip()\n return re.match(re_version, version).group(1)\n else:\n return \"0.1\"", "def get_version():\n init_py = open(path.join(here, 'silverstrike', '__init__.py')).read()\n return re.search(\"__version__ = ['\\\"]([^'\\\"]+)['\\\"]\", init_py).group(1)", "def _gcs_load(path):\n return Command(\"gsutil cat {}\".format(path)).output", "def get_version():\n import subprocess\n proc = subprocess.Popen(\n 'hg log -r tip --template \"{latesttagdistance}\"',\n shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n pending, _ = proc.communicate()\n return \"%(tag)sd%(pending)s\" % dict(tag=config.TAG, pending=pending)", "def get_version():\n version = \"unknown\"\n try:\n version_file = open(VERSIONFILE, \"r\")\n for line in version_file:\n if line.startswith('__version__'):\n version = line.split(\"'\")[1]\n break\n except EnvironmentError:\n pass # Okay, there is no version file.\n return version" ]
[ "0.6453992", "0.6367098", "0.6274387", "0.62553936", "0.6181244", "0.6180381", "0.617818", "0.6162138", "0.6157807", "0.6086163", "0.6051202", "0.60485405", "0.60243237", "0.60125846", "0.5997708", "0.5940522", "0.59365326", "0.5930519", "0.59246886", "0.59088504", "0.5905622", "0.5888918", "0.5868489", "0.58648694", "0.58583647", "0.58487135", "0.5836789", "0.5832423", "0.58311975", "0.58200467" ]
0.8584573
0
Checks whether copying src_uri into dst_path is not possible. This happens if a directory exists in local file system where a file needs to go or vice versa. In that case we print an error message and
def CheckForDirFileConflict(self, src_uri, dst_path): final_dir = os.path.dirname(dst_path) if os.path.isfile(final_dir): raise CommandException('Cannot retrieve %s because it a file exists ' 'where a directory needs to be created (%s).' % (src_uri, final_dir)) if os.path.isdir(dst_path): raise CommandException('Cannot retrieve %s because a directory exists ' '(%s) where the file needs to be created.' % (src_uri, dst_path))
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def safecopy(src, dst):\r\n abs_src = os.path.abspath(src)\r\n abs_dst = os.path.abspath(dst)\r\n if (abs_src != abs_dst) \\\r\n and os.path.isfile(abs_src): \r\n dirname = os.path.dirname(abs_dst)\r\n recurse_mkdir(dirname)\r\n shutil.copy(abs_src, abs_dst)", "def ErrorCheckCopyRequest(self, src_uri_expansion, dst_uri_str, headers,\n debug, command='cp'):\n for src_uri in src_uri_expansion:\n if src_uri.is_cloud_uri() and not src_uri.bucket_name:\n raise CommandException('Provider-only src_uri (%s)')\n\n if ContainsWildcard(dst_uri_str):\n matches = list(self.CmdWildcardIterator(dst_uri_str, headers=headers,\n debug=debug))\n if len(matches) > 1:\n raise CommandException('Destination (%s) matches more than 1 URI' %\n dst_uri_str)\n base_dst_uri = matches[0]\n else:\n base_dst_uri = self.StorageUri(dst_uri_str, debug=debug)\n\n # Make sure entire expansion didn't result in nothing to copy. This can\n # happen if user request copying a directory w/o -r option, for example.\n have_work = False\n for v in src_uri_expansion.values():\n if v:\n have_work = True\n break\n if not have_work:\n raise CommandException('Nothing to copy')\n\n # If multi-object copy request ensure base_dst_uri names a container.\n multi_src_request = (len(src_uri_expansion) > 1 or\n len(src_uri_expansion.values()[0]) > 1)\n if multi_src_request:\n self.InsistUriNamesContainer(command, base_dst_uri)\n\n # Ensure no src/dest pairs would overwrite src. Note that this is\n # more restrictive than the UNIX 'cp' command (which would, for example,\n # allow \"mv * dir\" and just skip the implied mv dir dir). We disallow such\n # partial completion operations in cloud copies because they are risky.\n for src_uri in iter(src_uri_expansion):\n for exp_src_uri in src_uri_expansion[src_uri]:\n new_dst_uri = self.ConstructDstUri(src_uri, exp_src_uri, base_dst_uri)\n if self.SrcDstSame(exp_src_uri, new_dst_uri):\n raise CommandException('cp: \"%s\" and \"%s\" are the same object - '\n 'abort.' 
% (exp_src_uri.uri, new_dst_uri.uri))\n\n return (base_dst_uri, multi_src_request)", "def _copy_if_not_exists(source: pl.Path, destination: pl.Path) -> None:\n if destination.is_dir():\n destination_file = destination / source.name\n else:\n destination_file = destination\n if not destination_file.exists():\n su.copy(source, destination)", "def copy_file(src, dst, ignore=None):\n # Sanity checkpoint\n src = re.sub('[^\\w/\\-\\.\\*]', '', src)\n dst = re.sub('[^\\w/\\-\\.\\*]', '', dst)\n if len(re.sub('[\\W]', '', src)) < 5 or len(re.sub('[\\W]', '', dst)) < 5:\n debug.log(\"Error: Copying file failed. Provided paths are invalid! src='%s' dst='%s'\"%(src, dst))\n else:\n # Check destination\n check = False\n if dst[-1] == '/':\n if os.path.exists(dst):\n check = True # Valid Dir\n else:\n debug.log(\"Error: Copying file failed. Destination directory does not exist (%s)\"%(dst)) #DEBUG\n elif os.path.exists(dst):\n if os.path.isdir(dst):\n check = True # Valid Dir\n dst += '/' # Add missing slash\n else:\n debug.log(\"Error: Copying file failed. %s exists!\"%dst)\n elif os.path.exists(os.path.dirname(dst)):\n check = True # Valid file path\n else:\n debug.log(\"Error: Copying file failed. %s is an invalid distination!\"%dst)\n if check:\n # Check source\n files = glob.glob(src)\n if ignore is not None: files = [fil for fil in files if not ignore in fil]\n if len(files) != 0:\n debug.log(\"Copying File(s)...\", \"Copy from %s\"%src, \"to %s\"%dst) #DEBUG\n for file_ in files:\n # Check file exists\n if os.path.isfile(file_):\n debug.log(\"Copying file: %s\"%file_) #DEBUG\n shutil.copy(file_, dst)\n else:\n debug.log(\"Error: Copying file failed. %s is not a regular file!\"%file_) #DEBUG\n else: debug.log(\"Error: Copying file failed. No files were found! 
(%s)\"%src) #DEBUG", "def copy_file_check(self):\n pass", "def copyanything(src, dst):\n try:\n copytree(src, dst, dirs_exist_ok=True)\n except FileExistsError as e: # noqa\n pass\n except OSError as err:\n # TODO(dittrich): This causes a pylint error\n # Not sure what test cases would trigger this, or best fix.\n if err.errno == os.errno.ENOTDIR: # type: ignore\n copy(src, dst)\n else:\n raise\n finally:\n remove_other_perms(dst)", "def _symlink_or_copy(src, dst):\n # try to symlink file\n try:\n os.symlink(src, dst)\n print('Creating symlink \"%s\" pointing to \"%s\"' % (dst, src))\n except Exception as ex_symlink:\n # try to copy file\n try:\n shutil.copyfile(src, dst)\n print('Copying file from \"%s\" to \"%s\"' % (src, dst))\n except Exception as ex_copy:\n raise RuntimeError('Could neither symlink nor copy file \"%s\" to \"%s\":\\n- %s\\n- %s' % (src, dst, str(ex_symlink), str(ex_copy)))", "def copy_if_needed(src: str, dst: str, filter: str|List[str]|None = None) -> bool:\n #console(f'COPY {src} --> {dst}')\n if os.path.isdir(src):\n return copy_dir(src, dst, filter)\n else:\n return copy_file(src, dst, filter)", "def _tryLink(self,src, dst):\n\n hiero.core.log.info(\"Attempting to link %s to %s\" % (src, dst))\n \n try:\n os.link(util.asUnicode(src), util.asUnicode(dst))\n except OSError as err:\n # If the OS returns an ENOTSUP error (45), for example when trying to set\n # flags on an NFS mounted volume that doesn't support them, Python should\n # absorb this. However, a regression in Python 2.7.3 causes this not to\n # be the case, and the error is thrown as an exception. We therefore\n # catch this explicitly as value 45, since errno.ENOTSUP is not defined\n # in Python 2.7.2 (which is part of the problem). 
See the following\n # link for further information: http://bugs.python.org/issue14662\n # See TP 199072.\n if err.errno == 45: # ENOTSUP\n pass\n elif err.errno == 17: # FILE EXISTS\n raise\n else:\n raise", "def HandleMultiSrcCopyRequst(self, src_uri_expansion, dst_uri):\n # If src_uri and dst_uri both name containers, handle\n # two cases to make copy command work like UNIX \"cp -r\" works:\n # a) if dst_uri names a non-existent directory, copy objects to a new\n # directory with the dst_uri name. In this case,\n # gsutil gs://bucket/a dir\n # should create dir/a.\n # b) if dst_uri names an existing directory, copy objects under that\n # directory. In this case,\n # gsutil gs://bucket/a dir\n # should create dir/bucket/a.\n src_uri_to_check = src_uri_expansion.keys()[0]\n if (src_uri_to_check.names_container() and dst_uri.names_container() and\n os.path.exists(dst_uri.object_name)):\n new_name = ('%s%s%s' % (dst_uri.object_name, os.sep,\n src_uri_to_check.bucket_name)).rstrip('/')\n dst_uri = dst_uri.clone_replace_name(new_name)\n # Create dest directory if needed.\n if dst_uri.is_file_uri() and not os.path.exists(dst_uri.object_name):\n os.makedirs(dst_uri.object_name)\n return dst_uri", "def move_file(src, dst):\n # Sanity checkpoint\n src = re.sub('[^\\w/\\-\\.\\*]', '', src)\n dst = re.sub('[^\\w/\\-\\.\\*]', '', dst)\n if len(re.sub('[\\W]', '', src)) < 5 or len(re.sub('[\\W]', '', dst)) < 5:\n debug.log(\"Error: Moving file failed. Provided paths are invalid! src='%s' dst='%s'\"%(src, dst))\n else:\n # Check destination\n check = False\n if dst[-1] == '/':\n if os.path.exists(dst):\n check = True # Valid Dir\n else:\n debug.log(\"Error: Moving file failed. Destination directory does not exist (%s)\"%(dst)) #DEBUG\n elif os.path.exists(dst):\n if os.path.isdir(dst):\n check = True # Valid Dir\n dst += '/' # Add missing slash\n else:\n debug.log(\"Error: Moving file failed. 
%s exists!\"%dst)\n elif os.path.exists(os.path.dirname(dst)):\n check = True # Valid file path\n else:\n debug.log(\"Error: Moving file failed. %s is an invalid distination!\"%dst)\n if check:\n # Check source\n files = glob.glob(src)\n if len(files) != 0:\n debug.log(\"Moving File(s)...\", \"Move from %s\"%src, \"to %s\"%dst)\n for file_ in files:\n # Check if file contains invalid symbols:\n invalid_chars = re.findall('[^\\w/\\-\\.\\*]', os.path.basename(file_))\n if invalid_chars:\n debug.graceful_exit((\"Error: File %s contains invalid \"\n \"characters %s!\"\n )%(os.path.basename(file_), invalid_chars))\n continue\n # Check file exists\n if os.path.isfile(file_):\n debug.log(\"Moving file: %s\"%file_)\n shutil.move(file_, dst)\n else:\n debug.log(\"Error: Moving file failed. %s is not a regular file!\"%file_)\n else: debug.log(\"Error: Moving file failed. No files were found! (%s)\"%src)", "def copy_tree_checker(src, dst):\n copy_tree(src, dst)\n return True", "def is_broken_link(self):\n if not os.path.exists(self.dst):\n if os.path.lexists(self.dst):\n return True\n return False", "def SrcDstSame(self, src_uri, dst_uri):\n if src_uri.is_file_uri() and dst_uri.is_file_uri():\n # Translate a/b/./c to a/b/c, so src=dst comparison below works.\n new_src_path = re.sub('%s+\\.%s+' % (os.sep, os.sep), os.sep,\n src_uri.object_name)\n new_src_path = re.sub('^.%s+' % os.sep, '', new_src_path)\n new_dst_path = re.sub('%s+\\.%s+' % (os.sep, os.sep), os.sep,\n dst_uri.object_name)\n new_dst_path = re.sub('^.%s+' % os.sep, '', new_dst_path)\n return (src_uri.clone_replace_name(new_src_path).uri ==\n dst_uri.clone_replace_name(new_dst_path).uri)\n else:\n return src_uri.uri == dst_uri.uri", "def safe_copy(file_path, out_dir, dst=None):\n name = dst or os.path.basename(file_path)\n if not os.path.exists(os.path.join(out_dir, name)):\n shutil.copy(file_path, os.path.join(out_dir, name))", "def copy_file(source, destination):\n\n try:\n shutil.copy(source, destination)\n 
except (OSError, IOError):\n return False\n else:\n return True", "def __checkDestination(self):\n return os.path.exists(self.__targetPath)", "def checkExisting(self, dst):\n if dst.exists():\n msg = 'Refusing to clobber existing file \"%s\"' % (\n dst.path,)\n logging.msg(msg)\n raise errors.NoClobber(msg)", "def copyFile(src, dest):\n try:\n shutil.copy(src,dest)\n except shutil.Error as e:\n print(\"Error: \" + str(e))\n except IOError as e:\n print(\"Error: \" + e.strerror)", "def copyFile(source,destination):\r\n logging.info(\"source\",source)\r\n logging.info(\"destination\",destination)\r\n try:\r\n shutil.copy(source, destination)\r\n logging.info(\"File copied successfully.\")\r\n \"\"\"If source and destination are same\"\"\"\r\n except shutil.SameFileError:\r\n logging.info(\"File not copied sucessfuly.\")\r\n \"\"\"List files and directories\"\"\"\r\n logging.info(\"After copying file:\")\r\n logging.info(os.listdir(destination))\r\n \"\"\"logging.info path of newly\r\n created file\"\"\"\r\n logging.info(\"Destination path:\", destination)", "def __copyfile(source, destination):\n logger.info(\"copyfile: %s -> %s\" % (source, destination))\n try:\n __create_destdir(destination)\n shutil.copy(source, destination)\n return True\n except Exception as e:\n logger.error(\n \"copyfile: %s -> %s failed! 
Error: %s\", source, destination, e\n )\n return False", "def copy_file(src: str, dst: str, filter: str|List[str]|None = None) -> bool:\n if _passes_filter(src, filter):\n if os.path.isdir(dst):\n dst = os.path.join(dst, os.path.basename(src))\n if _should_copy(src, dst):\n #console(f'copy {src}\\n --> {dst}')\n shutil.copyfile(src, dst, follow_symlinks=True)\n shutil.copystat(src, dst, follow_symlinks=True)\n return True\n return False", "def copyFile(src_dir, dst_dir, f_name):\n\n try:\n src_file = open(osp.join(src_dir, f_name),\"rb\")\n dst_file = open(osp.join(dst_dir, f_name),\"wb\")\n dst_file.write(src_file.read())\n dst_file.close()\n src_file.close()\n except Exception, e:\n msg = \"!!! In copying files from < %s > dir to < %s > dir exception occur. Details: %s.\" % (src_dir,dst_dir, str(e))\n print >> import_out, msg\n LOG('performImportToPortal',INFO,'copyFile', msg)", "def copy(src, dst):\n try:\n shutil.copytree(src, dst)\n except OSError as exc:\n if exc.errno == errno.ENOTDIR:\n shutil.copy(src, dst)\n else:\n raise", "def copy(src, dst):\n try:\n shutil.copytree(src, dst)\n except OSError as exc:\n if exc.errno == errno.ENOTDIR:\n shutil.copy(src, dst)\n else:\n raise", "def to_move(src: str, dst: str) -> bool:\n\n if not islink(src):\n with suppress(Exception):\n move(src, dst)\n return True\n return False", "def copy(self, src_path: str, tgt_path: str) -> None:", "def copy_folder(source, destination):\n\n try:\n shutil.copytree(source, destination)\n except (OSError, IOError):\n return False\n else:\n return True", "def copy_to_local(src_file):\r\n if not_exists(src_file, \"Source File\"):\r\n return 1, 0\r\n _local_file = os.path.basename(src_file)\r\n if wrap_cp_file(src_file, _local_file):\r\n return 1, 0\r\n return 0, _local_file", "def copy_to_local(src_file):\r\n if not_exists(src_file, \"Source File\"):\r\n return 1, 0\r\n _local_file = os.path.basename(src_file)\r\n if wrap_cp_file(src_file, _local_file):\r\n return 1, 0\r\n return 0, 
_local_file" ]
[ "0.6901259", "0.68268985", "0.673754", "0.6538259", "0.6497284", "0.64965814", "0.63711923", "0.6340625", "0.6311761", "0.6251322", "0.6174213", "0.61602324", "0.61496025", "0.60806686", "0.6078184", "0.6070296", "0.60689217", "0.59689426", "0.5968641", "0.5960017", "0.5958965", "0.59260416", "0.5925199", "0.5918626", "0.5918626", "0.5915713", "0.59059995", "0.5881383", "0.5815507", "0.5815507" ]
0.75770116
0
Implementation of getacl command.
def GetAclCommand(self, args, unused_sub_opts=None, headers=None, debug=0): # Wildcarding is allowed but must resolve to just one object. uris = list(self.CmdWildcardIterator(args[0], headers=headers, debug=debug)) if len(uris) != 1: raise CommandException('Wildcards must resolve to exactly one object for ' '"getacl" command.') uri = uris[0] if not uri.bucket_name: raise CommandException('"getacl" command must specify a bucket or ' 'object.') acl = uri.get_acl(False, headers) # Pretty-print the XML to make it more easily human editable. parsed_xml = xml.dom.minidom.parseString(acl.to_xml().encode('utf-8')) print parsed_xml.toprettyxml(indent=' ')
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "def __acl__():", "def get_acl(registry=None):", "def _get_acl(self, path, replacement):\n auth = (properties.GMN_USER, properties.GMN_PASSWD)\n eml_acl = None\n url = self._url.replace(path, replacement)\n r = adapter_utilities.requests_get_url_wrapper(url=url, auth=auth)\n if r is not None:\n eml_acl = r.text.strip()\n acl = []\n if eml_acl is not None:\n tree = ET.ElementTree(ET.fromstring(eml_acl))\n for allow_rule in tree.iter('allow'):\n principal = allow_rule.find('./principal')\n permission = allow_rule.find('./permission')\n acl.append(\n {'principal': principal.text,\n 'permission': permission.text})\n if self._owner is not None:\n acl.append({'principal': self._owner,\n 'permission': 'changePermission'})\n return acl", "def __acl__(self):\n # type: () -> AccessControlListType\n acl = []\n if self.owner_user_id:\n acl.append((Allow, self.owner_user_id, ALL_PERMISSIONS))\n if self.owner_group_id:\n acl.append((Allow, \"group:%s\" % self.owner_group_id, ALL_PERMISSIONS))\n return acl", "def accessControlList(self):\n return allACL", "def _fetch_appropriate_acl(self, ctx):\n\n bimodal_checker = ctx.req.environ[utils.ENV_BIMODAL_CHECKER]\n\n if ctx.req.method in ('GET', 'HEAD') and ctx.container_name:\n container_info = get_container_info(\n ctx.req.environ, bimodal_checker,\n swift_source=\"PFS\")\n return container_info['read_acl']\n elif ctx.object_name and ctx.req.method in (\n 'PUT', 'POST', 'DELETE', 'COALESCE'):\n container_info = get_container_info(\n ctx.req.environ, bimodal_checker,\n swift_source=\"PFS\")\n return container_info['write_acl']\n else:\n return None", "def get_acl(self):\n\n return 'private'", "def get_acls():\n return config.get_cfg_storage(ID_ACL)", "def GetResourceAclSample():\n client = CreateClient()\n for resource in client.GetResources(limit=5).entry:\n acl_feed = client.GetResourceAcl(resource)\n for acl in acl_feed.entry:\n print acl.role.value, acl.scope.type, acl.scope.value", "def _RetrieveAclRule(self, username):\n\n 
aclEntryUri = \"http://www.google.com/calendar/feeds/\"\n aclEntryUri += \"default/acl/full/user:%s\" % (username)\n entry = self.cal_client.GetCalendarAclEntry(aclEntryUri)\n print '\\t%s' % (entry.title.text,)\n print '\\t\\t Role: %s' % (entry.role.value,)\n print '\\t\\t Scope %s - %s' % (entry.scope.type, entry.scope.value)\n return entry", "def get_object_acl(Bucket=None, Key=None, VersionId=None, RequestPayer=None):\n pass", "def _PrintAclFeed(self):\n\n feed = self.cal_client.GetCalendarAclFeed()\n print feed.title.text\n for i, a_rule in zip(xrange(len(feed.entry)), feed.entry):\n print '\\t%s. %s' % (i, a_rule.title.text,)\n print '\\t\\t Role: %s' % (a_rule.role.value,)\n print '\\t\\t Scope %s - %s' % (a_rule.scope.type, a_rule.scope.value)", "def acl_in_dp():\n return {}", "def __acl__(self):\n # type: () -> AccessControlListType\n user = self.request.user\n # allow if role MAGPIE_ADMIN_PERMISSION is somehow directly set instead of inferred via members of admin-group\n acl = [(Allow, get_constant(\"MAGPIE_ADMIN_PERMISSION\", self.request), ALL_PERMISSIONS)]\n admin_group_name = get_constant(\"MAGPIE_ADMIN_GROUP\", self.request)\n admins = GroupService.by_group_name(admin_group_name, db_session=self.request.db)\n if admins:\n # need to add explicit admin-group ALL_PERMISSIONS otherwise views with other permissions than the\n # default MAGPIE_ADMIN_PERMISSION will be refused access (e.g.: views with MAGPIE_LOGGED_PERMISSION)\n acl += [(Allow, \"group:{}\".format(admins.id), ALL_PERMISSIONS)]\n if user:\n # user-specific permissions (including group memberships)\n permissions = UserService.permissions(user, self.request.db)\n user_acl = permission_to_pyramid_acls(permissions)\n # allow views that require minimally to be logged in (regardless of who is the user)\n auth_acl = [(Allow, user.id, Authenticated)]\n acl += user_acl + auth_acl\n return acl", "def parse_binary_acl(entry, entrytype, acl, objecttype_guid_map):\n if not acl:\n return entry, []\n sd = 
SecurityDescriptor(BytesIO(acl))\n relations = []\n # Parse owner\n osid = str(sd.owner_sid)\n ignoresids = [\"S-1-3-0\", \"S-1-5-18\"]\n # Ignore Creator Owner or Local System\n if osid not in ignoresids:\n relations.append(build_relation(osid, 'Owner', inherited=False))\n for ace_object in sd.dacl.aces:\n if ace_object.ace.AceType != 0x05 and ace_object.ace.AceType != 0x00:\n # These are the only two aces we care about currently\n logging.debug('Don\\'t care about acetype %d', ace_object.ace.AceType)\n continue\n # Check if sid is ignored\n sid = str(ace_object.acedata.sid)\n # Ignore Creator Owner or Local System\n if sid in ignoresids:\n continue\n if ace_object.ace.AceType == 0x05:\n is_inherited = ace_object.has_flag(ACE.INHERITED_ACE)\n # ACCESS_ALLOWED_OBJECT_ACE\n if not ace_object.has_flag(ACE.INHERITED_ACE) and ace_object.has_flag(ACE.INHERIT_ONLY_ACE):\n # ACE is set on this object, but only inherited, so not applicable to us\n continue\n\n # Check if the ACE has restrictions on object type (inherited case)\n if ace_object.has_flag(ACE.INHERITED_ACE) \\\n and ace_object.acedata.has_flag(ACCESS_ALLOWED_OBJECT_ACE.ACE_INHERITED_OBJECT_TYPE_PRESENT):\n # Verify if the ACE applies to this object type\n if not ace_applies(ace_object.acedata.get_inherited_object_type().lower(), entrytype, objecttype_guid_map):\n continue\n\n mask = ace_object.acedata.mask\n # Now the magic, we have to check all the rights BloodHound cares about\n\n # Check generic access masks first\n if mask.has_priv(ACCESS_MASK.GENERIC_ALL) or mask.has_priv(ACCESS_MASK.WRITE_DACL) \\\n or mask.has_priv(ACCESS_MASK.WRITE_OWNER) or mask.has_priv(ACCESS_MASK.GENERIC_WRITE):\n # For all generic rights we should check if it applies to our object type\n if ace_object.acedata.has_flag(ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT) \\\n and not ace_applies(ace_object.acedata.get_object_type().lower(), entrytype, objecttype_guid_map):\n # If it does not apply, break out of the loop here in order 
to\n # avoid individual rights firing later on\n continue\n # Check from high to low, ignore lower privs which may also match the bitmask,\n # even though this shouldn't happen since we check for exact matches currently\n if mask.has_priv(ACCESS_MASK.GENERIC_ALL):\n # Report this as LAPS rights if it's a computer object AND laps is enabled\n if entrytype == 'computer' and \\\n ace_object.acedata.has_flag(ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT) and \\\n entry['Properties']['haslaps']:\n if ace_object.acedata.get_object_type().lower() == objecttype_guid_map['ms-mcs-admpwd']:\n relations.append(build_relation(sid, 'ReadLAPSPassword', inherited=is_inherited))\n else:\n relations.append(build_relation(sid, 'GenericAll', inherited=is_inherited))\n continue\n if mask.has_priv(ACCESS_MASK.GENERIC_WRITE):\n relations.append(build_relation(sid, 'GenericWrite', inherited=is_inherited))\n # Don't skip this if it's the domain object, since BloodHound reports duplicate\n # rights as well, and this might influence some queries\n if entrytype != 'domain' and entrytype != 'computer':\n continue\n\n # These are specific bitmasks so don't break the loop from here\n if mask.has_priv(ACCESS_MASK.WRITE_DACL):\n relations.append(build_relation(sid, 'WriteDacl', inherited=is_inherited))\n\n if mask.has_priv(ACCESS_MASK.WRITE_OWNER):\n relations.append(build_relation(sid, 'WriteOwner', inherited=is_inherited))\n\n # Property write privileges\n writeprivs = ace_object.acedata.mask.has_priv(ACCESS_MASK.ADS_RIGHT_DS_WRITE_PROP)\n if writeprivs:\n # GenericWrite\n if entrytype in ['user', 'group'] and not ace_object.acedata.has_flag(ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT):\n relations.append(build_relation(sid, 'GenericWrite', inherited=is_inherited))\n if entrytype == 'group' and can_write_property(ace_object, EXTRIGHTS_GUID_MAPPING['WriteMember']):\n relations.append(build_relation(sid, 'WriteProperty', 'AddMember', inherited=is_inherited))\n\n # Property read 
privileges\n if ace_object.acedata.mask.has_priv(ACCESS_MASK.ADS_RIGHT_DS_READ_PROP):\n if entrytype == 'computer' and \\\n ace_object.acedata.has_flag(ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT) and \\\n entry['Properties']['haslaps']:\n if ace_object.acedata.get_object_type().lower() == objecttype_guid_map['ms-mcs-admpwd']:\n relations.append(build_relation(sid, 'ReadLAPSPassword', inherited=is_inherited))\n\n # Extended rights\n control_access = ace_object.acedata.mask.has_priv(ACCESS_MASK.ADS_RIGHT_DS_CONTROL_ACCESS)\n if control_access:\n # All Extended\n if entrytype in ['user', 'domain', 'computer'] and not ace_object.acedata.has_flag(ACCESS_ALLOWED_OBJECT_ACE.ACE_OBJECT_TYPE_PRESENT):\n relations.append(build_relation(sid, 'ExtendedRight', 'All', inherited=is_inherited))\n if entrytype == 'domain' and has_extended_right(ace_object, EXTRIGHTS_GUID_MAPPING['GetChanges']):\n relations.append(build_relation(sid, 'ExtendedRight', 'GetChanges', inherited=is_inherited))\n if entrytype == 'domain' and has_extended_right(ace_object, EXTRIGHTS_GUID_MAPPING['GetChangesAll']):\n relations.append(build_relation(sid, 'ExtendedRight', 'GetChangesAll', inherited=is_inherited))\n if entrytype == 'user' and has_extended_right(ace_object, EXTRIGHTS_GUID_MAPPING['UserForceChangePassword']):\n relations.append(build_relation(sid, 'ExtendedRight', 'User-Force-Change-Password', inherited=is_inherited))\n\n # print(ace_object.acedata.sid)\n if ace_object.ace.AceType == 0x00:\n is_inherited = ace_object.has_flag(ACE.INHERITED_ACE)\n mask = ace_object.acedata.mask\n # ACCESS_ALLOWED_ACE\n if mask.has_priv(ACCESS_MASK.GENERIC_ALL):\n # Generic all includes all other rights, so skip from here\n relations.append(build_relation(sid, 'GenericAll', inherited=is_inherited))\n continue\n\n if mask.has_priv(ACCESS_MASK.ADS_RIGHT_DS_WRITE_PROP):\n # Genericwrite is only for properties, don't skip after\n relations.append(build_relation(sid, 'GenericWrite', inherited=is_inherited))\n\n 
if mask.has_priv(ACCESS_MASK.WRITE_OWNER):\n relations.append(build_relation(sid, 'WriteOwner', inherited=is_inherited))\n\n # For users and domain, check extended rights\n if entrytype in ['user', 'domain'] and mask.has_priv(ACCESS_MASK.ADS_RIGHT_DS_CONTROL_ACCESS):\n relations.append(build_relation(sid, 'ExtendedRight', 'All', inherited=is_inherited))\n\n if mask.has_priv(ACCESS_MASK.WRITE_DACL):\n relations.append(build_relation(sid, 'WriteDacl', inherited=is_inherited))\n\n # pprint.pprint(entry)\n # pprint.pprint(relations)\n return entry, relations", "def get_computed_acl(userids=[], request=None):", "def test_set_get_acls(self):\n self.shell.onecmd(\"create %s/one 'hello'\" % (self.tests_path))\n self.shell.onecmd(\"set_acls %s/one 'world:anyone:r digest:%s:cdrwa'\" % (\n self.tests_path, self.auth_digest))\n self.shell.onecmd(\"get_acls %s/one\" % (self.tests_path))\n\n if PYTHON3:\n user_id = \"Id(scheme='digest', id='%s')\" % (self.auth_digest)\n else:\n user_id = \"Id(scheme=u'digest', id=u'%s')\" % (self.auth_digest)\n\n user_acl = \"ACL(perms=31, acl_list=['ALL'], id=%s)\" % (user_id)\n expected_output = \"/tests/one: ['WORLD_READ', %s]\\n\" % (user_acl)\n self.assertEqual(expected_output, self.output.getvalue())", "def acl_name(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"acl_name\")", "def __base_acl__(self) -> list:\n _acls = [\n (Allow, 'g:briefy_qa', ['add', 'delete', 'edit', 'list', 'view'])\n ]\n return _acls", "def __base_acl__(self) -> list:\n _acls = [\n (Allow, 'g:professionals', ['list', 'view', 'edit']),\n ]\n return _acls", "def get_bucket_acl(Bucket=None):\n pass", "def acl_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"acl_id\")", "def __str__(self):\n return self.GenerateACLString()", "def get(resource_name: str,\n id: pulumi.Input[str],\n opts: Optional[pulumi.ResourceOptions] = None,\n arn: Optional[pulumi.Input[str]] = None,\n minimum_engine_version: Optional[pulumi.Input[str]] = None,\n name: 
Optional[pulumi.Input[str]] = None,\n name_prefix: Optional[pulumi.Input[str]] = None,\n tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n tags_all: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,\n user_names: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None) -> 'Acl':\n opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))\n\n __props__ = _AclState.__new__(_AclState)\n\n __props__.__dict__[\"arn\"] = arn\n __props__.__dict__[\"minimum_engine_version\"] = minimum_engine_version\n __props__.__dict__[\"name\"] = name\n __props__.__dict__[\"name_prefix\"] = name_prefix\n __props__.__dict__[\"tags\"] = tags\n __props__.__dict__[\"tags_all\"] = tags_all\n __props__.__dict__[\"user_names\"] = user_names\n return Acl(resource_name, opts=opts, __props__=__props__)", "def get_processed_acl(self):\n return self.get_storage().default_acl", "def parse_acl(*args, **kwargs):\n version = kwargs.pop('version', None)\n if version in (1, None):\n return parse_acl_v1(*args)\n elif version == 2:\n return parse_acl_v2(*args, **kwargs)\n else:\n raise ValueError('Unknown ACL version: parse_acl(%r, %r)' %\n (args, kwargs))", "def describe_acls(self, acl_resource):\n\n request = DescribeAclsRequest_v0(\n resource_type=acl_resource.resource_type,\n resource_name=acl_resource.name,\n principal=acl_resource.principal,\n host=acl_resource.host,\n operation=acl_resource.operation,\n permission_type=acl_resource.permission_type\n )\n\n response = self.send_request_and_get_response(request)\n\n if response.error_code != self.SUCCESS_CODE:\n self.close()\n self.module.fail_json(\n msg='Error while describing ACL %s. '\n 'Error %s: %s.' 
% (\n acl_resource, response.error_code,\n response.error_message\n )\n )\n\n return response.resources", "def list_acl_policies(client, container_name, **kwargs):\n return _get_acl(client, container_name, **kwargs)", "def project_acl(project):\n if \"acl\" not in project:\n cherrypy.log.error(\"Project missing ACL: %s\" % project)\n return {\"administrators\":{}, \"writers\":{}, \"readers\":{}}\n return project[\"acl\"]", "def default_acl(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"default_acl\")" ]
[ "0.72654945", "0.719457", "0.6704867", "0.66731185", "0.65172094", "0.6427574", "0.6421802", "0.6291308", "0.6276502", "0.6174854", "0.61273634", "0.5993839", "0.59550744", "0.59331304", "0.5928578", "0.5918684", "0.5900413", "0.58834785", "0.5881256", "0.5859684", "0.5852145", "0.5845291", "0.57867587", "0.578093", "0.57789034", "0.5761158", "0.57506865", "0.57219964", "0.57043576", "0.56854147" ]
0.7747878
0