text: string column (lengths 15 to 7.82k)
ids: sequence column (lengths 1 to 7)
def METHOD_NAME(line, toignore_re):
    return (SONAR_WARN_RE.match(line)
            and not SONAR_WARN_TO_IGNORE_RE.match(line)
            and (toignore_re is None or not toignore_re.match(line)))
[ 137, 10889, 3437 ]
def METHOD_NAME(): """ Perform an application-level health check. This is not part of the published API because it is intended to be used by monitoring tools. This returns a 200 response if the application is alive and able to serve requests. It returns a 500 response otherwise. """ current_app.logger.info("A healthcheck request was received") try: start_time = timer() db.session.execute("SELECT 1 FROM request LIMIT 0").fetchall() end_time = timer() - start_time current_app.logger.info("The healthcheck database query took %f seconds", end_time) except SQLAlchemyError: current_app.logger.exception("The healthcheck failed when querying the database") raise InternalServerError() return ("OK", 200, [("Content-Type", "text/plain")])
[ 14711 ]
def METHOD_NAME(change_curdir_fixtures):
    """Check that when no exec is given, by default the input is returned
    as is and validated."""
    output = tackle('no-exec.yaml')
    assert output['no_arg_call']['target'] == 'world'
    assert output['arg_call']['target'] == 'things'
[ 9, 559, 654, 1005 ]
def METHOD_NAME(dirname):
    if not have_gitpython:
        return
    try:
        repo = Repo(dirname, search_parent_directories=True)
    except InvalidGitRepositoryError:
        print("Couldn't find git info")
        return
    cur_commit = repo.commit()
    cur_branch = repo.active_branch
    print("Git info:")
    print(f"\tCurrent commit {cur_commit.hexsha} from branch {cur_branch.name}")
    try:
        # EDG: Git is insane, but this should work 99% of the time
        cur_tb = cur_branch.tracking_branch()
        if cur_tb.is_remote():
            remote_name = cur_tb.remote_name
            remote_url = repo.remotes[remote_name].url
            print(f"\tChecked out from remote {remote_name}: {remote_url}")
        else:
            print("Tracking local branch %s" % cur_tb.name)
    except Exception:  # pylint: disable=broad-except
        print("Could not resolve tracking branch or remote info!")
[ 38, 1493, 100 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_query_param(
            "api-version", "2022-01-01",
            required=True,
        ),
    }
    return parameters
[ 539, 386 ]
def METHOD_NAME(self, name): if name.startswith("kernel-") or name == "virtual/kernel": return name if name.startswith("rtld"): return name if name.endswith("-crosssdk"): return name if name.endswith("-" + self.extname): name = name.replace("-" + self.extname, "") if name.startswith("virtual/"): subs = name.split("/", 1)[1] if not subs.startswith(self.extname): return "virtual/" + self.extname + "-" + subs return name if name.startswith("/") or (name.startswith("${") and name.endswith("}")): return name if not name.startswith(self.extname): return self.extname + "-" + name return name
[ 978, 156 ]
def METHOD_NAME(val: Any, output_format: str = "standard", errors: str = "coarse") -> Any:
    """
    Reformat a number string with proper separators and whitespace.

    Parameters
    ----------
    val
        The value of number string.
    output_format
        If output_format = 'compact', return string without any separators or whitespace.
        If output_format = 'standard', return string with proper separators and whitespace.
        Note: in the case of VAT, the compact format is the same as the standard one.
    """
    val = str(val)
    if val in NULL_VALUES:
        return [np.nan]
    if not validate_de_vat(val):
        if errors == "raise":
            raise ValueError(f"Unable to parse value {val}")
        error_result = val if errors == "ignore" else np.nan
        return [error_result]
    if output_format in {"compact", "standard"}:
        result = [vat.compact(val)]
    return result
[ 275 ]
def METHOD_NAME(self, expect, *args):
    if len(args) == 0:
        args = [expect]
    self.group.set_joint_value_target(*args)
    res = self.group.get_joint_value_target()
    self.assertTrue(
        np.all(np.asarray(res) == np.asarray(expect)),
        "Setting failed for %s, values: %s" % (type(args[0]), res),
    )
[ 250, 1030, 1333 ]
def METHOD_NAME(cls, mei_files, siglum_slug, id, **options):
    for file_name in mei_files:
        ngrams = cls.process_file(file_name, siglum_slug, id, **options)
        yield file_name, ngrams
[ 356, 623, 771 ]
def METHOD_NAME(): """ Cover the lifecycle related tests including: 1. reboot vm 2. suspend -> resume 3. save -> restore 4. managedsave """ test.log.info("TEST_STEP1: Attach a hostdev interface/device to VM") iface_dev = sriov_test_obj.create_iface_dev(dev_type, iface_dict) libvirt.add_vm_device(vm_xml.VMXML.new_from_dumpxml(vm_name), iface_dev) test.log.info("TEST_STEP2: Start the VM") vm.start() vm.cleanup_serial_console() vm.create_serial_console() vm_session = vm.wait_for_serial_login(timeout=240) test.log.info("TEST_STEP3: Check network accessibility") check_points.check_vm_network_accessed(vm_session, tcpdump_iface=br_name, tcpdump_status_error=True) check_points.check_vm_iface_num(vm_session, expr_iface_no) test.log.info("TEST_STEP4: Reboot the vm") virsh.reboot(vm.name, debug=True, ignore_status=False) vm_session = vm.wait_for_serial_login(timeout=240) test.log.info("TEST_STEP3: Check network accessibility") check_points.check_vm_network_accessed(vm_session, tcpdump_iface=br_name, tcpdump_status_error=True) check_points.check_vm_iface_num(vm_session, expr_iface_no) test.log.info("TEST_STEP4: Suspend and resume VM.") virsh.suspend(vm.name, debug=True, ignore_status=False) virsh.resume(vm.name, debug=True, ignore_status=False) test.log.info("TEST_STEP5: Check network accessibility") check_points.check_vm_network_accessed(vm_session, tcpdump_iface=br_name, tcpdump_status_error=True) check_points.check_vm_iface_num(vm_session, expr_iface_no) test.log.info("TEST_STEP6: Save and restore VM.") save_file = os.path.join(data_dir.get_tmp_dir(), "save_file") virsh.save(vm_name, save_file, debug=True, ignore_status=False, timeout=10) if not libvirt.check_vm_state(vm_name, "shut off"): test.fail("The guest should be down after executing 'virsh save'.") virsh.restore(save_file, debug=True, ignore_status=False) if not libvirt.check_vm_state(vm_name, "running"): test.fail("The guest should be running after executing 'virsh restore'.") vm.cleanup_serial_console() vm.create_serial_console() vm_session = vm.wait_for_serial_login() test.log.info("TEST_STEP7: Check network accessibility") check_points.check_vm_network_accessed(vm_session, tcpdump_iface=br_name, tcpdump_status_error=True) check_points.check_vm_iface_num(vm_session, expr_iface_no) test.log.info("TEST_STEP8: Managedsave the VM.") virsh.managedsave(vm_name, debug=True, ignore_status=False, timeout=10) vm.start() vm.cleanup_serial_console() vm.create_serial_console() vm_session = vm.wait_for_serial_login() test.log.info("TEST_STEP9: Check network accessibility") check_points.check_vm_network_accessed(vm_session, tcpdump_iface=br_name, tcpdump_status_error=True) check_points.check_vm_iface_num(vm_session, expr_iface_no)
[ 22, 9 ]
def METHOD_NAME(self):
    # NOTE: original init in pytorch/examples
    # initrange = 0.1
    # self.encoder.weight.data.uniform_(-initrange, initrange)
    # self.decoder.bias.data.zero_()
    # self.decoder.weight.data.uniform_(-initrange, initrange)

    # NOTE: our default.py:RNNLM init
    for param in self.parameters():
        param.data.uniform_(-0.1, 0.1)
[ 176, 733 ]
def METHOD_NAME(region, name, architecture, image, public):
    """Import an AMI image"""
    client = boto3.client('ec2', region_name=region)
    resource = boto3.resource('ec2', region_name=region)
    description = '%s (%s)' % (name, architecture)
    snapshot_id = create_snapshot(region=region, description=description, image=image)
    client.get_waiter('snapshot_completed').wait(SnapshotIds=[snapshot_id])
    image = client.register_image(Architecture=architecture,
                                  BlockDeviceMappings=[{
                                      'DeviceName': '/dev/sda1',
                                      'Ebs': {
                                          'SnapshotId': snapshot_id,
                                          'VolumeType': 'standard',
                                      },
                                  }],
                                  EnaSupport=True,
                                  Name=description,
                                  RootDeviceName='/dev/sda1',
                                  SriovNetSupport='simple',
                                  VirtualizationType='hvm')
    image_id = image['ImageId']
    client.get_waiter('image_available').wait(ImageIds=[image_id])
    if public:
        resource.Image(image_id).modify_attribute(Attribute='launchPermission',
                                                  OperationType='add',
                                                  UserGroups=['all'])
    return image_id
[ 512, 660 ]
def METHOD_NAME(self, *, predictions=None, references=None,
                passage_non_null_threshold=2, span_non_null_threshold=2,
                verbose=False, **kwargs) -> Dict[str, Any]:
    if not predictions:
        raise ValueError("No predictions provided")
    elif not references:
        raise ValueError("No references provided")

    predictions = dict(map(self._convert_pred_to_entry, predictions))
    references = dict(map(self._convert_ref_to_entry, references))

    metrics = pretty_print(references, predictions,
                           passage_non_null_threshold=passage_non_null_threshold,
                           span_non_null_threshold=span_non_null_threshold,
                           verbose=verbose)
    return metrics
[ 226 ]
def METHOD_NAME(generate_csv=False):
    """Retrieves list of all CUDA compute capability from NVIDIA webpage.

    Args:
      generate_csv: Boolean for generating an output file containing
        the results.

    Returns:
      OrderedDict that is a list of all CUDA compute capability listed on the
      NVIDIA page. Order goes from top to bottom of the webpage content (.html).
    """
    url = "https://developer.nvidia.com/cuda-gpus"
    source = urllib.urlopen(url)
    matches = []
    while True:
        line = source.readline()
        if "</html>" in line:
            break
        else:
            gpu = re.search(
                r"<a href=.*>([\w\S\s\d\[\]\,]+[^*])</a>(<a href=.*)?.*",
                line
            )
            capability = re.search(
                r"([\d]+).([\d]+)(/)?([\d]+)?(.)?([\d]+)?.*</td>.*",
                line
            )
            if gpu:
                matches.append(gpu.group(1))
            elif capability:
                if capability.group(3):
                    capability_str = capability.group(4) + "." + capability.group(6)
                else:
                    capability_str = capability.group(1) + "." + capability.group(2)
                matches.append(capability_str)
    return create_gpu_capa_map(matches, generate_csv)
[ 404, 280, 2412 ]
def METHOD_NAME(
    self, action: WrapperActType
) -> tuple[WrapperObsType, SupportsFloat, bool, bool, dict]:
    """Transforms the action to a jax array.

    Args:
        action: the action to perform as a numpy array

    Returns:
        A tuple containing numpy versions of the next observation, reward,
        termination, truncation, and extra info.
    """
    jax_action = numpy_to_jax(action)
    obs, reward, terminated, truncated, info = self.env.METHOD_NAME(jax_action)
    return (
        jax_to_numpy(obs),
        float(reward),
        bool(terminated),
        bool(truncated),
        jax_to_numpy(info),
    )
[ 367 ]
def METHOD_NAME(points, height):
    """
    Return cv2 style contour from a label. Requires img height so the y values
    can be correct.

    @param points: list of points from a label in format
        [{'x': 542.4, 'y': 5454}, {'x': 232.12, 'y': 652}, ...]
    @param height: height of the image the contour comes from. Use img.shape[0]

    ex:
        def cb(label, img):
            for key in label['Label']:  # For each class
                for polygon in label['Label'][key]:  # For each segmentation with that class
                    points = LabelBoxParser.label_to_contour(polygon, img.shape[0])
                    cv2.drawContours(img, [points], -1, (255, 255, 255), 3)
    """
    polygon = points["polygon"]
    nppts = np.zeros((len(polygon), 2))
    for i, pt in enumerate(polygon):
        nppts[i, 0] = int(pt["x"])
        nppts[i, 1] = int(pt["y"])
    return np.array(nppts, dtype=int)
[ 636, 24, 4053 ]
def METHOD_NAME(self):
    return self.future_spec
[ 19, 3637, 1457 ]
def METHOD_NAME(self) -> 'outputs.SystemDataResponse':
    """
    Required property for system data
    """
    return pulumi.get(self, "system_data")
[ 112, 365 ]
def METHOD_NAME(response, expected_errors):
    expected_error, expected_message = expected_errors["response_to_dict_error"]
    predict_raw_response = RawResponseMock(*response)
    predict_response = HttpPredictResponse(predict_raw_response)
    with pytest.raises(expected_error) as error:
        predict_response.to_dict()
    assert expected_message in str(error.value)
[ 9, 2103, 17, 24, 553, 163, 168 ]
def METHOD_NAME(self, request):
    serializer = BatchTemplateFormWithSchemesSerializer(data=request.data)
    serializer.is_valid(raise_exception=True)
    project_id = serializer.data.get("project_id")
    template_list = serializer.data["template_list"]

    # Group the templates into project flows and common flows
    template_data = {}
    project_template_ids = []
    common_template_ids = []
    for template in template_list:
        template_id = template["id"]
        template_data[template_id] = template
        if template["template_source"] == PROJECT:
            project_template_ids.append(template_id)
        else:
            common_template_ids.append(template_id)

    # Build the querysets for project flows and common flows
    template_queryset = []
    common_template_queryset = []
    if project_template_ids:
        template_queryset = TaskTemplate.objects.select_related("pipeline_template").filter(
            id__in=project_template_ids, project_id=project_id, is_deleted=False
        )
    if common_template_ids:
        common_template_queryset = CommonTemplate.objects.select_related("pipeline_template").filter(
            id__in=common_template_ids, is_deleted=False
        )
    queryset = itertools.chain(template_queryset, common_template_queryset)

    template_dict = {}
    pipeline_template_ids = []
    for template in queryset:
        template_dict[template.id] = template
        pipeline_template_ids.append(template.pipeline_template.id)

    # Fetch the execution scheme list for each flow
    scheme_queryset = TemplateScheme.objects.filter(template_id__in=pipeline_template_ids).values(
        "template__id", "id", "name", "data"
    )
    scheme_dict = {}
    for scheme in scheme_queryset:
        template_id = scheme.pop("template__id")
        if template_id not in scheme_dict:
            scheme_dict[template_id] = [scheme]
        else:
            scheme_dict[template_id].append(scheme)

    data = {}
    for template_id, template in template_dict.items():
        data[template_id] = []
        # Each template needs form data for both its current version and the requested version.
        # The two fetches differ only in the template version, so a for loop avoids duplicated
        # logic, with is_current marking whether the form data belongs to the current version.
        for index in range(2):
            if index == 0:
                version = template.version
                is_current = True
            else:
                version = template_data[template_id]["version"]
                is_current = False
            scheme_id_list = template_data[template_id]["scheme_id_list"]
            try:
                preview_data = preview_template_tree_with_schemes(template, version, scheme_id_list)
            except Exception as e:
                message = _(
                    f"Failed to fetch request parameters: batch fetching of flow forms with "
                    f"schemes failed, error: {e}, please retry. Contact an administrator if "
                    f"the failure persists | batch form with schemes"
                )
                logger.error(message)
                return Response({"result": False, "message": message, "data": {}})
            data[template_id].append(
                {
                    "form": {**preview_data["pipeline_tree"]["constants"], **preview_data["custom_constants"]},
                    "outputs": preview_data["outputs"],
                    "constants_not_referred": preview_data["constants_not_referred"],
                    "version": preview_data["version"],
                    "is_current": is_current,
                    "scheme_id_list": scheme_id_list,
                    "template_scheme_list": scheme_dict.get(template.pipeline_template.id, []),
                }
            )
    return Response({"result": True, "data": data, "message": "success"})
[ 72 ]
def METHOD_NAME(self):
    with self.assertWarns(RuntimeWarning):
        res, skipped = self.embedder.transform(self.corpus[[0]])
    self.assertIsNone(res)
    self.assertEqual(len(skipped), 1)
    self.assertEqual(len(EmbedderCache("fasttext-en")._cache_dict), 0)
[ 9, 532, 17 ]
def METHOD_NAME(self):
    return self.channel_index
[ 19, 307, 724 ]
def METHOD_NAME(self, transaction: SignedTransactionAPI) -> None:
    validate_london_normalized_transaction(
        state=self,
        transaction=transaction,
    )
[ 187, 1853 ]
def METHOD_NAME(old, new, expected):
    assert diff.get_list_diff(old, new) == expected
[ 9, 19, 245, 2443 ]
def METHOD_NAME(node):
    visitor = _ReturnOrYieldFinder()
    visitor.start_walking(node)
    return visitor.returns
[ 1413, 29 ]
def METHOD_NAME(self):
    self.round_trip(b"\x88\x00", Frame(True, OP_CLOSE, b""))
[ 9, 1462 ]
def METHOD_NAME():
    faux_gems = [
        ProcessSpec(
            name="hello world",
            parameters=[
                Parameter(name="param 1", value=NominalReal(nominal=4.2, units="g")),
                Parameter(name="param 2", value=NominalCategorical(category="foo")),
                Parameter(
                    name="attr 1",
                    value=InChI(inchi="InChI=1S/C8H10N4O2/c1-10-4-9-6-5(10)7(13)12(3)8(14)11(6)2/h4H,1-3H3")
                )
            ],
            conditions=[
                Condition(name="cond 1", value=NormalReal(mean=4, std=0.5, units=""))
            ]
        ),
        IngredientSpec(
            name="I shouldn't be a row",
            material=LinkByUID(scope="faux", id="abcde"),
            process=LinkByUID(scope="foo", id="bar")
        ),
        ProcessRun(
            name="process 1",
            spec=ProcessSpec(
                name="nestled Spec",
                conditions=[
                    Condition(name="cond 1", value=NormalReal(mean=6, std=0.3, units="")),
                ]
            ),
            parameters=[
                Parameter(name="param 1", value=NormalReal(mean=4.2, std=0.1, units="g")),
                Parameter(name="param 3", value=NominalCategorical(category="bar"))
            ],
            conditions=[
                Condition(name="cond 1", value=NormalReal(mean=4, std=0.5, units="")),
                Condition(name="cond 2", value=NominalCategorical(category="hi")),
                Condition(
                    name="attr 1",
                    value=InChI(inchi="InChI=1S/C34H34N4O4.Fe/c1-7-21-17(3)25-13-26-19(5)23(9-11-33(39)40)31(37-26)16-32-24(10-12-34(41)42)20(6)28(38-32)15-30-22(8-2)18(4)27(36-30)14-29(21)35-25;/h7-8,13-16H,1-2,9-12H2,3-6H3,(H4,35,36,37,38,39,40,41,42);/q;+2/p-2")
                ),
            ]
        ),
        MaterialSpec(
            name="material 1",
            process=LinkByUID(scope="faux 2", id="id2"),
            properties=[
                PropertyAndConditions(
                    property=Property(name="prop 1", value=NormalReal(mean=100, std=10, units="g/cm**3")),
                    conditions=[
                        Condition(name="cond 2", value=NominalCategorical(category="hi"))
                    ]
                ),
                PropertyAndConditions(
                    property=Property(name="prop 2", value=NominalReal(nominal=33, units="1/lb")),
                    conditions=[
                        Condition(name="cond 3", value=NominalCategorical(category="citrine"))
                    ]
                ),
            ]
        ),
        MeasurementSpec(
            name="meas spec 1",
            parameters=[
                Parameter(name="param 1", value=NominalReal(nominal=2.2, units="kg")),
                Parameter(name="param 2", value=NominalCategorical(category="bar"))
            ],
        ),
        MeasurementRun(
            name="meas run 1",
            spec=LinkByUID(scope="another fake scope", id="another fake id"),
            properties=[
                Property(name="prop 1", value=NominalReal(nominal=4.1, units=""))
            ]
        )
    ]
    return faux_gems
[ 93, 245, 47, 17759 ]
def METHOD_NAME(self): prompt = "a caterpillar smoking a hookah while sitting on a mushroom" stablediff = StableDiffusion(128, 128) text_encoding = stablediff.tokenizer.encode(prompt) self.assertEqual( text_encoding[0:5], [49406, 320, 27111, 9038, 320], )
[ 9, 526, 1345, 7629, 99 ]
def METHOD_NAME(self) -> None:
    if not self.closer.done():
        self.closer.set_result(None)
[ 631, 549 ]
def METHOD_NAME():
    m = unit_separator.solve_SepNF(base="salt")
    assert value(
        m.fs.NF.permeate.flow_mass_phase_comp[0, "Liq", "H2O"]
    ) == pytest.approx(0.8682, rel=1e-3)
    assert value(
        m.fs.NF.permeate.flow_mass_phase_comp[0, "Liq", "NaCl"]
    ) == pytest.approx(2.544e-2, rel=1e-3)
    assert value(
        m.fs.NF.retentate.flow_mass_phase_comp[0, "Liq", "CaSO4"]
    ) == pytest.approx(1.168e-3, rel=1e-3)
    assert value(
        m.fs.NF.retentate.flow_mass_phase_comp[0, "Liq", "MgSO4"]
    ) == pytest.approx(1.376e-3, rel=1e-3)
    assert value(
        m.fs.NF.retentate.flow_mass_phase_comp[0, "Liq", "MgCl2"]
    ) == pytest.approx(3.401e-3, rel=1e-3)
    assert value(
        m.fs.NF.retentate.flow_mass_phase_comp[0, "Liq", "H2O"]
    ) == pytest.approx(9.645e-2, rel=1e-3)
[ 9, 805, 4509, 2479, 293, 474, 2229 ]
def METHOD_NAME(
    mock_request,
    ge_cloud_runtime_base_url,
    ge_cloud_runtime_organization_id,
    ge_cloud_access_token,
):
    # Ensure that the request fails
    mock_request.return_value.status_code = 401

    with pytest.raises(GXCloudError):
        get_context(
            cloud_mode=True,
            cloud_base_url=ge_cloud_runtime_base_url,
            cloud_organization_id=ge_cloud_runtime_organization_id,
            cloud_access_token=ge_cloud_access_token,
        )
[ 9, 365, 198, 7177, 4054, 854, 41 ]
def METHOD_NAME(cls, job):
    r"""Inserts new features to the database based on a given job

    Parameters
    ----------
    job : qiita_db.processing_job.ProcessingJob
        The Qiita process job_id generating the artifact holding the
        features to be retrieved or stored.

    Raises
    ------
    ValueError
        If the Artifact type is not BIOM
        If the artifact doesn't have a biom filepath
    """
    with qdb.sql_connection.TRN:
        acmd = job.command
        parent = job.input_artifacts[0]
        parent_pparameters = parent.processing_parameters
        if parent_pparameters is None:
            parent_cmd_name = None
            parent_parameters = None
            parent_merging_scheme = None
        else:
            pcmd = parent_pparameters.command
            parent_cmd_name = pcmd.name
            parent_parameters = parent_pparameters.values
            parent_merging_scheme = pcmd.merging_scheme
        return qdb.util.human_merging_scheme(
            acmd.name, acmd.merging_scheme,
            parent_cmd_name, parent_merging_scheme,
            job.parameters.values, [], parent_parameters)
[ 19, 4629, 4932, 280, 202 ]
def METHOD_NAME(
    self, bucket_name: str, prefix: str
) -> bool:
    """
    Checks if content exists at a certain path in a Google Cloud Storage bucket.
    """
    bucket = self.client.get_bucket(bucket_name)
    blob = bucket.blob(prefix)
    return blob.METHOD_NAME()
[ 954 ]
def METHOD_NAME(request): """ delete form for LogZipFile objects zip """ req_logger.info("Zip file delete req by user: %s", request.user) form = DeleteZipForm(request.POST) if form.is_valid(): logger.debug("Posted Form is valid") zip_file = form.cleaned_data['zip_file'] logger.info("User %s tryied to delete %s", request.user.username, zip_file) zip_file.delete() messages.info(request, 'delete successful.') return redirect('..') logger.error("Form %s is invalid", form) logger.error("Form error: %s", form.errors) messages.error(request, 'error in form') return redirect('..')
[ 512, 34 ]
def METHOD_NAME(dirFrom, dirTo):
    """
    copy contents of dirFrom and below to dirTo
    """
    global dcount, fcount
    for file in os.listdir(dirFrom):  # for files/dirs here
        pathFrom = os.path.join(dirFrom, file)
        pathTo = os.path.join(dirTo, file)  # extend both paths
        if not os.path.isdir(pathFrom):  # copy simple files
            try:
                if verbose > 1:
                    print("copying", pathFrom, "to", pathTo)
                cpfile(pathFrom, pathTo)
                fcount = fcount + 1
            except Exception:
                print("Error copying", pathFrom, "to", pathTo, "--skipped")
                print(sys.exc_info()[0], sys.exc_info()[1])
        else:
            if verbose:
                print("copying dir", pathFrom, "to", pathTo)
            try:
                os.mkdir(pathTo)  # make new subdir
                METHOD_NAME(pathFrom, pathTo)  # recur into subdirs
                dcount = dcount + 1
            except Exception:
                print("Error creating", pathTo, "--skipped")
                print(sys.exc_info()[0], sys.exc_info()[1])
[ -1 ]
def METHOD_NAME(self):
    self.raw_pwrite(0, pack_header(self.header))
[ 1308, 572 ]
def METHOD_NAME(): actions.key("cmd-0")
[ 2093, 656 ]
def METHOD_NAME(self, request, format=None):
    if not request.user.admin_permissions.other_permission():
        return api_error(status.HTTP_403_FORBIDDEN, 'Permission denied.')
    ip_list = [ip.to_dict() for ip in TrustedIP.objects.all()]
    ip_list = sorted(ip_list, key=cmp_to_key(cmp_ip))
    return Response(ip_list)
[ 19 ]
def METHOD_NAME(
    self,
    private_key: PrivateKey,
    chain_id: int = None,  # unused until SpuriousDragon
) -> FrontierTransaction:
    v, r, s = create_transaction_signature(self, private_key)
    return FrontierTransaction(
        nonce=self.nonce,
        gas_price=self.gas_price,
        gas=self.gas,
        to=self.to,
        value=self.value,
        data=self.data,
        v=v,
        r=r,
        s=s,
    )
[ 947, 3000, 1853 ]
def METHOD_NAME(line, file_handle):
    output = f"{line}\n"
    next_line = file_handle.peek()
    while next_line and next_line.startswith("//"):
        output += next_line
        # Advance the cursor and peek the next line.
        file_handle.advance()
        next_line = file_handle.peek()
    return output
[ 214, 1287, 1591 ]
def METHOD_NAME(self): """Delete schedule (Days) for supplied section""" Day.objects.create(schedule=self.schedule, date=self.today) Day.objects.create(schedule=self.schedule, date=self.tomorrow) other_section = Section.objects.create( conference=self.conference, name="other" ) other_schedule = Schedule.objects.create(section=other_section) other_day = Day.objects.create( schedule=other_schedule, date=self.tomorrow ) self.assertEqual(3, Day.objects.all().count()) data = {"delete": "Delete"} form = ScheduleSectionForm(data=data, schedule=self.schedule) form.delete_schedule() days = Day.objects.all() self.assertEqual(1, days.count()) self.assertIn(other_day, days)
[ 9, 34 ]
def METHOD_NAME(setup_cluster):
    import numpy as np
    import pandas as pd
    from dask import dataframe as dd
    from pandas._testing import assert_frame_equal

    data = np.random.randn(10000, 100)
    df = dd.from_pandas(
        pd.DataFrame(data, columns=[f"col{i}" for i in range(100)]), npartitions=4
    )
    df["col0"] = df["col0"] + df["col1"] / 2
    col2_mean = df["col2"].mean()
    df = df[df["col2"] > col2_mean]

    dask_res = df.compute()
    assert_frame_equal(
        dask_res, df.compute(scheduler=mars_scheduler), check_index_type=False
    )
    assert_frame_equal(
        dask_res, convert_dask_collection(df).execute().fetch(), check_index_type=False
    )
[ 9, 1816, 1616 ]
def METHOD_NAME(self, x):
    x = self.conv1(x)
    x = F.relu(x)
    x = self.conv2(x)
    x = F.relu(x)
    x = F.max_pool2d(x, 2)
    x = self.dropout1(x)
    x = torch.flatten(x, 1)
    x = self.fc1(x)
    x = F.relu(x)
    x = self.dropout2(x)
    x = self.fc2(x)
    return F.log_softmax(x, dim=1)
[ 76 ]
def METHOD_NAME(self, node, name=None):
[ 1680, 716 ]
def METHOD_NAME(next_link=None):
    if not next_link:
        request = build_list_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_name=cluster_name,
            template_url=self.list.metadata['url'],
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
    else:
        request = build_list_request(
            subscription_id=self._config.subscription_id,
            resource_group_name=resource_group_name,
            cluster_rp=cluster_rp,
            cluster_name=cluster_name,
            template_url=next_link,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        request.method = "GET"
    return request
[ 123, 377 ]
def METHOD_NAME(self):
    self.portal = self.layer["portal"]
    self.request = self.layer["request"]
    self.tool = getToolByName(self.portal, "plone_utils", None)
[ 0, 1 ]
def METHOD_NAME(self):
    oRule = assert_statement.rule_400()
    oRule.alignment = 'report'
    oRule.indentStyle = 'spaces'
    self.assertTrue(oRule)
    self.assertEqual(oRule.name, 'assert')
    self.assertEqual(oRule.identifier, '400')
    self.assertEqual(oRule.groups, ['alignment'])

    lExpected = [9, 14, 29, 34]
    oRule.analyze(self.oFile)
    self.assertEqual(lExpected, utils.extract_violation_lines_from_violation_object(oRule.violations))
[ 9, 446, 4157, 2284, 339, 66 ]
def METHOD_NAME(self): """Get the ip.""" return self._ip
[ 1213 ]
def METHOD_NAME(self) -> str: """ Resource Name. """ return pulumi.get(self, "name")
[ 156 ]
def METHOD_NAME(kwarg):
    prio = kwarg.get('prio', 0)
    mtu = kwarg.get('mtu', 1600)
    mpu = kwarg.get('mpu', 0)
    overhead = kwarg.get('overhead', 0)
    quantum = kwarg.get('quantum', 0)
    rate = get_rate(kwarg.get('rate', None))
    ceil = get_rate(kwarg.get('ceil', 0)) or rate
    burst = (
        kwarg.get('burst', None)
        or kwarg.get('maxburst', None)
        or kwarg.get('buffer', None)
    )
    if rate is not None:
        if burst is None:
            burst = rate / get_hz() + mtu
        burst = calc_xmittime(rate, burst)
    cburst = (
        kwarg.get('cburst', None)
        or kwarg.get('cmaxburst', None)
        or kwarg.get('cbuffer', None)
    )
    if ceil is not None:
        if cburst is None:
            cburst = ceil / get_hz() + mtu
        cburst = calc_xmittime(ceil, cburst)
    return {
        'attrs': [
            [
                'TCA_HTB_PARMS',
                {
                    'buffer': burst,
                    'cbuffer': cburst,
                    'quantum': quantum,
                    'prio': prio,
                    'rate': rate,
                    'ceil': ceil,
                    'ceil_overhead': overhead,
                    'rate_overhead': overhead,
                    'rate_mpu': mpu,
                    'ceil_mpu': mpu,
                },
            ],
            ['TCA_HTB_RTAB', True],
            ['TCA_HTB_CTAB', True],
        ]
    }
[ 19, 2, 386 ]
def METHOD_NAME(tag):
    return {
        'name': tag['tag'],
        'slug': tag['tag'],
        'count': tag['count']
    }
[ 82, 365 ]
def METHOD_NAME(test):
    cleanup(test)
    subprocess.call(["unzip",
                     "-q",  # quiet
                     "-o",  # overwrite files
                     os.path.join(test.testcase_path, "data/lineOrderOff.zip"),
                     "-d",  # save to directory
                     os.path.join(test.testcase_path, "data/")])
    test.run_compile()
    test.run_run()
    test.run_validator()
    test.diff("grade.txt", "grade.txt_lineOrderOff", "-b")
    test.json_diff("results.json", "results.json_lineOrderOff")
[ 534, 852, 3988 ]
def METHOD_NAME(tzid, tzinfo) -> None: ...
[ 372, -1 ]
def METHOD_NAME(text):
    link = parse_test_marks(text)["link"]
    return URL(link).path
[ 214, 548 ]
def METHOD_NAME(
    staff_api_client, checkout, permission_manage_checkouts
):
    # given
    checkout.metadata_storage.store_value_in_private_metadata(
        {PRIVATE_KEY: PRIVATE_VALUE, "to_clear": PRIVATE_VALUE},
    )
    checkout.metadata_storage.save(update_fields=["private_metadata"])
    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)

    # when
    response = execute_clear_private_metadata_for_item(
        staff_api_client,
        permission_manage_checkouts,
        checkout.token,
        "Checkout",
        key="to_clear",
    )

    # then
    assert item_contains_proper_private_metadata(
        response["data"]["deletePrivateMetadata"]["item"],
        checkout.metadata_storage,
        checkout_id,
    )
    assert item_without_private_metadata(
        response["data"]["deletePrivateMetadata"]["item"],
        checkout.metadata_storage,
        checkout_id,
        key="to_clear",
    )
[ 9, 34, 547, 773, 43, 206, 59 ]
def METHOD_NAME(self, parser):
    parser.add_option('--gen-test-data', dest='gen_test_data',
                      default=False, action='store_true',
                      help='Generate test data')
    parser.add_option('--test-data', dest='test_data',
                      default='data/rpc_getblockstats.json',
                      action='store', metavar='FILE',
                      help='Test data file')
[ 238, 1881 ]
def METHOD_NAME(_request, message_id):
    """Expires a message. Sets the end date to now"""
    message = get_object_or_404(Message, pk=message_id)
    message.publish_end = datetime.datetime.now()
    message.save()
    return HttpResponseRedirect(reverse('messages-view', args=(message_id,)))
[ 7740 ]
def METHOD_NAME():
    from airflow import DAG
    from flowetl.util import get_qa_checks

    dag = DAG("DUMMY_DAG", start_date=datetime.now())
    check_operators = get_qa_checks(dag=dag)
    assert {op.task_id: op.sql for op in check_operators} == qa_checks
[ 9, 235, 1937, 2676, 622 ]
def METHOD_NAME(self) -> Optional[Mapping[str, str]]:
    """
    The tags of the resource.
    """
    return pulumi.get(self, "tags")
[ 114 ]
def METHOD_NAME(self, db_session):
    return UserService(default_authority="example.com", session=db_session)
[ 1990 ]
def METHOD_NAME(posterior):
    """Called at a grid point, the respective discrete solution is returned."""
    first_point = posterior.locations[0]
    np.testing.assert_allclose(posterior(first_point).mean, posterior[0].mean)
    np.testing.assert_allclose(posterior(first_point).cov, posterior[0].cov)

    final_point = posterior.locations[-1]
    np.testing.assert_allclose(posterior(final_point).mean, posterior[-1].mean)
    np.testing.assert_allclose(posterior(final_point).cov, posterior[-1].cov)

    mid_point = posterior.locations[4]
    np.testing.assert_allclose(posterior(mid_point).mean, posterior[4].mean)
    np.testing.assert_allclose(posterior(mid_point).cov, posterior[4].cov)
[ 9, 128, 24, 3054 ]
def METHOD_NAME(self):
    np.random.seed(1234)
    array = np.random.randint(3, 1000, 100)
    int_coder = IntCode('i', 3, 0, False, 16, True)
    int_coder.compute_code(array)

    r = int_coder.encode('232')
    assert np.array_equal(np.array(r), np.array([32, 30, 2]))
    decoded = int_coder.decode(r)
    assert decoded == '232'

    r = int_coder.encode('nan')
    assert np.array_equal(np.array(r), np.array([36, 31, 15]))
    decoded = int_coder.decode(r)
    assert decoded == 'nan'
[ 9, 962 ]
def METHOD_NAME(self) -> str | None: ...
[ 366 ]
def METHOD_NAME(self, mo_coeff=None, ci0=None, callback=None, _kern=None):
    if mo_coeff is None:
        mo_coeff = self.mo_coeff
    else:
        # overwrite self.mo_coeff because it is needed in many methods of this class
        self.mo_coeff = mo_coeff
    if callback is None:
        callback = self.callback
    if _kern is None:
        _kern = mc1step.METHOD_NAME

    self.check_sanity()
    self.dump_flags()
    log = logger.Logger(self.stdout, self.verbose)

    # Initialize/overwrite self.fcisolver.orbsym and self.fcisolver.wfnsym
    mo_coeff = self.mo_coeff = casci_symm.label_symmetry_(self, mo_coeff, ci0)

    self.converged, self.e_tot, self.e_cas, self.ci, \
        self.mo_coeff, self.mo_energy = \
        _kern(self, mo_coeff,
              tol=self.conv_tol, conv_tol_grad=self.conv_tol_grad,
              ci0=ci0, callback=callback, verbose=self.verbose)
    log.note('CASSCF energy = %#.15g', self.e_tot)
    self._finalize()
    return self.e_tot, self.e_cas, self.ci, self.mo_coeff, self.mo_energy
[ 1885 ]
def METHOD_NAME(self):
    return self.settings.use_cctld_negotiation
[ 19, 1080, 8618, 2803 ]
def METHOD_NAME(self, key, value):
    return value
[ 12761, 2065 ]
def METHOD_NAME(self, ele, mva, category, wp):
    '''return true if ele passes wp'''
    threshold = self.working_points[wp][category].Eval(ele.pt())
    if self.logistic_transform:
        mva = 2.0 / (1.0 + exp(-2.0 * mva)) - 1
    return mva > threshold
[ 7680 ]
def METHOD_NAME(self):
    a0 = Vector(1.0, 1.0, 1.0)
    a1 = Vector(2.0, 2.0, 2.0)
    b0 = a0 + Vector(3.0, 3.0, 3.0)
    b1 = a1 + Vector(10.0, -10.0, 14.0)
    answer = (b0 - a1).magnitude()
    result1, result2 = Vector(), Vector()
    for i in range(self.ntests):
        aa0, aa1, bb0, bb1, l = self.randomDistortion(a0, a1, b0, b1)
        result = segmentSegmentDistance(aa0, aa1, bb0, bb1)
        self.assertTrue(fuzzyEqual(result, l*answer),
                        "Distance error: %g != %g" % (result, l*answer))
[ 9, 256, 7181, 256, 1498, 16322 ]
def METHOD_NAME(self, scale=1):
    frames = []
    for img in self.images.values():
        # rotation
        R = qvec2rotmat(img.qvec)

        # translation
        t = img.tvec

        # pose
        t = -R.T.dot(t)
        R = R.T

        # intrinsics
        cam = self.cameras[img.camera_id]
        if cam.model in ('SIMPLE_PINHOLE', 'SIMPLE_RADIAL', 'RADIAL'):
            fx = fy = cam.params[0]
            cx = cam.params[1]
            cy = cam.params[2]
        elif cam.model in ('PINHOLE', 'OPENCV', 'OPENCV_FISHEYE'):
            fx = cam.params[0]
            fy = cam.params[1]
            cx = cam.params[2]
            cy = cam.params[3]
        else:
            raise Exception('unsupported camera model: {}'.format(cam.model))
        K = np.identity(3)
        K[0, 0] = fx
        K[1, 1] = fy
        K[0, 2] = cx
        K[1, 2] = cy

        # create axis, plane and pyramid geometries that will be drawn
        cam_model = draw_camera(K, R, t, cam.width, cam.height, scale)
        frames.extend(cam_model)

    logging.info('add {} frames'.format(len(frames)))
    for i in frames:
        self.vis.add_geometry(i)
[ 238, 1427 ]
def METHOD_NAME(qw, qx, qy, qz):
    '''
    Translates from Quaternion to Pitch.

    @param qw,qx,qy,qz: Quaternion values
    @type qw,qx,qy,qz: float

    @return Pitch value translated from Quaternion
    '''
    rotateYa0 = -2.0 * (qx * qz - qw * qy)
    rotateY = 0.0
    if rotateYa0 >= 1.0:
        rotateY = pi / 2.0
    elif rotateYa0 <= -1.0:
        rotateY = -pi / 2.0
    else:
        rotateY = asin(rotateYa0)
    return rotateY
[ 2729, 2495 ]
def METHOD_NAME(f, width, height):
    for key, val in (('width', width), ('height', height)):
        if not f.get(key):
            f[key] = val
[ 238, 2327, 61, 1877 ]
def METHOD_NAME(self): """Gets the prefixlength for a row""" return self.end_net.prefixlen() - self.bits_in_matrix
[ 19, 843, 1318 ]
def METHOD_NAME(app_api_client, permission_manage_orders, order):
    # given
    test_key = "test_key"
    metadata = {test_key: "test_val"}
    invoice = Invoice.objects.create(order=order, metadata=metadata)
    number = "01/12/2020/TEST"
    url = "http://www.example.com"
    graphene_invoice_id = graphene.Node.to_global_id("Invoice", invoice.pk)
    new_metadata = [{"key": test_key, "value": "test value"}]
    private_metadata = [{"key": "private test key", "value": "private test value"}]
    variables = {
        "id": graphene_invoice_id,
        "input": {
            "number": number,
            "url": url,
            "metadata": new_metadata,
            "privateMetadata": private_metadata,
        },
    }

    # when
    response = app_api_client.post_graphql(
        INVOICE_UPDATE_MUTATION, variables, permissions=(permission_manage_orders,)
    )

    # then
    content = get_graphql_content(response)
    invoice.refresh_from_db()
    assert invoice.status == JobStatus.SUCCESS
    assert invoice.number == content["data"]["invoiceUpdate"]["invoice"]["number"]
    assert content["data"]["invoiceUpdate"]["invoice"]["metadata"] == new_metadata
    assert (
        content["data"]["invoiceUpdate"]["invoice"]["privateMetadata"]
        == private_metadata
    )
    assert invoice.url == content["data"]["invoiceUpdate"]["invoice"]["url"]
    assert content["data"]["invoiceUpdate"]["invoice"]["id"] == graphene_invoice_id
[ 9, 2486, 86, 604, 991 ]
async def METHOD_NAME(self):
    NUM = 0

    async def foo1():
        nonlocal NUM
        NUM += 1
        await asyncio.sleep(1)
        NUM += 1000
        return 42

    async def foo2():
        nonlocal NUM
        NUM += 1
        await asyncio.sleep(2)
        NUM += 1000
        return 11

    async def runner():
        g = await supervisor.Supervisor.create()
        g.create_task(foo1())
        g.create_task(foo2())
        await asyncio.sleep(0.1)
        await g.cancel()

    await runner()
    self.assertEqual(NUM, 2)
[ 9, 5204, 15309 ]
def METHOD_NAME(app: web.Application, config: dict[str, Any]):
    web.run_app(
        app,
        host=config["main"]["host"],
        port=config["main"]["port"],
        # this gets overridden by the gunicorn config in /docker/boot.sh
        access_log_format='%a %t "%r" %s %b --- [%Dus] "%{Referer}i" "%{User-Agent}i"',
    )
[ 22, 549 ]
def METHOD_NAME(self):
    i = self.__pos
    self.__pos = j = i + 4
    data = self.__buf[i:j]
    if len(data) < 4:
        raise EOFError
    return struct.unpack('>L', data)[0]
[ 789, 11068 ]
def METHOD_NAME() -> None: """Run the bot.""" # Create the Application and pass it your bot's token. application = Application.builder().token("TOKEN").build() # Add conversation handler with the states CHOOSING, TYPING_CHOICE and TYPING_REPLY conv_handler = ConversationHandler( entry_points=[CommandHandler("start", start)], states={ CHOOSING: [ MessageHandler( filters.Regex("^(Age|Favourite colour|Number of siblings)$"), regular_choice ), MessageHandler(filters.Regex("^Something else...$"), custom_choice), ], TYPING_CHOICE: [ MessageHandler( filters.TEXT & ~(filters.COMMAND | filters.Regex("^Done$")), regular_choice ) ], TYPING_REPLY: [ MessageHandler( filters.TEXT & ~(filters.COMMAND | filters.Regex("^Done$")), received_information, ) ], }, fallbacks=[MessageHandler(filters.Regex("^Done$"), done)], ) application.add_handler(conv_handler) # Run the bot until the user presses Ctrl-C application.run_polling(allowed_updates=Update.ALL_TYPES)
[ 57 ]
def METHOD_NAME(self, theta, init_state, tol=1e-14, nshots=100000):
    """
    Args:
        theta: list or numpy.array with the angles to be used in the circuit
        init_state: numpy.array with the quantum state to be Schmidt-decomposed
        tol: float tolerance below which Schmidt coefficients are treated as zero
            (default=1e-14)
        nshots: int number of runs of the circuit during the sampling process
            (default=100000)
    Returns:
        numpy.float64 with the value of the Von Neumann entropy for the given
        bipartition
    """
    Schmidt = self.Schmidt_coeff(theta, init_state, nshots=nshots)
    Schmidt = Schmidt**2

    non_zero_coeff = np.array([coeff for coeff in Schmidt if coeff > tol])

    return -np.sum(non_zero_coeff * np.log2(non_zero_coeff))
[ 7924, 7925, 3227 ]
def METHOD_NAME(
    self, child, expand=False, horizontalAlignment=None, verticalAlignment=None
):
[ 238, 186 ]
def METHOD_NAME(self, item):
    if self._pending_removals:
        self._commit_removals()
    self.data.METHOD_NAME(ref(item, self._remove))
[ 238 ]
def METHOD_NAME(self, context):
    node = self.get_node(context)
    if not node:
        return {'CANCELLED'}
    if not any(socket.is_linked for socket in node.outputs):
        return {'CANCELLED'}
    try:
        node.inputs['Subd Obj'].sv_get()[0]
    except:
        return {'CANCELLED'}
    node.Approximate(node)
    updateNode(node, context)
    return {'FINISHED'}
[ 750 ]
def METHOD_NAME(window):
    """Test setting projection matrix directly"""
    window.ctx.projection_2d_matrix = Mat4()
    with pytest.raises(ValueError):
        window.ctx.projection_2d_matrix = "moo"
[ 9, 1958, 430 ]
def METHOD_NAME(cls, value, rng, options):
    return cls
[ 2501 ]
def METHOD_NAME():
    ############################# Enums ##############################

    # yesno_values have already been created, so use postgres enum object
    # type to get around "already created" issue - works okay with mysql
    yesno_values = ENUM(*YESNO_VALUES, name=YESNO_NAME, create_type=False)

    # for some reason when using 'add_column' if you don't create the enum
    # first it will think it already exists and fail
    pjsip_redirect_method_values = sa.Enum(
        *PJSIP_REDIRECT_METHOD_VALUES, name=PJSIP_REDIRECT_METHOD_NAME)
    check = False if context.is_offline_mode() else True
    pjsip_redirect_method_values.create(op.get_bind(), checkfirst=check)

    pjsip_transport_method_values = sa.Enum(
        *PJSIP_TRANSPORT_METHOD_VALUES, name=PJSIP_TRANSPORT_METHOD_NAME)
    pjsip_transport_protocol_values = sa.Enum(
        *PJSIP_TRANSPORT_PROTOCOL_VALUES, name=PJSIP_TRANSPORT_PROTOCOL_NAME)

    ######################### create tables ##########################

    op.create_table(
        'ps_systems',
        sa.Column('id', sa.String(40), nullable=False, unique=True),
        sa.Column('timer_t1', sa.Integer),
        sa.Column('timer_b', sa.Integer),
        sa.Column('compact_headers', yesno_values),
        sa.Column('threadpool_initial_size', sa.Integer),
        sa.Column('threadpool_auto_increment', sa.Integer),
        sa.Column('threadpool_idle_timeout', sa.Integer),
        sa.Column('threadpool_max_size', sa.Integer),
    )
    op.create_index('ps_systems_id', 'ps_systems', ['id'])

    op.create_table(
        'ps_globals',
        sa.Column('id', sa.String(40), nullable=False, unique=True),
        sa.Column('max_forwards', sa.Integer),
        sa.Column('user_agent', sa.String(40)),
        sa.Column('default_outbound_endpoint', sa.String(40)),
    )
    op.create_index('ps_globals_id', 'ps_globals', ['id'])

    op.create_table(
        'ps_transports',
        sa.Column('id', sa.String(40), nullable=False, unique=True),
        sa.Column('async_operations', sa.Integer),
        sa.Column('bind', sa.String(40)),
        sa.Column('ca_list_file', sa.String(200)),
        sa.Column('cert_file', sa.String(200)),
        sa.Column('cipher', sa.String(200)),
        sa.Column('domain', sa.String(40)),
        sa.Column('external_media_address', sa.String(40)),
        sa.Column('external_signaling_address', sa.String(40)),
        sa.Column('external_signaling_port', sa.Integer),
        sa.Column('method', pjsip_transport_method_values),
        sa.Column('local_net', sa.String(40)),
        sa.Column('password', sa.String(40)),
        sa.Column('priv_key_file', sa.String(200)),
        sa.Column('protocol', pjsip_transport_protocol_values),
        sa.Column('require_client_cert', yesno_values),
        sa.Column('verify_client', yesno_values),
        sa.Column('verifiy_server', yesno_values),
        sa.Column('tos', yesno_values),
        sa.Column('cos', yesno_values),
    )
    op.create_index('ps_transports_id', 'ps_transports', ['id'])

    op.create_table(
        'ps_registrations',
        sa.Column('id', sa.String(40), nullable=False, unique=True),
        sa.Column('auth_rejection_permanent', yesno_values),
        sa.Column('client_uri', sa.String(40)),
        sa.Column('contact_user', sa.String(40)),
        sa.Column('expiration', sa.Integer),
        sa.Column('max_retries', sa.Integer),
        sa.Column('outbound_auth', sa.String(40)),
        sa.Column('outbound_proxy', sa.String(40)),
        sa.Column('retry_interval', sa.Integer),
        sa.Column('forbidden_retry_interval', sa.Integer),
        sa.Column('server_uri', sa.String(40)),
        sa.Column('transport', sa.String(40)),
        sa.Column('support_path', yesno_values),
    )
    op.create_index('ps_registrations_id', 'ps_registrations', ['id'])

    ########################## add columns ###########################

    # new columns for endpoints
    op.add_column('ps_endpoints', sa.Column('media_address', sa.String(40)))
    op.add_column('ps_endpoints', sa.Column('redirect_method',
                                            pjsip_redirect_method_values))
    op.add_column('ps_endpoints', sa.Column('set_var', sa.Text()))

    # rename mwi_fromuser to mwi_from_user
    op.alter_column('ps_endpoints', 'mwi_fromuser',
                    new_column_name='mwi_from_user',
                    existing_type=sa.String(40))

    # new columns for contacts
    op.add_column('ps_contacts', sa.Column('outbound_proxy', sa.String(40)))
    op.add_column('ps_contacts', sa.Column('path', sa.Text()))

    # new columns for aors
    op.add_column('ps_aors', sa.Column('maximum_expiration', sa.Integer))
    op.add_column('ps_aors', sa.Column('outbound_proxy', sa.String(40)))
    op.add_column('ps_aors', sa.Column('support_path', yesno_values))
[ 738 ]
def METHOD_NAME(self, new_data_dict):
    print("Updating data...")
    self.image.set_data(new_data_dict["image"])
    self.line.set_data(new_data_dict["line"])
[ 86, 365 ]
def METHOD_NAME(self, attrs): if attrs.get("zonefile") is not None: self.parse_zonefile(attrs.get("name"), attrs.pop("zonefile")) return super().METHOD_NAME(attrs)
[ 187 ]
def METHOD_NAME(): return """\
[ 3, 2794 ]
def METHOD_NAME(self, other):
    return self.apply("min", self, other)
[ 14931, 1835 ]
def METHOD_NAME() -> int: """(internal) Returns the number of tickets for the current account. """ return _baplus.METHOD_NAME()
[ 19, 3392, 598, 3769, 29 ]
def METHOD_NAME(cls):
    if cls._schema_on_200 is not None:
        return cls._schema_on_200

    cls._schema_on_200 = AAZObjectType()

    _schema_on_200 = cls._schema_on_200
    _schema_on_200.reservation_order = AAZObjectType(
        serialized_name="reservationOrder",
    )
    _ChangeDirectoryHelper._build_schema_change_directory_result_read(_schema_on_200.reservation_order)
    _schema_on_200.reservations = AAZListType()

    reservations = cls._schema_on_200.reservations
    reservations.Element = AAZObjectType()
    _ChangeDirectoryHelper._build_schema_change_directory_result_read(reservations.Element)

    return cls._schema_on_200
[ 56, 135, 69, 1072 ]
def METHOD_NAME(s): """Returns [major, minor, patch]""" arr = [int(i) for i in s.split(".")] while len(arr) < 3: arr.append(0) return arr
[ 214, -1 ]
def METHOD_NAME(self, name):
    committees = {
        "House Executive Cmte": "House Executive Committee",
        "Atchafalaya Basin Oversight": "Atchafalaya Basin Program Oversight Committee",
        "Homeland Security": "House Select Committee on Homeland Security",
        "Hurricane Recovery": "Select Committee on Hurricane Recovery",
        "Legislative Budgetary Control": "Legislative Budgetary Control Council",
        "Military and Veterans Affairs": "Special Committee on Military and Veterans Affairs",
    }
    return committees[name] if name in committees else name
[ 1137, 8846, 156 ]
def METHOD_NAME(self):
    '''Get a list of layer diff ids'''
    return [layer.diff_id for layer in self.layers]
[ 19, 94, 2443, 308 ]
def METHOD_NAME(self):
    self.assertComputeFails(MetadataReader())
[ 9, 35 ]
def METHOD_NAME(self) -> str: """ The provisioning state of the network slice resource. """ return pulumi.get(self, "provisioning_state")
[ 1994, 551 ]
def METHOD_NAME(cls, *args, **kwargs):
    from azure.cli.core.aaz import (
        AAZResourceIdArg, AAZResourceIdArgFormat, AAZListArg, AAZStrArg,
        AAZArgEnum, AAZFreeFormDictArg, AAZFreeFormDictArgFormat, AAZBoolArg,
    )
    args_schema = super().METHOD_NAME(*args, **kwargs)
    args_schema.storage_accounts = AAZListArg(
        options=["--storage-account"],
        help="Space-separated list of the destination storage account. "
             "It can be the name or resource ID of storage account."
    )
    args_schema.storage_accounts.Element = AAZResourceIdArg(
        fmt=AAZResourceIdArgFormat(template="/subscriptions/{subscription}/resourceGroups/{resource_group}"
                                            "/providers/Microsoft.Storage/storageAccounts/{}")
    )
    args_schema.staging_storage_account = AAZResourceIdArg(
        options=["--staging-storage-account"],
        help="Resource Id of the storage account that can be used to copy the vhd for staging.",
        fmt=AAZResourceIdArgFormat(template="/subscriptions/{subscription}/resourceGroups/{resource_group}"
                                            "/providers/Microsoft.Storage/storageAccounts/{}")
    )
    args_schema.resource_group_for_managed_disk = AAZResourceIdArg(
        options=['--resource-group-for-managed-disk'],
        help="Resource Group Id of the compute disks.",
        fmt=AAZResourceIdArgFormat(template="/subscriptions/{subscription}/resourceGroups/{}")
    )
    args_schema.transfer_configuration_type = AAZStrArg(
        options=['--transfer-configuration-type'],
        help="Type of the configuration for transfer."
    )
    args_schema.transfer_configuration_type.enum = AAZArgEnum({"TransferAll", "TransferUsingFilter"})
    args_schema.transfer_filter_details = AAZFreeFormDictArg(
        options=["--transfer-filter-details"],
        help="Path to the map of filter type and the details to filter.",
        fmt=AAZFreeFormDictArgFormat()
    )
    args_schema.transfer_all_blobs = AAZBoolArg(
        options=["--transfer-all-blobs"],
        help="To indicate if all Azure blobs have to be transferred",
        default=False
    )
    args_schema.transfer_all_files = AAZBoolArg(
        options=["--transfer-all-files"],
        help="To indicate if all Azure files have to be transferred",
        default=False
    )
    args_schema.data_box._registered = False
    args_schema.data_box_disk._registered = False
    args_schema.data_box_heavy._registered = False
    args_schema.data_import_details._registered = False
    args_schema.data_export_details._registered = False
    return args_schema
[ 56, 134, 135 ]
def METHOD_NAME(self, mode="triangle_strip"): """Draw collection""" gloo.set_depth_mask(0) Collection.METHOD_NAME(self, mode) gloo.set_depth_mask(1)
[ 1100 ]
def METHOD_NAME(
    dry_run: bool,
    thread_pool_size: int = 10,
    internal: Optional[bool] = None,
    use_jump_host: bool = True,
    cluster_name: Optional[str] = None,
    namespace_name: Optional[str] = None,
    defer: Optional[Callable] = None,
) -> None:
    all_namespaces = get_namespaces_minimal()
    shard_namespaces, duplicates = get_shard_namespaces(all_namespaces)

    namespaces = filter_namespaces_by_cluster_and_namespace(
        namespaces=shard_namespaces,
        cluster_name=cluster_name,
        namespace_name=namespace_name,
    )

    desired_state = get_desired_state(namespaces)

    vault_settings = get_app_interface_vault_settings()
    secret_reader = create_secret_reader(use_vault=vault_settings.vault)

    oc_map = init_oc_map_from_namespaces(
        namespaces=namespaces,
        integration=QONTRACT_INTEGRATION,
        secret_reader=secret_reader,
        internal=internal,
        use_jump_host=use_jump_host,
        thread_pool_size=thread_pool_size,
        init_projects=True,
    )
    if defer:
        defer(oc_map.cleanup)

    results = threaded.METHOD_NAME(
        manage_namespaces,
        desired_state,
        thread_pool_size,
        return_exceptions=True,
        dry_run=dry_run,
        oc_map=oc_map,
    )

    err = check_results(desired_state=desired_state, results=results)
    if err or duplicates:
        sys.exit(ExitCodes.ERROR)
[ 22 ]
def METHOD_NAME(self):
    self.module = ir.Module()
    self.datamodel = datamodel.default_manager[self.fe_type]
[ 0, 1 ]
def METHOD_NAME(): actions.key("ctrl-d")
[ 34, 534 ]