text: string (15 – 7.82k characters)
ids: sequence of int (1 – 7 elements)
def METHOD_NAME(ctx, z):
    if z:
        n, e = ctx.frexp(abs(z))
        if e:
            return e
        return ctx.convert(n)
    return ctx.ninf
[ 4767 ]
def METHOD_NAME(self: drawable.Window, mask: int) -> SelectInput: ...
[ 1472, 362 ]
def METHOD_NAME(command, args):
    output = subprocess.check_output(
        "netsh %s %s"
        % (
            command,
            " ".join(
                ['%s="%s"' % (key, value) for key, value in list(args.items()) if value]
            ),
        ),
        timeout=SHORT_REQUEST_TIMEOUT,
    )
    return output.strip().lower().endswith(b"ok.")
[ 22, 8864, 1660 ]
def METHOD_NAME(self):
    jar = CookieJar.from_dict({"keys": ["name"], "content": {"name": "value"}})
    self.assertEqual(jar.get_cookie_header_value(), "name=value")
[ 9, 280, 553 ]
def METHOD_NAME(self, line, is_full_line):
    if is_full_line:
        try:
            self._parse_filesystem_line(line)
        except ParsingDone:
            pass
    return super(Df, self).METHOD_NAME(line, is_full_line)
[ 69, 80, 534 ]
def METHOD_NAME(self): pass
[ 531, 481 ]
def METHOD_NAME(self):
[ 9, 6036, 2227, 1737, 47, 661, 45 ]
def METHOD_NAME(self, module_file, targets):
    content = ""
    for alias, aliased in targets.items():
        content += textwrap.dedent(f"""\
            if(TARGET {aliased} AND NOT TARGET {alias})
                add_library({alias} INTERFACE IMPORTED)
                set_property(TARGET {alias} PROPERTY INTERFACE_LINK_LIBRARIES {aliased})
            endif()
            """)
    save(self, module_file, content)
[ 129, 334, 298, 533, 465 ]
def METHOD_NAME(field, value):
    return get_default_field_value(field, value)
[ 89, 982, 1097 ]
def METHOD_NAME(self) -> str: """ The creation date of scope map. """ return pulumi.get(self, "creation_date")
[ 581, 153 ]
def METHOD_NAME(self):
[ 9, 5444, 2698, 2699 ]
def METHOD_NAME(self, params):
    """Return an instance of the estimator for a combination of parameters"""
    pass
[ 93, 5175 ]
def METHOD_NAME(self, parser):
    parser.add_argument(
        "object",
        type=ProxyStringType("OriginalFile"),
        help="Object to download of form <object>:<id>. "
             "OriginalFile is assumed if <object>: is omitted.")
    parser.add_argument(
        "filename",
        help="Local filename (or path for Fileset) to be saved to. '-' for stdout")
    parser.set_defaults(func=self.__call__)
    parser.add_login_arguments()
[ 111 ]
def METHOD_NAME(
    self,
    table_progress: Callable[
        [SqlTable, int, int], ContextManager[None]
    ] = _default_table_progress,
) -> DataWarehouseInferenceContext:
    """Query the data warehouse for statistics about all tables and populate a context with it."""
    table_props_list: List[TableProperties] = []
    for i, table in enumerate(self.tables):
        with table_progress(table, i, len(self.tables)):
            table_props = self._get_table_properties(table)
            table_props_list.append(table_props)
    return DataWarehouseInferenceContext(table_props=table_props_list)
[ 19, 198 ]
def METHOD_NAME(self):
    qs = super().METHOD_NAME()
    return qs.transform(Collection.transformer)
[ 19, 2386 ]
def METHOD_NAME(filename, port, per_listener):
    with open(filename, 'w') as f:
        f.write("per_listener_settings %s\n" % (per_listener))
        f.write("port %d\n" % (port))
        f.write("allow_anonymous true\n")
        f.write("acl_file %s\n" % (filename.replace('.conf', '.acl')))
[ 77, 200 ]
def METHOD_NAME(self, path: str) -> str:
    if self._file_path_template_map_fn is None:
        raise ValueError(
            f"""Converting file paths to fully-qualified object references for "{self.__class__.__name__}" \
[ 19, 324, 171, 157 ]
def METHOD_NAME(items):
    users = EventUser.for_tags(project_id, [i.value for i in items])
    for item in items:
        item._eventuser = users.get(item.value)
[ 645, 8902 ]
def METHOD_NAME(self, instance):
    for candidate_result in instance.candidate_results.all():
        membership = candidate_result.membership
        ballot = instance.ballot
        election = ballot.election
        source = instance.source
        change_metadata = self.get_change_metadata_for_bot(source)
        if membership.elected:
            ResultEvent.objects.create(
                election=election,
                winner=membership.person,
                post=ballot.post,
                old_post_id=ballot.post.slug,
                old_post_name=ballot.post.label,
                winner_party=membership.party,
                source=source,
                user=self.user,
            )
            membership.person.record_version(change_metadata)
            membership.person.save()
            LoggedAction.objects.create(
                user=self.user,
                action_type=ActionType.SET_CANDIDATE_ELECTED,
                popit_person_new_version=change_metadata["version_id"],
                person=membership.person,
                source=source,
                edit_type=EditType.BOT.name,
            )
        else:
            change_metadata["information_source"] = 'Setting as "not elected" by implication'
            membership.person.record_version(change_metadata)
            membership.person.save()
[ 1743, 2217, 947, 8950 ]
def METHOD_NAME():
    global _shutdown
    _shutdown = True
    items = list(_threads_queues.items())
    for t, q in items:
        q.put(None)
    for t, q in items:
        t.join()
[ 440, 538 ]
def METHOD_NAME(estimates: Result, metric, component_column_names):
    sut = estimates.filter(metrics=metric).to_df().columns
    for col in component_column_names:
        assert col in sut
[ 9, 527, 69, 1341, 1007, 610, 246 ]
def METHOD_NAME(_):
    """Get active addresses on the network"""
    active = Arp.objects.filter(end_time=INFINITY)
    num_active = active.count()
    num_active_ipv6 = active.extra(where=["family(ip)=6"]).count()
    num_active_ipv4 = active.extra(where=["family(ip)=4"]).count()
    return JsonResponse(
        {"active": num_active, "ipv6": num_active_ipv6, "ipv4": num_active_ipv4}
    )
[ 19, 923, 1065 ]
def METHOD_NAME(self, section, option, new_value):
    self.check_section(section)
    self.parser.set(section, option, str(bool(new_value)))
    return
[ 0, 201 ]
def METHOD_NAME(self):
    # special case: job is not in excluded list
    self.client.metadata = {'exclude_jobs': ['notfake']}
    self.assertEqual([('fake', '123'), ('fake', '122')],
                     list(self.client.get_builds(set())))
[ 9, 19, 5545, 982, 245, 654, 590 ]
def METHOD_NAME(base, attribute):
    result = getattr(base, attribute)
    if result is None:
        return ""
    # Use slug for Category instances
    return getattr(result, "slug", result)
[ 2522 ]
def METHOD_NAME(self, pid: int) -> float:
    """Return overall GPU utilization by pid if possible.
    Otherwise, returns aggregate utilization across all running processes."""
    if not self.has_gpu():
        return 0
    ngpus = self.__ngpus
    accounting_on = self.__has_per_pid_accounting
    utilization = 0
    for i in range(ngpus):
        h = self.__handle[i]
        if accounting_on:
            with contextlib.suppress(Exception):
                utilization += pynvml.nvmlDeviceGetAccountingStats(
                    h, pid
                ).gpuUtilization
        else:
            try:
                utilization += pynvml.nvmlDeviceGetUtilizationRates(h).gpu
            except pynvml.nvml.NVMLError_Unknown:
                # Silently ignore NVML errors.
                # "Fixes" https://github.com/plasma-umass/scalene/issues/471.
                pass
    return (utilization / ngpus) / 100.0
[ 1667, 5898 ]
def METHOD_NAME(self):
    self.collect_and_transform()
    assert self.block_structure.get_xblock_field(self.section_key, EFFORT_ACTIVITIES) == 1
    assert self.block_structure.get_xblock_field(self.section_key, EFFORT_TIME) == 121
    assert self.block_structure.get_xblock_field(self.subsection_key, EFFORT_ACTIVITIES) == 1
    assert self.block_structure.get_xblock_field(self.subsection_key, EFFORT_TIME) == 121
[ 9, 1111 ]
def METHOD_NAME(
    self, futures_contract: futuresContract
) -> ListOfFills:
    ## We get this from broker fills, as they have leg by leg information
    list_of_fills = (
        self.db_broker_historic_orders_data.METHOD_NAME(
            futures_contract
        )
    )
    return list_of_fills
[ 19, 3322, 351, 43, 1522 ]
def METHOD_NAME(
    self, batch_typehint, element_typehint, error_regex):
[ 9, 2801, 1096 ]
def METHOD_NAME(self):
    d = tempfile.mkdtemp()
    try:
        tp = os.path.join(d, "test.tar")
        with open(tp, "wb") as fh:
            with self.assertRaisesRegexp(ValueError, "not a regular"):
                create_tar_from_files(fh, {"test": d})
    finally:
        shutil.rmtree(d)
[ 9, 2413, 15029 ]
def METHOD_NAME(cls):
    cls.dp_ckpt = CKPT_TRAINER
    cls.dp_frz = FRZ_TRAINER
    cls.valid_data = VALID_DATA
    cls.stop_batch = STOP_BATCH
[ 0, 1, 2 ]
def METHOD_NAME(preds, target, squared):
    sk_preds = preds.view(-1).numpy()
    sk_target = target.view(-1).numpy()
    return _sk_rse(sk_target, sk_preds, squared=squared)
[ 97, 1030, 2360, 1341 ]
def METHOD_NAME(test, checks=None):
    '''DefaultCNINetwork create operation'''
    if checks is None:
        checks = []
    test.cmd('az networkcloud defaultcninetwork create --name {name} --extended-location '
             ' name={extendedLocation} type="CustomLocation" --location {location} '
             ' --cni-bgp-configuration {cniBgpConfiguration} --ip-allocation-type {ipAllocationType}'
             ' --ipv4-connected-prefix {ipv4prefix} --ipv6-connected-prefix {ipv6prefix} '
             ' --l3-isolation-domain-id {l3_isolation_domain_id} --vlan {vlan}'
             ' --tags {tags} --resource-group {rg}',
             checks=checks)
[ 367, 129 ]
def METHOD_NAME(data, axis):
    """
    Set and update the `~matplotlib.axis.Axis` units.

    Parameters
    ----------
    data : str or iterable of str
    axis : `~matplotlib.axis.Axis`
        axis on which the data is plotted

    Returns
    -------
    `.UnitData`
        object storing string to integer mapping
    """
    # the conversion call stack is default_units -> axis_info -> convert
    if axis.units is None:
        axis.set_units(UnitData(data))
    else:
        axis.units.update(data)
    return axis.units
[ 235, 1878 ]
def METHOD_NAME(self) -> str: """Get the column portion of the well name (e.g. "1").""" return self._column_name
[ 19, 105, 156 ]
def METHOD_NAME(name: Optional[pulumi.Input[str]] = None,
                scope: Optional[pulumi.Input[str]] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetIpSetResult]:
    """
    Retrieves the summary of a WAFv2 IP Set.

    ## Example Usage

    ```python
    import pulumi
    import pulumi_aws as aws

    example = aws.wafv2.get_ip_set(name="some-ip-set",
        scope="REGIONAL")
    ```

    :param str name: Name of the WAFv2 IP Set.
    :param str scope: Specifies whether this is for an AWS CloudFront distribution or for a regional application. Valid values are `CLOUDFRONT` or `REGIONAL`. To work with CloudFront, you must also specify the region `us-east-1` (N. Virginia) on the AWS provider.
    """
    ...
[ 19, 1213, 0, 146 ]
def METHOD_NAME(openshift_cred_id):
    """Openshift Source."""
    source = Source.objects.create(
        name="source_saved",
        source_type=DataSources.OPENSHIFT,
        port=222,
        hosts=["1.2.3.4"],
        options=SourceOptions.objects.create(ssl_cert_verify=True),
    )
    source.credentials.add(openshift_cred_id)
    source.save()
    return source
[ 11787, 1458 ]
def METHOD_NAME(self, name: Incomplete | None = None, category: Incomplete | None = None, required: bool = False): ...
[ 19, 1519 ]
def METHOD_NAME(self):
    self.app.config.update({
        'OAUTH2_TOKEN_EXPIRES_IN': {'password': 1800}
    })
    self.prepare_data()
    headers = self.create_basic_header(
        'password-client', 'password-secret'
    )
    rv = self.client.post('/oauth/token', data={
        'grant_type': 'password',
        'username': 'foo',
        'password': 'ok',
    }, headers=headers)
    resp = json.loads(rv.data)
    self.assertIn('access_token', resp)
    self.assertEqual(resp['expires_in'], 1800)
[ 9, 343, 5291, 623 ]
def METHOD_NAME(depth, min_size, max_size):
    if not depth:
        return None
    bin_size = int(round(bp_per_bin / depth))
    if bin_size < min_size:
        logging.info(
            "Limiting est. bin size %d to given min. %d", bin_size, min_size
        )
        bin_size = min_size
    elif bin_size > max_size:
        logging.info(
            "Limiting est. bin size %d to given max. %d", bin_size, max_size
        )
        bin_size = max_size
    return bin_size
[ -1 ]
def METHOD_NAME(path):
    _LOGGER.warning(path)
    if not DRY_RUN:
        shutil.rmtree(path)
[ 12578 ]
def METHOD_NAME(self, terminal):
    """Check if this terminal has gone silent"""
    time_now = time.mktime(time.gmtime())
    if terminal not in self.last_activities:
        dbg('Terminal %s has no last activity' % terminal)
        return True
    dbg('seconds since last activity: %f (%s)' % (time_now - self.last_activities[terminal], terminal))
    if time_now - self.last_activities[terminal] >= inactive_period:
        del(self.last_activities[terminal])
        note = Notify.Notification.new(_('Terminator'),
                                       _('Silence in: %s') % terminal.get_window_title(),
                                       'terminator')
        note.show()
    return True
[ 250, 3148 ]
def METHOD_NAME(f, rule: Dict):
    data = ", ".join(make_rule(rule))
    f.write(data + "\n")
[ 77, 446 ]
def METHOD_NAME(target=None, config=None, product=None, res_tag=None, ext=None, derived=None):
    if res_tag is None:
        resstr = ''
    else:
        resstr = '_res' + res_tag
    cube_name = get_cube_filename(target=target, config=config, product=product,
                                  ext='pbcorr_trimmed_k' + resstr, casa=False)
    derived_name = '_'.join([cube_name.replace('.fits', ''), derived])
    return (derived_name)
[ 19, 1684, -1 ]
def METHOD_NAME(pg_type, *_):
    """Data type mapping from PostgreSQL to Redshift"""
    return {
        'char': 'CHARACTER VARYING({})'.format(DEFAULT_VARCHAR_LENGTH),
        'character': 'CHARACTER VARYING({})'.format(DEFAULT_VARCHAR_LENGTH),
        'varchar': 'CHARACTER VARYING({})'.format(DEFAULT_VARCHAR_LENGTH),
        'character varying': 'CHARACTER VARYING({})'.format(DEFAULT_VARCHAR_LENGTH),
        'text': 'CHARACTER VARYING({})'.format(LONG_VARCHAR_LENGTH),
        'bit': 'BOOLEAN',
        'varbit': 'NUMERIC NULL',
        'bit varying': 'NUMERIC NULL',
        'smallint': 'NUMERIC NULL',
        'int': 'NUMERIC NULL',
        'integer': 'NUMERIC NULL',
        'bigint': 'NUMERIC NULL',
        'smallserial': 'NUMERIC NULL',
        'serial': 'NUMERIC NULL',
        'bigserial': 'NUMERIC NULL',
        'numeric': 'FLOAT',
        'double precision': 'FLOAT',
        'real': 'FLOAT',
        'bool': 'BOOLEAN',
        'boolean': 'BOOLEAN',
        'date': 'TIMESTAMP WITHOUT TIME ZONE',
        'timestamp': 'TIMESTAMP WITHOUT TIME ZONE',
        'timestamp without time zone': 'TIMESTAMP WITHOUT TIME ZONE',
        'timestamp with time zone': 'TIMESTAMP WITHOUT TIME ZONE',
        'time': 'CHARACTER VARYING({})'.format(SHORT_VARCHAR_LENGTH),
        'time without time zone': 'CHARACTER VARYING({})'.format(SHORT_VARCHAR_LENGTH),
        'time with time zone': 'CHARACTER VARYING({})'.format(SHORT_VARCHAR_LENGTH),
        # ARRAY is all uppercase, because postgres stores it in this format in information_schema.columns.data_type
        'ARRAY': 'CHARACTER VARYING({})'.format(LONG_VARCHAR_LENGTH),
        'json': 'CHARACTER VARYING({})'.format(LONG_VARCHAR_LENGTH),
        'jsonb': 'CHARACTER VARYING({})'.format(LONG_VARCHAR_LENGTH),
    }.get(pg_type, 'CHARACTER VARYING({})'.format(DEFAULT_VARCHAR_LENGTH))
[ 4316, 44, 24, 1030, 44 ]
def METHOD_NAME(self):
    return self.marketplace.MarketplaceId
[ 7742, 147 ]
async def METHOD_NAME(self):
    self.dps[PRESET_DPS] = "normal"
    async with assert_device_properties_set(self.subject._device, {SPEED_DPS: 12}):
        await self.subject.async_set_percentage(78)
[ 9, 0, 1942, 12288 ]
def METHOD_NAME(self):
    self.do_test("test_block_add_hook_baseexception")
[ 9, 573, 238, 1021, 15136 ]
def METHOD_NAME(self): """ Find the latest file of the file type and download it to the temp_dir """ files = self.s3_client.list_objects_v2( Bucket=settings.PRIVATE_DATA_BUCKET_NAME, Prefix=f"{self.import_type}/" )["Contents"] latest_file_key = sorted(files, key=lambda f: f["LastModified"])[0]["Key"] print(latest_file_key) file = Path(self.tmp_dir.name) / self.import_type / "full.csv" file.parent.mkdir(exist_ok=True, parents=True) self.file_path = file with file.open("wb") as f: self.s3_client.download_fileobj( settings.PRIVATE_DATA_BUCKET_NAME, latest_file_key, f )
[ 136, 171 ]
def METHOD_NAME(self):
[ 9, 236, 565, 61, 1953, 16018 ]
def METHOD_NAME(self, request, context):
    logging.warning(
        "Purge request not implemented: %s %s",
        request.DESCRIPTOR.full_name,
        MessageToJson(request),
    )
    pur = s6a_proxy_pb2.PurgeUEAnswer()
    print_grpc(pur, self._print_grpc_payload, "PUR:")
    return pur
[ 4717, 7379 ]
def METHOD_NAME(name: Optional[pulumi.Input[str]] = None,
                project: Optional[pulumi.Input[Optional[str]]] = None,
                opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetBackendServiceIamPolicyResult]:
    """
    Use this data source to access information about an existing resource.

    :param str name: Used to find the parent resource to bind the IAM policy to
    :param str project: The ID of the project in which the resource belongs.
           If it is not provided, the project will be parsed from the identifier of the parent resource. If no project is provided in the parent identifier and no project is specified, the provider project is used.
    """
    ...
[ 19, 3127, 549, 1694, 54, 146 ]
def METHOD_NAME(fmaps_in, shape):
    """Crop only the spatial dimensions to match shape.

    Args:
        fmaps_in: The input tensor.
        shape: A list (not a tensor) with the requested shape
            [_, _, z, y, x] or [_, _, y, x].
    """
    in_shape = fmaps_in.get_shape().as_list()
    offset = [0, 0] + [(in_shape[i] - shape[i]) // 2 for i in range(2, len(shape))]
    size = in_shape[0:2] + shape[2:]
    fmaps = tf.slice(fmaps_in, offset, size)
    return fmaps
[ 712, 6580 ]
def METHOD_NAME(model_name: str,
                model: nn.Module,
                input_signature: torch.Tensor,
                channel_divisibles: List[int],
                output_dir: str,
                onnx_export_kwargs: Mapping[str, Any] = {},
                export_fp16_model=True,
                exclude_nodes: List[str] = []) -> None:
    expanding_runner = ExpandingRunner(model, input_signature)
    for channel_divisible in channel_divisibles:
        dir_path = os.path.join(output_dir,
                                model_name + "_padded_{}".format(channel_divisible))
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
        expanded_model, expanding_spec = expanding_runner.expand(
            channel_divisible, exclude_nodes)
        expanded_model.eval()
        torch.save(expanded_model.state_dict(),
                   os.path.join(dir_path, model_name + ".pth"))
        with open(os.path.join(dir_path, "expanding_spec"), 'w') as f:
            f.write(expanding_spec.serialize())
        torch.onnx.export(
            expanded_model,
            input_signature,
            os.path.join(dir_path, model_name + "_fp32.onnx"),
            export_params=True,
            opset_version=10,
            do_constant_folding=True,
            **onnx_export_kwargs)
        if export_fp16_model:
            expanded_model = expanded_model.cuda().half().eval()
            torch.onnx.export(
                expanded_model,
                input_signature.cuda().half(),
                os.path.join(dir_path, model_name + "_fp16.onnx"),
                export_params=True,
                opset_version=10,
                do_constant_folding=True,
                **onnx_export_kwargs)
[ 2450, 61, 294 ]
def METHOD_NAME(self):
[ 537 ]
def METHOD_NAME(name, path):
    """
    Remove symbolic links determining the default commands.

    CLI Example:

    .. code-block:: bash

        salt '*' alternatives.remove name path
    """
    cmd = [_get_cmd(), "--remove", name, path]
    out = __salt__["cmd.run_all"](cmd, python_shell=False)
    if out["retcode"] > 0:
        return out["stderr"]
    return out["stdout"]
[ 188 ]
def METHOD_NAME(self):
    # Modified gmpe
    mgmpe = CY14SiteTerm(gmpe_name='ChiouYoungs2014')

    # Set parameters
    sites = Dummy.get_site_collection(4, vs30=760., vs30measured=True, z1pt0=0.)
    rup = Dummy.get_rupture(mag=6.0)
    rup.dip = 90.
    rup.ztor = 0.
    rup.rrup = np.array([1., 10., 30., 70.])
    rup.rx = np.array([1., 10., 30., 70.])
    rup.rjb = np.array([1., 10., 30., 70.])
    ctx = full_context(sites, rup)
    imt = PGA()
    stdt = [StdDev.TOTAL]

    # Compute results
    mean, stds = mgmpe.get_mean_and_stddevs(ctx, ctx, ctx, imt, stdt)

    # Compute the expected results
    gmpe = ChiouYoungs2014()
    mean_expected, stds_expected = gmpe.get_mean_and_stddevs(
        ctx, ctx, ctx, imt, stdt)

    # Test that for reference soil conditions the modified GMPE gives the
    # same results of the original gmpe
    np.testing.assert_almost_equal(mean, mean_expected, decimal=7)
    np.testing.assert_almost_equal(stds, stds_expected, decimal=2)
[ 9, 7110, 7111, 7112, 1483 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_url_param(
            "resourceGroupName", self.ctx.args.resource_group,
            required=True,
        ),
        **self.serialize_url_param(
            "snapshotName", self.ctx.args.snapshot_name,
            required=True,
        ),
        **self.serialize_url_param(
            "subscriptionId", self.ctx.subscription_id,
            required=True,
        ),
    }
    return parameters
[ 274, 386 ]
def METHOD_NAME(self, request):
    # If the request is a format that only this plugin can handle,
    # we report that we can do it; a useful error will be raised
    # when simpleitk is not installed. For the more common formats
    # we only report that we can read if the library is installed.
    if request.extension in ITK_FORMATS:
        return True
    if has_module("itk.ImageIOBase") or has_module("SimpleITK"):
        return request.extension in ALL_FORMATS
[ 1046, 203 ]
async def METHOD_NAME(self):
    await self.AD.plugins.notify_plugin_started(
        self.name,
        self.namespace,
        self.get_metadata(),
        self.get_complete_state(),
        True,
    )

    while not self.stopping:
        if self.current_event >= len(self.config["sequence"]["events"]) and (
            "loop" in self.config["sequence"] and self.config["sequence"]["loop"] == 0
            or "loop" not in self.config["sequence"]
        ):
            while not self.stopping:
                await asyncio.sleep(1)
            return None
        else:
            event = self.config["sequence"]["events"][self.current_event]
            await asyncio.sleep(event["offset"])
            if "state" in event:
                entity = event["state"]["entity"]
                old_state = self.state[entity]
                new_state = event["state"]["newstate"]
                self.state[entity] = new_state
                ret = {
                    "event_type": "state_changed",
                    "data": {"entity_id": entity, "new_state": new_state, "old_state": old_state},
                }
                self.logger.debug("*** State Update: %s ***", ret)
                await self.AD.state.process_event(self.namespace, copy.deepcopy(ret))
            elif "event" in event:
                ret = {
                    "event_type": event["event"]["event_type"],
                    "data": event["event"]["data"],
                }
                self.logger.debug("*** Event: %s ***", ret)
                await self.AD.state.process_event(self.namespace, copy.deepcopy(ret))
            elif "disconnect" in event:
                self.logger.debug("*** Disconnected ***")
                self.AD.plugins.notify_plugin_stopped(self.namespace)
            elif "connect" in event:
                self.logger.debug("*** Connected ***")
                await self.AD.plugins.notify_plugin_started(self.namespace)

            self.current_event += 1
            if (
                self.current_event >= len(self.config["sequence"]["events"])
                and "loop" in self.config["sequence"]
                and self.config["sequence"]["loop"] == 1
            ):
                self.current_event = 0
[ 19, 682 ]
def METHOD_NAME(self, field, options):
    options["choices"] = self.get_formatted_field_choices(field)
    return django.forms.MultipleChoiceField(**options)
[ 129, 5090, 101 ]
def METHOD_NAME(manager) -> None:
    """
    Test the update process of legacy options. These options are not
    registered so we cannot reuse the use cases above.
    """
    manager.set("sentry:something", "val")
    assert manager.get("sentry:something") == "val"

    with pytest.raises(AssertionError):
        manager.set("sentry:something_else", "val", channel=UpdateChannel.AUTOMATOR)

    assert (
        manager.can_update("sentry:something_else", "val", channel=UpdateChannel.AUTOMATOR)
        == NotWritableReason.CHANNEL_NOT_ALLOWED
    )
[ 9, 3116, 1335 ]
def METHOD_NAME(self, config):
    super().METHOD_NAME(config)
    FeedgenExtension.activated = bool(config.get('feedgen_activate', False))
    FeedgenExtension.base_url = config.get('feedgen_base_url')
[ 214, 4747, 200 ]
def METHOD_NAME(self, p = "const Vector&"): "Find the facet closest to the given point." return "unsigned"
[ 5221, 1890 ]
def METHOD_NAME(self, import_mock):
    """
    Check if SystemExit is raised when an unavailable command is requested.
    """
    import_mock.return_value = self._rpyc_mock
    self._mocked_conn.root.command.side_effect = AttributeError()
    with self.assertRaises(SystemExit):
        zapper_run("0.0.0.0", "command")
[ 9, 10029, 22, 909, 1660 ]
def METHOD_NAME(self, requests):
    """`execute` must be implemented in every Python model. `execute`
    function receives a list of pb_utils.InferenceRequest as the only
    argument. This function is called when an inference is requested
    for this model. Depending on the batching configuration (e.g. Dynamic
    Batching) used, `requests` may contain multiple requests. Every
    Python model, must create one pb_utils.InferenceResponse for every
    pb_utils.InferenceRequest in `requests`. If there is an error, you can
    set the error argument when creating a pb_utils.InferenceResponse.

    Parameters
    ----------
    requests : list
        A list of pb_utils.InferenceRequest

    Returns
    -------
    list
        A list of pb_utils.InferenceResponse. The length of this list must
        be the same as `requests`
    """
    responses = []
    # print("num:", len(requests), flush=True)
    for request in requests:
        data = pb_utils.get_input_tensor_by_name(request, self.input_names[0])
        data = data.as_numpy()
        data = 1 / (1 + (np.exp((-data[0]))))
        probs = []
        labels = []
        for l, p in enumerate(data):
            if p > 0.5:
                labels.append(l)
                probs.append(p)
        labels = np.array(labels, dtype=self.output_dtype[0])
        probs = np.array(probs, dtype=self.output_dtype[1])
        # print(labels, probs)
        out_tensor1 = pb_utils.Tensor(self.output_names[0], labels)
        out_tensor2 = pb_utils.Tensor(self.output_names[1], probs)
        inference_response = pb_utils.InferenceResponse(
            output_tensors=[out_tensor1, out_tensor2])
        responses.append(inference_response)
    return responses
[ 750 ]
def METHOD_NAME(self) -> pathlib.Path:
    if self.path:
        return self.path
    return pathlib.Path(super().METHOD_NAME)
[ 21, 551, 1190 ]
def METHOD_NAME(self): return { "Windows" : "windows", "Linux" : "linux", "Macos" : "mac", "Android" : "android", "iOS" : "ios" }.get(str(self.settings.os))
[ 19, 1030, 56, 2773 ]
def METHOD_NAME(self, filePath):
    """
    This function uploads ontology to knowledge graph.

    Arguments:
        filePath - the file path of ontology to be uploaded
    """
    javaFile = self.jpsBaseLib_view.java.io.File(filePath)
    self.kg_client.uploadFile(javaFile)
[ 172, 7534 ]
def METHOD_NAME(self, request, *args, **kwargs):
    uuid = kwargs.pop("uuid")
    self.realm = get_object_or_404(Realm, uuid=uuid, backend="saml")
    self.backend_instance = self.realm.backend_instance
    return super().METHOD_NAME(request, *args, **kwargs)
[ 2506 ]
def METHOD_NAME(self, ciphertext, aad, iv, key):
    al = encode_int(len(aad) * 8, 64)
    msg = aad + iv + ciphertext + al
    d = hmac.new(key, msg, self.hash_alg).digest()
    return d[:self.key_len]
[ 8264 ]
def METHOD_NAME(host, targetParentPID, procname):
    """
    returns (parentPID,procPID) tuple for the procname with the specified parent
    """
    cmdStr = "ps -ef | grep '%s' | grep -v grep" % (procname)
    cmd = Command("ps", cmdStr, ctxt=REMOTE, remoteHost=host)
    cmd.run(validateAfter=True)

    sout = cmd.get_results().stdout
    logger.info(cmd.get_results().printResult())
    if sout is None:
        return (0, 0)

    lines = sout.split('\n')
    for line in lines:
        if line == '':
            continue
        fields = line.lstrip(' ').split()
        if len(fields) < 3:
            logger.info("not enough fields line: '%s'" % line)
            return (0, 0)
        procPID = int(line.split()[1])
        parentPID = int(line.split()[2])
        if parentPID == targetParentPID:
            return (parentPID, procPID)

    logger.info("couldn't find process with name: %s which is a child of PID: %s" % (procname, targetParentPID))
    return (0, 0)
[ 19, 2305, 41, 935 ]
def METHOD_NAME():
    class Point(ak.Record):
        def __getitem__(self, where):
            return ak.Array([1, 2, 3])

    array = ak.Array(
        [[{"rho": 1, "phi": 1.0}], [], [{"rho": 2, "phi": 2.0}]],
        with_name="point",
        behavior={"point": Point},
    )

    assert array.to_list() == [
        [{"rho": [1, 2, 3], "phi": [1, 2, 3]}],
        [],
        [{"rho": [1, 2, 3], "phi": [1, 2, 3]}],
    ]
    assert array[0].to_list() == [{"rho": [1, 2, 3], "phi": [1, 2, 3]}]
    assert array[0, 0].to_list() == {"rho": [1, 2, 3], "phi": [1, 2, 3]}
[ 9, 148, 24, 877 ]
def METHOD_NAME(self):
    hit_except = False
    try:
        pass
    except:
        hit_except = True
    self.assertFalse(hit_except)
[ 9, 1365, 1366, 654, 442 ]
def METHOD_NAME(self):
    self.assertEqual(120, self.harvester_class.total_time())
[ 9, 395, 104 ]
def METHOD_NAME(**attrs):
    # Make sure we have any requirements needed to interpret 'attrs'.
    _install_setup_requires(attrs)
    return distutils.core.METHOD_NAME(**attrs)
[ 102 ]
async def METHOD_NAME(self, search_id):
    return {'success': True}
[ 34, 539, 550 ]
def METHOD_NAME(tlist, y, inverse=False):
    """
    Calculate the power spectrum corresponding to a two-time correlation
    function using FFT.

    Parameters
    ----------
    tlist : array_like
        list/array of times :math:`t` which the correlation function is given.
    y : array_like
        list/array of correlations corresponding to time delays :math:`t`.
    inverse : boolean
        boolean parameter for using a positive exponent in the Fourier
        Transform instead. Default is False.

    Returns
    -------
    w, S : tuple
        Returns an array of angular frequencies 'w' and the corresponding
        two-sided power spectrum 'S(w)'.
    """
    tlist = np.asarray(tlist)
    N = tlist.shape[0]
    dt = tlist[1] - tlist[0]
    if not np.allclose(np.diff(tlist), dt * np.ones(N - 1, dtype=float)):
        raise ValueError('tlist must be equally spaced for FFT.')

    F = (N * scipy.fftpack.ifft(y)) if inverse else scipy.fftpack.fft(y)

    # calculate the frequencies for the components in F
    f = scipy.fftpack.fftfreq(N, dt)

    # re-order frequencies from most negative to most positive (centre on 0)
    idx = np.array([], dtype='int')
    idx = np.append(idx, np.where(f < 0.0))
    idx = np.append(idx, np.where(f >= 0.0))

    return 2 * np.pi * f[idx], 2 * dt * np.real(F[idx])
[ 1940, 4505, 4548 ]
def METHOD_NAME(self):
[ 266, 4696 ]
def METHOD_NAME(): """Returns a collection of products that are installed in the system.""" products = [] for puid in get_installed_products_uids(): products.append(populate_product(puid)) return products
[ 19, 1255, 4866 ]
def METHOD_NAME(number):
    data = []
    a = 0
    f = open('tool.tbl', 'r')
    length = ''
    diameter = ''
    txt = ''
    x = 0
    for data in f.readlines():
        for i in range(0, 13):
            if data[i]:
                if data.find('T' + str(number) + ' ') > -1:
                    if data.find('Z') > -1:
                        txt = data[data.find('Z'):]
                        x = txt.find(' ')
                        length = data[data.find('Z') + 1: data.find('Z') + x]
                    if data.find('D') > -1:
                        txt = data[data.find('D'):]
                        x = txt.find(' ')
                        diameter = data[data.find('D') + 1: data.find('D') + x]
                    if data.find(';') > -1:
                        txt = data[data.find(';'):]
                        x = txt.find('\n')
                        desc = data[data.find(';') + 1: data.find(';') + x]
    f2 = open('tool.dat', 'w')
    if f2:
        f2.write(str(number) + ',' + diameter + ',' + length + ',' + desc)
        f2.close()
    f.close()
[ 0, -1 ]
def METHOD_NAME(self, f, args):
    self.map(f, [args] * self.size)
[ 784, 693 ]
def METHOD_NAME():
    spectre = CpuVulns(context_wrap(INPUT_SPEC_STORE_BYPASS_3,
                                    path='/sys/devices/system/cpu/vulnerabilities/spec_store_bypass'))
    assert spectre.value == INPUT_SPEC_STORE_BYPASS_3
    assert spectre.file_name == 'spec_store_bypass'
[ 9, 2265, -1, 1457, 1308, 8808, 490 ]
def METHOD_NAME(self) -> str:
    return pulumi.get(self, "channel_arn")
[ 307, 1059 ]
def METHOD_NAME(self) -> StatusResponse:
    """
    Set up the connection required by the handler.

    Returns:
        HandlerStatusResponse
    """
    return self.db.METHOD_NAME()
[ 707 ]
def METHOD_NAME(self, ctx):
    self.parameters['dimensions'] = []
[ 7576, 5164 ]
async def METHOD_NAME(self):
    should_this_thread_refresh = False
    async with self._lock:
        while self._is_token_expiring_soon(self._token):
            if self._some_thread_refreshing:
                if self._is_token_valid(self._token):
                    return self._token
                await self._wait_till_lock_owner_finishes_refreshing()
            else:
                should_this_thread_refresh = True
                self._some_thread_refreshing = True
                break

    if should_this_thread_refresh:
        try:
            new_token = await self._token_refresher()
            if not self._is_token_valid(new_token):
                raise ValueError(
                    "The token returned from the token_refresher is expired."
                )
            async with self._lock:
                self._token = new_token
                self._some_thread_refreshing = False
                self._lock.notify_all()
        except:
            async with self._lock:
                self._some_thread_refreshing = False
                self._lock.notify_all()
            raise

    if self._proactive_refresh:
        self._schedule_refresh()
    return self._token
[ 86, 466, 61, 2394 ]
def METHOD_NAME(self):
    message = "data must be QuerySet-like (have count() and order_by()) or support list(data)"

    class Klass:
        pass

    class Bad:
        def __len__(self):
            pass

    invalid = [None, 1, Klass(), Bad()]
    for data in invalid:
        with self.subTest(data=data), self.assertRaisesMessage(ValueError, message):
            TableData.from_data(data)
[ 9, 532, 365 ]
def METHOD_NAME(self):
    parameters = {
        **self.serialize_url_param(
            "projectName", self.ctx.args.project_name,
            required=True,
        ),
        **self.serialize_url_param(
            "resourceGroupName", self.ctx.args.resource_group,
            required=True,
        ),
        **self.serialize_url_param(
            "storageMoverName", self.ctx.args.storage_mover_name,
            required=True,
        ),
        **self.serialize_url_param(
            "subscriptionId", self.ctx.subscription_id,
            required=True,
        ),
    }
    return parameters
[ 274, 386 ]
async def METHOD_NAME(request: web.BaseRequest):
    """
    Request handler for creating and sending a request to a configured endorser

    Args:
        request: aiohttp request object

    Returns:
        The resulting connection record details
    """
    context: AdminRequestContext = request["context"]
    tenant_wallet_id = context.profile.settings.get("wallet.id")

    tenant_mgr = context.inject(TenantManager)
    root_profile = tenant_mgr.profile
    profile = context.profile

    async with root_profile.session() as session:
        tenant_record = await TenantRecord.query_by_wallet_id(session, tenant_wallet_id)

    # issuer check
    if (
        not tenant_record.connected_to_endorsers
        or not tenant_record.created_public_did
        or (
            tenant_record.connected_to_endorsers
            and tenant_record.connected_to_endorsers == []
        )
        or (tenant_record.created_public_did and tenant_record.created_public_did == [])
    ):
        raise web.HTTPBadRequest(
            reason=(
                "Tenant is not configured as an issuer, cannot "
                "connect with endorser or create public did"
            )
        )

    endorser_srv = context.inject(EndorserConnectionService)
    info = endorser_srv.endorser_info(profile)
    if not info:
        raise web.HTTPConflict(reason="Endorser is not configured")

    request = await endorser_srv.connect_with_endorser(profile, context.injector)

    return web.json_response(request.serialize())
[ 7994, 550, 0 ]
def METHOD_NAME(self, bundle, **kwargs):
    domain = kwargs['domain']
    pk = kwargs['pk']
    try:
        user = self.Meta.object_class.get_by_user_id(pk, domain)
    except KeyError:
        user = None
    return user
[ 122, 19 ]
def METHOD_NAME(
    operands: Sequence[Operation | SSAValue],
    result_types: Sequence[Attribute],
    lower_bound: int | AffineMapAttr,
    upper_bound: int | AffineMapAttr,
    region: Region,
    step: int | AnyIntegerAttr = 1,
) -> For:
    if isinstance(lower_bound, int):
        lower_bound = AffineMapAttr(
            AffineMap(0, 0, (AffineExpr.constant(lower_bound),))
        )
    if isinstance(upper_bound, int):
        upper_bound = AffineMapAttr(
            AffineMap(0, 0, (AffineExpr.constant(upper_bound),))
        )
    if isinstance(step, int):
        step = IntegerAttr.from_index_int_value(step)
    attributes: dict[str, Attribute] = {
        "lower_bound": lower_bound,
        "upper_bound": upper_bound,
        "step": step,
    }
    return For.build(
        operands=[operands],
        result_types=[result_types],
        attributes=attributes,
        regions=[region],
    )
[ 280, 1216 ]
def METHOD_NAME(evt: Event, var: GameState, role):
    if not ENABLED or role != "alpha wolf":
        return
    can_bite = get_all_players(var, ("alpha wolf",)) - ALPHAS
    if can_bite:
        for alpha in can_bite:
            alpha.queue_message(messages["wolf_bite"])
        User.send_messages()
[ 69, -1, 959 ]
def METHOD_NAME():
    logger.info("post router-start hook")
    return True
[ -1, 72, 2501, 447, 1021 ]
def METHOD_NAME(self): """Called after each test, unloading all plugins.""" self.teardown_beets()
[ 531, 481 ]
def METHOD_NAME(string):
    return _encode(str(string))
[ 321 ]
def METHOD_NAME(self):
    class MyCallback1(CallbackData, prefix="test1"):
        foo: str
        bar: Optional[int] = None

    assert MyCallback1(foo="spam").pack() == "test1:spam:"
    assert MyCallback1(foo="spam", bar=42).pack() == "test1:spam:42"

    class MyCallback2(CallbackData, prefix="test2"):
        foo: Optional[str] = None
        bar: int

    assert MyCallback2(bar=42).pack() == "test2::42"
    assert MyCallback2(foo="spam", bar=42).pack() == "test2:spam:42"

    class MyCallback3(CallbackData, prefix="test3"):
        foo: Optional[str] = "experiment"
        bar: int

    assert MyCallback3(bar=42).pack() == "test3:experiment:42"
    assert MyCallback3(foo="spam", bar=42).pack() == "test3:spam:42"
[ 9, 1699, 665 ]
def METHOD_NAME(self):
[ 181, 10890 ]
def METHOD_NAME(
    self,
    location,  # type: str
    sku=None,  # type: Optional["_models.Sku"]
    **kwargs  # type: Any
):
    # type: (...) -> "_models.Trial"
    """Return trial status for subscription by region.

    :param location: Azure region.
    :type location: str
    :param sku: The sku to check for trial availability.
    :type sku: ~avs_client.models.Sku
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: Trial, or the result of cls(response)
    :rtype: ~avs_client.models.Trial
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.Trial"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2022-05-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json"

    # Construct URL
    url = self.METHOD_NAME.metadata['url']  # type: ignore
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
        'location': self._serialize.url("location", location, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    # Construct parameters
    query_parameters = {}  # type: Dict[str, Any]
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    # Construct headers
    header_parameters = {}  # type: Dict[str, Any]
    header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    body_content_kwargs = {}  # type: Dict[str, Any]
    if sku is not None:
        body_content = self._serialize.body(sku, 'Sku')
    else:
        body_content = None
    body_content_kwargs['content'] = body_content
    request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('Trial', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
[ 250, 2943, 6477 ]
def METHOD_NAME():
    value = ANSI("\x1b[32mHe\x1b[45mllo")
    assert to_formatted_text(value) == [
        ("ansigreen", "H"),
        ("ansigreen", "e"),
        ("ansigreen bg:ansimagenta", "l"),
        ("ansigreen bg:ansimagenta", "l"),
        ("ansigreen bg:ansimagenta", "o"),
    ]

    # Bold and italic.
    value = ANSI("\x1b[1mhe\x1b[0mllo")
    assert to_formatted_text(value) == [
        ("bold", "h"),
        ("bold", "e"),
        ("", "l"),
        ("", "l"),
        ("", "o"),
    ]

    # Zero width escapes.
    value = ANSI("ab\001cd\002ef")
    assert to_formatted_text(value) == [
        ("", "a"),
        ("", "b"),
        ("[ZeroWidthEscape]", "cd"),
        ("", "e"),
        ("", "f"),
    ]

    assert isinstance(to_formatted_text(value), FormattedText)
[ 9, 1113, 1901 ]